Java Code Examples for org.apache.hadoop.mapreduce.RecordWriter

The following examples show how to use org.apache.hadoop.mapreduce.RecordWriter. These examples are extracted from open source projects; the source project, source file, and license are noted above each example.
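As background for the examples below, a RecordWriter subclass implements two methods: write(K, V), which emits one key/value pair, and close(TaskAttemptContext), which flushes and releases resources. The following minimal sketch illustrates that contract; TabSeparatedRecordWriter and its constructor argument are hypothetical names used for illustration, not part of Hadoop.

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Hypothetical sketch: writes "key<TAB>value" lines to a stream that the
// owning OutputFormat would supply (e.g. via FileSystem.create()).
public class TabSeparatedRecordWriter<K, V> extends RecordWriter<K, V> {

  private final DataOutputStream out;

  public TabSeparatedRecordWriter(DataOutputStream out) {
    this.out = out;
  }

  @Override
  public void write(K key, V value) throws IOException {
    // Null keys and values are tolerated, as several tests below exercise.
    if (key != null) {
      out.write(key.toString().getBytes(StandardCharsets.UTF_8));
    }
    if (key != null && value != null) {
      out.write('\t');
    }
    if (value != null) {
      out.write(value.toString().getBytes(StandardCharsets.UTF_8));
    }
    out.write('\n');
  }

  @Override
  public void close(TaskAttemptContext context) throws IOException {
    // Flush and release the underlying stream exactly once.
    out.close();
  }
}

An OutputFormat returns such a writer from getRecordWriter(TaskAttemptContext); Examples 5, 17, and 22 below show real implementations of that method.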
Example 1
Source Project: mnemonic   Source File: MneMapreducePersonDataTest.java    License: Apache License 2.0
@Test(enabled = true)
public void testWritePersonData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Person<Long>> sess =
      new MneDurableOutputSession<Person<Long>>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Person<Long>> mdvalue =
      new MneDurableOutputValue<Person<Long>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  Person<Long> person = null;
  for (int i = 0; i < m_reccnt; ++i) {
    person = sess.newDurableObjectRecord();
    person.setAge((short) m_rand.nextInt(50));
    person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
    m_sumage += person.getAge();
    writer.write(nada, mdvalue.of(person));
  }
  writer.close(m_tacontext);
  sess.close();
}
 
Example 2
Source Project: hadoop   Source File: TestFileOutputCommitter.java    License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Example 3
Source Project: big-c   Source File: TestMRCJCFileOutputCommitter.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Example 4
Source Project: datawave   Source File: SafeFileOutputCommitterTest.java    License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context) throws IOException, InterruptedException {
    NullWritable nullWritable = NullWritable.get();
    
    try {
        theRecordWriter.write(key1, val1);
        theRecordWriter.write(null, nullWritable);
        theRecordWriter.write(null, val1);
        theRecordWriter.write(nullWritable, val2);
        theRecordWriter.write(key2, nullWritable);
        theRecordWriter.write(key1, null);
        theRecordWriter.write(null, null);
        theRecordWriter.write(key2, val2);
    } finally {
        theRecordWriter.close(context);
    }
}
 
Example 5
Source Project: hadoopoffice   Source File: ExcelRowFileOutputFormat.java    License: Apache License 2.0
@Override
public RecordWriter<NullWritable, ArrayWritable> getRecordWriter(TaskAttemptContext context) throws IOException {
	// check if mimeType is set; if not, assume the new Excel format (.xlsx)
	Configuration conf = context.getConfiguration();
	String mimeType = conf.get(HadoopOfficeWriteConfiguration.CONF_MIMETYPE, ExcelFileOutputFormat.DEFAULT_MIMETYPE);
	conf.set(HadoopOfficeWriteConfiguration.CONF_MIMETYPE, mimeType);
	// add suffix
	Path file = getDefaultWorkFile(context, ExcelFileOutputFormat.getSuffix(conf.get(HadoopOfficeWriteConfiguration.CONF_MIMETYPE)));
	try {
		return new ExcelRowRecordWriter<>(
				HadoopUtil.getDataOutputStream(conf, file, context, getCompressOutput(context),
						getOutputCompressorClass(context, ExcelFileOutputFormat.defaultCompressorClass)),
				file.getName(), conf);
	} catch (InvalidWriterConfigurationException | InvalidCellSpecificationException | FormatNotUnderstoodException
			| GeneralSecurityException | OfficeWriterException e) {
		LOG.error(e);
	}
	return null;
}
 
Example 6
Source Project: big-c   Source File: TestFileOutputCommitter.java    License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Example 7
Source Project: hadoop   Source File: TestMRCJCFileOutputCommitter.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private void writeOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();

  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
 
Example 8
Source Project: big-c   Source File: Chain.java    License: Apache License 2.0
/**
 * Add mapper that reads and writes from/to the queue
 */
@SuppressWarnings("unchecked")
void addMapper(ChainBlockingQueue<KeyValuePair<?, ?>> input,
    ChainBlockingQueue<KeyValuePair<?, ?>> output,
    TaskInputOutputContext context, int index) throws IOException,
    InterruptedException {
  Configuration conf = getConf(index);
  Class<?> keyClass = conf.getClass(MAPPER_INPUT_KEY_CLASS, Object.class);
  Class<?> valueClass = conf.getClass(MAPPER_INPUT_VALUE_CLASS, Object.class);
  Class<?> keyOutClass = conf.getClass(MAPPER_OUTPUT_KEY_CLASS, Object.class);
  Class<?> valueOutClass = conf.getClass(MAPPER_OUTPUT_VALUE_CLASS,
      Object.class);
  RecordReader rr = new ChainRecordReader(keyClass, valueClass, input, conf);
  RecordWriter rw = new ChainRecordWriter(keyOutClass, valueOutClass, output,
      conf);
  MapRunner runner = new MapRunner(mappers.get(index), createMapContext(rr,
      rw, context, getConf(index)), rr, rw);
  threads.add(runner);
}
 
Example 9
Source Project: hadoop   Source File: Chain.java    License: Apache License 2.0
/**
 * Add mapper that reads and writes from/to the queue
 */
@SuppressWarnings("unchecked")
void addMapper(ChainBlockingQueue<KeyValuePair<?, ?>> input,
    ChainBlockingQueue<KeyValuePair<?, ?>> output,
    TaskInputOutputContext context, int index) throws IOException,
    InterruptedException {
  Configuration conf = getConf(index);
  Class<?> keyClass = conf.getClass(MAPPER_INPUT_KEY_CLASS, Object.class);
  Class<?> valueClass = conf.getClass(MAPPER_INPUT_VALUE_CLASS, Object.class);
  Class<?> keyOutClass = conf.getClass(MAPPER_OUTPUT_KEY_CLASS, Object.class);
  Class<?> valueOutClass = conf.getClass(MAPPER_OUTPUT_VALUE_CLASS,
      Object.class);
  RecordReader rr = new ChainRecordReader(keyClass, valueClass, input, conf);
  RecordWriter rw = new ChainRecordWriter(keyOutClass, valueOutClass, output,
      conf);
  MapRunner runner = new MapRunner(mappers.get(index), createMapContext(rr,
      rw, context, getConf(index)), rr, rw);
  threads.add(runner);
}
 
Example 10
@Override
public RecordWriter<BaseDimension, BaseStatsValueWritable> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    Connection conn = null;
    IDimensionConverter converter = DimensionConverterClient.createDimensionConverter(conf);
    try {
        conn = JdbcManager.getConnection(conf, GlobalConstants.WAREHOUSE_OF_WEBSITE);
        conn.setAutoCommit(false);
    } catch (SQLException e) {
        logger.error("获取数据库连接失败", e);
        throw new IOException("获取数据库连接失败", e);
    }
    return new TransformerRecordWriter(conn, conf, converter);
}
 
Example 11
Source Project: Flink-CEPplus   Source File: HadoopOutputFormatTest.java    License: Apache License 2.0
@Test
public void testWriteRecord() throws Exception {

	RecordWriter<String, Long> recordWriter = mock(DummyRecordWriter.class);
	HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat(new DummyOutputFormat(),
		Job.getInstance(), recordWriter, null, new Configuration());

	hadoopOutputFormat.writeRecord(new Tuple2<String, Long>());

	verify(recordWriter, times(1)).write(nullable(String.class), nullable(Long.class));
}
 
Example 12
Source Project: Flink-CEPplus   Source File: HadoopOutputFormatTest.java    License: Apache License 2.0
@Test
public void testCloseWithNeedsTaskCommitTrue() throws Exception {

	RecordWriter<String, Long> recordWriter = Mockito.mock(DummyRecordWriter.class);
	OutputCommitter outputCommitter = setupOutputCommitter(true);

	HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat(new DummyOutputFormat(),
		Job.getInstance(), recordWriter, outputCommitter, new Configuration());

	hadoopOutputFormat.close();

	verify(outputCommitter, times(1)).commitTask(nullable(TaskAttemptContext.class));
	verify(recordWriter, times(1)).close(nullable(TaskAttemptContext.class));
}
 
Example 13
Source Project: hadoop   Source File: Chain.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
void runMapper(TaskInputOutputContext context, int index) throws IOException,
    InterruptedException {
  Mapper mapper = mappers.get(index);
  RecordReader rr = new ChainRecordReader(context);
  RecordWriter rw = new ChainRecordWriter(context);
  Mapper.Context mapperContext = createMapContext(rr, rw, context,
      getConf(index));
  mapper.run(mapperContext);
  rr.close();
  rw.close(context);
}
 
Example 14
Source Project: Flink-CEPplus   Source File: HadoopOutputFormatTest.java    License: Apache License 2.0
private HadoopOutputFormat<String, Long> setupHadoopOutputFormat(
	OutputFormat<String, Long> outputFormat,
	Job job,
	RecordWriter<String, Long> recordWriter,
	OutputCommitter outputCommitter,
	Configuration configuration) {

	HadoopOutputFormat<String, Long> hadoopOutputFormat = new HadoopOutputFormat<>(outputFormat, job);
	hadoopOutputFormat.recordWriter = recordWriter;
	hadoopOutputFormat.outputCommitter = outputCommitter;
	hadoopOutputFormat.configuration = configuration;
	hadoopOutputFormat.configuration.set(MAPRED_OUTPUT_DIR_KEY, MAPRED_OUTPUT_PATH);

	return hadoopOutputFormat;
}
 
Example 15
Source Project: big-c   Source File: ChainMapContextImpl.java    License: Apache License 2.0
ChainMapContextImpl(
    TaskInputOutputContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT> base,
    RecordReader<KEYIN, VALUEIN> rr, RecordWriter<KEYOUT, VALUEOUT> rw,
    Configuration conf) {
  this.reader = rr;
  this.output = rw;
  this.base = base;
  this.conf = conf;
}
 
Example 16
Source Project: big-c   Source File: TestMRCJCFileOutputCommitter.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
public void testAbort() throws IOException, InterruptedException {
  Job job = Job.getInstance();
  FileOutputFormat.setOutputPath(job, outDir);
  Configuration conf = job.getConfiguration();
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
  JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
  FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);

  // do setup
  committer.setupJob(jContext);
  committer.setupTask(tContext);

  // write output
  TextOutputFormat theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
  writeOutput(theRecordWriter, tContext);

  // do abort
  committer.abortTask(tContext);
  File expectedFile = new File(new Path(committer.getWorkPath(), partFile)
      .toString());
  assertFalse("task temp dir still exists", expectedFile.exists());

  committer.abortJob(jContext, JobStatus.State.FAILED);
  expectedFile = new File(new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME)
      .toString());
  assertFalse("job temp dir still exists", expectedFile.exists());
  assertEquals("Output directory not empty", 0, new File(outDir.toString())
      .listFiles().length);
  FileUtil.fullyDelete(new File(outDir.toString()));
}
 
Example 17
Source Project: big-c   Source File: TeraOutputFormat.java    License: Apache License 2.0
public RecordWriter<Text,Text> getRecordWriter(TaskAttemptContext job
                                               ) throws IOException {
  Path file = getDefaultWorkFile(job, "");
  FileSystem fs = file.getFileSystem(job.getConfiguration());
  FSDataOutputStream fileOut = fs.create(file);
  return new TeraRecordWriter(fileOut, job);
}
 
Example 18
Source Project: flink   Source File: HadoopOutputFormatTest.java    License: Apache License 2.0
@Test
public void testCloseWithNeedsTaskCommitFalse() throws Exception {

	RecordWriter<String, Long> recordWriter = Mockito.mock(DummyRecordWriter.class);
	OutputCommitter outputCommitter = setupOutputCommitter(false);

	HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat(new DummyOutputFormat(),
		Job.getInstance(), recordWriter, outputCommitter, new Configuration());

	hadoopOutputFormat.close();

	verify(outputCommitter, times(0)).commitTask(nullable(TaskAttemptContext.class));
	verify(recordWriter, times(1)).close(nullable(TaskAttemptContext.class));
}
 
Example 19
Source Project: flink   Source File: HadoopOutputFormatTest.java    License: Apache License 2.0
private HadoopOutputFormat<String, Long> setupHadoopOutputFormat(
	OutputFormat<String, Long> outputFormat,
	Job job,
	RecordWriter<String, Long> recordWriter,
	OutputCommitter outputCommitter,
	Configuration configuration) {

	HadoopOutputFormat<String, Long> hadoopOutputFormat = new HadoopOutputFormat<>(outputFormat, job);
	hadoopOutputFormat.recordWriter = recordWriter;
	hadoopOutputFormat.outputCommitter = outputCommitter;
	hadoopOutputFormat.configuration = configuration;
	hadoopOutputFormat.configuration.set(MAPRED_OUTPUT_DIR_KEY, MAPRED_OUTPUT_PATH);

	return hadoopOutputFormat;
}
 
Example 20
Source Project: hadoop   Source File: TestFileOutputCommitter.java    License: Apache License 2.0
private void writeMapFileOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  try {
    int key = 0;
    for (int i = 0 ; i < 10; ++i) {
      key = i;
      Text val = (i%2 == 1) ? val1 : val2;
      theRecordWriter.write(new LongWritable(key),
          val);        
    }
  } finally {
    theRecordWriter.close(context);
  }
}
 
Example 21
Source Project: beam   Source File: HadoopFormatIO.java    License: Apache License 2.0
private RecordWriter<KeyT, ValueT> initRecordWriter(
    OutputFormat<KeyT, ValueT> outputFormatObj, TaskAttemptContext taskAttemptContext)
    throws IllegalStateException {
  try {
    LOG.info(
        "Creating new RecordWriter for task {} of Job with id {}.",
        taskAttemptContext.getTaskAttemptID().getTaskID().getId(),
        taskAttemptContext.getJobID().getJtIdentifier());
    return outputFormatObj.getRecordWriter(taskAttemptContext);
  } catch (InterruptedException | IOException e) {
    throw new IllegalStateException("Unable to create RecordWriter object: ", e);
  }
}
 
Example 22
Source Project: hadoop   Source File: TeraOutputFormat.java    License: Apache License 2.0
public RecordWriter<Text,Text> getRecordWriter(TaskAttemptContext job
                                               ) throws IOException {
  Path file = getDefaultWorkFile(job, "");
  FileSystem fs = file.getFileSystem(job.getConfiguration());
  FSDataOutputStream fileOut = fs.create(file);
  return new TeraRecordWriter(fileOut, job);
}
 
Example 23
Source Project: gemfirexd-oss   Source File: GFOutputFormat.java    License: Apache License 2.0
@Override
public RecordWriter<Object, Object> getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  ClientCache cache = getClientCacheInstance(conf);
  return new GFRecordWriter(cache, context.getConfiguration());
}
 
Example 24
Source Project: warp10-platform   Source File: Warp10OutputFormat.java    License: Apache License 2.0
@Override
public RecordWriter<Writable, Writable> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
  
  Properties props = new Properties();
  
  Configuration conf = context.getConfiguration();
  
  props.setProperty(Warp10RecordWriter.WARP10_GZIP, Warp10InputFormat.getProperty(conf, this.suffix, Warp10RecordWriter.WARP10_GZIP, "false"));
  props.setProperty(Warp10RecordWriter.WARP10_ENDPOINT, Warp10InputFormat.getProperty(conf, this.suffix, Warp10RecordWriter.WARP10_ENDPOINT, ""));
  props.setProperty(Warp10RecordWriter.WARP10_TOKEN, Warp10InputFormat.getProperty(conf, this.suffix, Warp10RecordWriter.WARP10_TOKEN, ""));
  props.setProperty(Warp10RecordWriter.WARP10_MAXRATE, Warp10InputFormat.getProperty(conf, this.suffix, Warp10RecordWriter.WARP10_MAXRATE, Long.toString(Long.MAX_VALUE)));
  
  return new Warp10RecordWriter(props);
}
 
Example 25
Source Project: datawave   Source File: MultiRFileOutputFormatterTest.java    License: Apache License 2.0
@Test
public void testTableSeparationWithFilePerShardLoc() throws IOException, InterruptedException {
    MultiRFileOutputFormatter.setGenerateMapFilePerShardLocation(conf, true);
    RecordWriter<BulkIngestKey,Value> writer = createWriter(formatter, conf);
    writeShardPairs(writer, 2);
    assertNumFileNames(3);
    assertFileNameForShardIndex(0);
    assertFileNameForShard(1, "server1", 1);
    assertFileNameForShard(2, "server2", 1);
}
 
Example 26
Source Project: datawave   Source File: MultiRFileOutputFormatterTest.java    License: Apache License 2.0
@Test
public void testRFileEntrySizeLimit() throws IOException, InterruptedException, AccumuloSecurityException, AccumuloException, URISyntaxException {
    MultiRFileOutputFormatter.setRFileLimits(conf, 1, 0);
    RecordWriter<BulkIngestKey,Value> writer = createWriter(formatter, conf);
    writeShardPairs(writer, 2);
    assertNumFileNames(5);
    assertFileNameForShardIndex(0);
    expectShardFiles(4);
}
 
Example 27
Source Project: hadoop   Source File: Chain.java    License: Apache License 2.0
public MapRunner(Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> mapper,
    Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context mapperContext,
    RecordReader<KEYIN, VALUEIN> rr, RecordWriter<KEYOUT, VALUEOUT> rw)
    throws IOException, InterruptedException {
  this.mapper = mapper;
  this.rr = rr;
  this.rw = rw;
  this.chainContext = mapperContext;
}
 
Example 28
Source Project: datawave   Source File: MultiRFileOutputFormatterTest.java    License: Apache License 2.0
@Test
public void testRFileEntrySizeLimitWithFilePerShardLoc() throws IOException, InterruptedException {
    MultiRFileOutputFormatter.setRFileLimits(conf, 1, 0);
    MultiRFileOutputFormatter.setGenerateMapFilePerShardLocation(conf, true);
    RecordWriter<BulkIngestKey,Value> writer = createWriter(formatter, conf);
    assertFileNameForShardIndex(0);
    writeShardPairs(writer, 2);
    assertNumFileNames(5);
    assertFileNameForShardIndex(0);
    assertFileNameForShard(1, "server1", 1);
    assertFileNameForShard(2, "server2", 1);
    assertFileNameForShard(3, "server1", 2);
    assertFileNameForShard(4, "server2", 2);
}
 
Example 29
Source Project: hadoop   Source File: TestMRCJCFileOutputCommitter.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
public void testAbort() throws IOException, InterruptedException {
  Job job = Job.getInstance();
  FileOutputFormat.setOutputPath(job, outDir);
  Configuration conf = job.getConfiguration();
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
  JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
  FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);

  // do setup
  committer.setupJob(jContext);
  committer.setupTask(tContext);

  // write output
  TextOutputFormat theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
  writeOutput(theRecordWriter, tContext);

  // do abort
  committer.abortTask(tContext);
  File expectedFile = new File(new Path(committer.getWorkPath(), partFile)
      .toString());
  assertFalse("task temp dir still exists", expectedFile.exists());

  committer.abortJob(jContext, JobStatus.State.FAILED);
  expectedFile = new File(new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME)
      .toString());
  assertFalse("job temp dir still exists", expectedFile.exists());
  assertEquals("Output directory not empty", 0, new File(outDir.toString())
      .listFiles().length);
  FileUtil.fullyDelete(new File(outDir.toString()));
}
 
Example 30
Source Project: big-c   Source File: TestRecovery.java    License: Apache License 2.0
private void writeOutput(TaskAttempt attempt, Configuration conf)
  throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, 
      TypeConverter.fromYarn(attempt.getID()));
  
  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat
      .getRecordWriter(tContext);
  
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(tContext);
  }
  
  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}