Java Code Examples for org.apache.hadoop.mapred.RecordWriter#close()

The following examples show how to use org.apache.hadoop.mapred.RecordWriter#close(). They are drawn from open-source projects; each example notes its source file, originating project, and license.
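
Before the examples, a minimal sketch of the contract being exercised may help. In the old mapred API, RecordWriter declares exactly two methods, write(K, V) and close(Reporter), and close() is where buffered output is flushed and underlying resources are released. The class below is illustrative only; LineRecordWriter and its stream handling are assumptions for this sketch, not code from any project featured here.

import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;

// Hypothetical writer: emits each Text value as one line on a stream.
public class LineRecordWriter implements RecordWriter<NullWritable, Text> {
  private final DataOutputStream out;

  public LineRecordWriter(DataOutputStream out) {
    this.out = out;
  }

  @Override
  public void write(NullWritable key, Text value) throws IOException {
    // Append one record per line.
    out.write(value.getBytes(), 0, value.getLength());
    out.write('\n');
  }

  @Override
  public void close(Reporter reporter) throws IOException {
    // Release the underlying stream. The Reporter argument may be
    // Reporter.NULL, or even null when no progress reporting is needed,
    // as the test examples below demonstrate.
    out.close();
  }
}

Callers obtain such a writer through OutputFormat#getRecordWriter(FileSystem, JobConf, String, Progressable) and must call close() once all records are written, as every example below does.
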
Example 1
Source File: MneMapredChunkDataTest.java    From mnemonic with Apache License 2.0
@Test(enabled = true)
public void testWriteChunkData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<DurableChunk<?>> sess =
          new MneDurableOutputSession<DurableChunk<?>>(m_tacontext, null,
                  MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<DurableChunk<?>> mdvalue =
          new MneDurableOutputValue<DurableChunk<?>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<DurableChunk<?>>> outputFormat =
          new MneOutputFormat<MneDurableOutputValue<DurableChunk<?>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<DurableChunk<?>>> writer =
          outputFormat.getRecordWriter(null, m_conf, null, null);
  DurableChunk<?> dchunk = null;
  Checksum cs = new CRC32();
  cs.reset();
  for (int i = 0; i < m_reccnt; ++i) {
    dchunk = genupdDurableChunk(sess, cs);
    Assert.assertNotNull(dchunk);
    writer.write(nada, mdvalue.of(dchunk));
  }
  m_checksum = cs.getValue();
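  // The mapred close() takes a Reporter; none is needed in this test, so null is passed.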
  writer.close(null);
  sess.close();
}
 
Example 2
Source File: MneMapredLongDataTest.java    From mnemonic with Apache License 2.0
@Test(enabled = true)
public void testWriteLongData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Long> sess =
          new MneDurableOutputSession<Long>(m_tacontext, null,
                  MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Long> mdvalue =
          new MneDurableOutputValue<Long>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Long>> outputFormat =
          new MneOutputFormat<MneDurableOutputValue<Long>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Long>> writer =
          outputFormat.getRecordWriter(null, m_conf, null, null);
  Long val = null;
  for (int i = 0; i < m_reccnt; ++i) {
    val = m_rand.nextLong();
    m_sum += val;
    writer.write(nada, mdvalue.of(val));
  }
  writer.close(null);
  sess.close();
}
 
Example 3
Source File: MneMapredPersonDataTest.java    From mnemonic with Apache License 2.0
@Test(enabled = true)
public void testWritePersonData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Person<Long>> sess =
          new MneDurableOutputSession<Person<Long>>(m_tacontext, null,
                  MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Person<Long>> mdvalue =
          new MneDurableOutputValue<Person<Long>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat =
          new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer =
          outputFormat.getRecordWriter(null, m_conf, null, null);
  Person<Long> person = null;
  for (int i = 0; i < m_reccnt; ++i) {
    person = sess.newDurableObjectRecord();
    person.setAge((short) m_rand.nextInt(50));
    person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
    m_sumage += person.getAge();
    writer.write(nada, mdvalue.of(person));
  }
  writer.close(null);
  sess.close();
}
 
Example 4
Source File: MneMapredBufferDataTest.java    From mnemonic with Apache License 2.0
@Test(enabled = true)
public void testWriteBufferData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<DurableBuffer<?>> sess =
      new MneDurableOutputSession<DurableBuffer<?>>(null, m_conf,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<DurableBuffer<?>> mdvalue =
      new MneDurableOutputValue<DurableBuffer<?>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<DurableBuffer<?>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> writer =
      outputFormat.getRecordWriter(m_fs, m_conf, null, null);
  DurableBuffer<?> dbuf = null;
  Checksum cs = new CRC32();
  cs.reset();
  for (int i = 0; i < m_reccnt; ++i) {
    dbuf = genupdDurableBuffer(sess, cs);
    Assert.assertNotNull(dbuf);
    writer.write(nada, mdvalue.of(dbuf));
  }
  m_checksum = cs.getValue();
  writer.close(null);
  sess.close();
}
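Note that, unlike Examples 1 through 3, this test builds the session from a raw Configuration (m_conf) rather than a TaskAttemptContext, and hands a FileSystem (m_fs) to getRecordWriter(); the shutdown sequence, writer.close(null) followed by sess.close(), is the same.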
 
Example 5
Source File: TestTableOutputFormatConnectionExhaust.java    From hbase with Apache License 2.0
/**
 * Open and close a TableOutputFormat. Closing the RecordWriter should release the
 * HBase connection (ZK) resources and will throw an exception if they are exhausted.
 */
static void openCloseTableOutputFormat(int iter)  throws IOException {
  LOG.info("Instantiating TableOutputFormat connection  " + iter);
  JobConf conf = new JobConf();
  conf.addResource(UTIL.getConfiguration());
  conf.set(TableOutputFormat.OUTPUT_TABLE, TABLE);
  TableMapReduceUtil.initTableMapJob(TABLE, FAMILY, TableMap.class,
      ImmutableBytesWritable.class, ImmutableBytesWritable.class, conf);
  TableOutputFormat tof = new TableOutputFormat();
  RecordWriter rw = tof.getRecordWriter(null, conf, TABLE, null);
  rw.close(null);
}
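
The test presumably invokes this helper repeatedly; a hypothetical driver (not taken from the source file) would look like the following, where a connection leaked by an earlier close() eventually surfaces as an exception:

for (int iter = 0; iter < 100; iter++) {
  // Each iteration opens a TableOutputFormat and closes its RecordWriter;
  // if rw.close(null) failed to release the HBase/ZooKeeper connection,
  // a later iteration would exhaust the pool and fail.
  openCloseTableOutputFormat(iter);
}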
 
Example 6
Source File: TestRecordReaderImpl.java    From hive-dwrf with Apache License 2.0
@Test
public void testPartialReadingWithSeeks() throws IOException {
  // First we create a file of a small size and having two stripes.
  final String fileName = "file_with_long_col";
  final JobConf jobConf = new JobConf();
  jobConf.setLong(OrcConf.ConfVars.HIVE_ORC_STRIPE_SIZE.varname, 1024L);
  final Path filePath = new Path(this.stagingDir.toString(), fileName);
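  // The ORC writer returned by getHiveRecordWriter() also implements the
  // mapred RecordWriter interface, hence the cast here and close(null) below.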
  final RecordWriter hiveRecordWriter = (RecordWriter) (new OrcOutputFormat().getHiveRecordWriter(
      jobConf, filePath, null, true, new Properties(), null));

  final OrcSerde orcSerde = new OrcSerde();
  ObjectInspector objectInspector = getObjectInspectorFor(
      ImmutableList.of("col1"),
      ImmutableList.of("bigint"));

  final Random rand = new Random();
  for (int i = 0; i < 1000; i++) {
    final List<Object> allColumns = new ArrayList<>();
    allColumns.add(rand.nextLong());
    Object obj = orcSerde.serialize(allColumns, objectInspector);
    hiveRecordWriter.write(NullWritable.get(), obj);
  }
  hiveRecordWriter.close(null);

  // Get all the stripes in the file written.
  final Configuration configuration = new Configuration();
  final FileSystem fileSystem = filePath.getFileSystem(configuration);
  final ReaderImpl readerImpl = new ReaderImpl(fileSystem, filePath, configuration);
  final ArrayList<StripeInformation> stripes = Lists.newArrayList(readerImpl.getStripes());
  assertTrue("Number of stripes produced should be >= 2", stripes.size() >= 2);

  // Read the file back and read with ReaderImpl over only the 2nd stripe.
  final boolean[] toRead = {true, true};
  final RecordReader recordReader = new ReaderImpl(fileSystem, filePath, configuration).rows(
      stripes.get(1).getOffset(), stripes.get(1).getDataLength(), toRead);
  OrcLazyRow row = null;
  final List<Long> longValuesForStripe = new ArrayList<>();
  while (recordReader.hasNext()) {
    row = (OrcLazyRow) recordReader.next(row);
    longValuesForStripe.add(row.getFieldValue(0).materializeLong());
  }

  // Seek to the beginning of the 2nd stripe and ensure that seeking works fine.
  recordReader.seekToRow(stripes.get(0).getNumberOfRows());
  assertEquals(recordReader.getRowNumber(), stripes.get(0).getNumberOfRows());
  row = (OrcLazyRow) recordReader.next(row);
  assertEquals((long) longValuesForStripe.get(0), row.getFieldValue(0).materializeLong());
}