Java Code Examples for org.apache.hadoop.io.DataOutputBuffer#close()

The following examples show how to use org.apache.hadoop.io.DataOutputBuffer#close(). They are taken from open-source projects; the originating project and source file are noted above each example.
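Before the project examples, here is a minimal, self-contained sketch of the pattern most of them follow: serialize a Writable into a DataOutputBuffer, hand the result off via getData()/getLength(), and then close() the buffer. The sketch is illustrative only and is not taken from any of the projects below; the class name and the Text value are made up for the demonstration.

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferCloseSketch {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    try {
      // Serialize a Writable into the in-memory buffer.
      new Text("hello").write(out);

      // getData() exposes the backing array; only the first getLength() bytes are valid.
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), 0, out.getLength());

      // Round-trip the value to show the buffered bytes are usable.
      Text copy = new Text();
      copy.readFields(in);
      System.out.println(copy);
      in.close();
    } finally {
      // Close the buffer once its contents have been consumed.
      out.close();
    }
  }
}

Because getData() returns the whole backing array, which may be longer than the data actually written, consumers should pair it with getLength(); the HFilesystemAdmin example below copies the array down to getLength() for exactly this reason.
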
Example 1
Source File: SequenceFile.java    From gemfirexd-oss with Apache License 2.0
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer, 
                                     CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();

  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);
  
    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
 
Example 2
Source File: TestClientKeyValueLocal.java    From phoenix with BSD 3-Clause "New" or "Revised" License
private void validate(KeyValue kv, byte[] row, byte[] family, byte[] qualifier, long ts,
    Type type, byte[] value) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  kv.write(out);
  out.close();
  byte[] data = out.getData();
  // read it back in
  KeyValue read = new KeyValue();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, data.length);
  read.readFields(in);
  in.close();

  // validate that it's the same
  assertTrue("Row didn't match!", Bytes.equals(row, read.getRow()));
  assertTrue("Family didn't match!", Bytes.equals(family, read.getFamily()));
  assertTrue("Qualifier didn't match!", Bytes.equals(qualifier, read.getQualifier()));
  assertTrue("Value didn't match!", Bytes.equals(value, read.getValue()));
  assertEquals("Timestamp didn't match", ts, read.getTimestamp());
  assertEquals("Type didn't match", type.getCode(), read.getType());
}
 
Example 3
Source File: HFilesystemAdmin.java    From spliceengine with GNU Affero General Public License v3.0
private static byte[] toByteArray(Writable... writables) {
    final DataOutputBuffer out = new DataOutputBuffer();
    try {
        for(Writable w : writables) {
            w.write(out);
        }
        out.close();
    } catch (IOException e) {
        throw new RuntimeException("Failed to convert writables to a byte array", e);
    }
    byte[] bytes = out.getData();
    if (bytes.length == out.getLength()) {
        return bytes;
    }
    byte[] result = new byte[out.getLength()];
    System.arraycopy(bytes, 0, result, 0, out.getLength());
    return result;
}
 
Example 4
Source File: TestEditsDoubleBuffer.java    From hadoop with Apache License 2.0
@Test
public void testDoubleBuffer() throws IOException {
  EditsDoubleBuffer buf = new EditsDoubleBuffer(1024);
  
  assertTrue(buf.isFlushed());
  byte[] data = new byte[100];
  buf.writeRaw(data, 0, data.length);
  assertEquals("Should count new data correctly",
      data.length, buf.countBufferedBytes());

  assertTrue("Writing to current buffer should not affect flush state",
      buf.isFlushed());

  // Swap the buffers
  buf.setReadyToFlush();
  assertEquals("Swapping buffers should still count buffered bytes",
      data.length, buf.countBufferedBytes());
  assertFalse(buf.isFlushed());
 
  // Flush to a stream
  DataOutputBuffer outBuf = new DataOutputBuffer();
  buf.flushTo(outBuf);
  assertEquals(data.length, outBuf.getLength());
  assertTrue(buf.isFlushed());
  assertEquals(0, buf.countBufferedBytes());
  
  // Write some more
  buf.writeRaw(data, 0, data.length);
  assertEquals("Should count new data correctly",
      data.length, buf.countBufferedBytes());
  buf.setReadyToFlush();
  buf.flushTo(outBuf);
  
  assertEquals(data.length * 2, outBuf.getLength());
  
  assertEquals(0, buf.countBufferedBytes());

  outBuf.close();
}
 
Example 5
Source File: TestEditsDoubleBuffer.java    From big-c with Apache License 2.0
@Test
public void testDoubleBuffer() throws IOException {
  EditsDoubleBuffer buf = new EditsDoubleBuffer(1024);
  
  assertTrue(buf.isFlushed());
  byte[] data = new byte[100];
  buf.writeRaw(data, 0, data.length);
  assertEquals("Should count new data correctly",
      data.length, buf.countBufferedBytes());

  assertTrue("Writing to current buffer should not affect flush state",
      buf.isFlushed());

  // Swap the buffers
  buf.setReadyToFlush();
  assertEquals("Swapping buffers should still count buffered bytes",
      data.length, buf.countBufferedBytes());
  assertFalse(buf.isFlushed());
 
  // Flush to a stream
  DataOutputBuffer outBuf = new DataOutputBuffer();
  buf.flushTo(outBuf);
  assertEquals(data.length, outBuf.getLength());
  assertTrue(buf.isFlushed());
  assertEquals(0, buf.countBufferedBytes());
  
  // Write some more
  buf.writeRaw(data, 0, data.length);
  assertEquals("Should count new data correctly",
      data.length, buf.countBufferedBytes());
  buf.setReadyToFlush();
  buf.flushTo(outBuf);
  
  assertEquals(data.length * 2, outBuf.getLength());
  
  assertEquals(0, buf.countBufferedBytes());

  outBuf.close();
}
 
Example 6
Source File: AvatarNode.java    From RDFS with Apache License 2.0
/**
 * Create an empty edits log
 */
static void createEditsFile(String editDir) throws IOException {
  File editfile = new File(editDir + EDITSFILE);
  FileOutputStream fp = new FileOutputStream(editfile);
  DataOutputBuffer buf = new DataOutputBuffer(1024);
  buf.writeInt(FSConstants.LAYOUT_VERSION);
  buf.writeTo(fp);
  buf.close();
  fp.close();
}
 
Example 7
Source File: TestShuffleInputEventHandlerImpl.java    From tez with Apache License 2.0
private InputContext createInputContext() throws IOException {
  DataOutputBuffer port_dob = new DataOutputBuffer();
  port_dob.writeInt(PORT);
  final ByteBuffer shuffleMetaData = ByteBuffer.wrap(port_dob.getData(), 0, port_dob.getLength());
  port_dob.close();

  ExecutionContext executionContext = mock(ExecutionContext.class);
  doReturn(HOST).when(executionContext).getHostName();

  InputContext inputContext = mock(InputContext.class);
  doReturn(new TezCounters()).when(inputContext).getCounters();
  doReturn("sourceVertex").when(inputContext).getSourceVertexName();
  doReturn(shuffleMetaData).when(inputContext)
      .getServiceProviderMetaData(conf.get(TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID,
          TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID_DEFAULT));
  doReturn(executionContext).when(inputContext).getExecutionContext();
  when(inputContext.createTezFrameworkExecutorService(anyInt(), anyString())).thenAnswer(
      new Answer<ExecutorService>() {
        @Override
        public ExecutorService answer(InvocationOnMock invocation) throws Throwable {
          return sharedExecutor.createExecutorService(
              invocation.getArgumentAt(0, Integer.class),
              invocation.getArgumentAt(1, String.class));
        }
      });
  return inputContext;
}
 
Example 8
Source File: TestShuffleManager.java    From tez with Apache License 2.0
private InputContext createInputContext() throws IOException {
  DataOutputBuffer port_dob = new DataOutputBuffer();
  port_dob.writeInt(PORT);
  final ByteBuffer shuffleMetaData = ByteBuffer.wrap(port_dob.getData(), 0,
      port_dob.getLength());
  port_dob.close();

  ExecutionContext executionContext = mock(ExecutionContext.class);
  doReturn(FETCHER_HOST).when(executionContext).getHostName();

  InputContext inputContext = mock(InputContext.class);
  doReturn(new TezCounters()).when(inputContext).getCounters();
  doReturn("sourceVertex").when(inputContext).getSourceVertexName();
  doReturn(shuffleMetaData).when(inputContext)
      .getServiceProviderMetaData(conf.get(TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID,
          TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID_DEFAULT));
  doReturn(executionContext).when(inputContext).getExecutionContext();
  when(inputContext.createTezFrameworkExecutorService(anyInt(), anyString())).thenAnswer(
      new Answer<ExecutorService>() {
        @Override
        public ExecutorService answer(InvocationOnMock invocation) throws Throwable {
          return sharedExecutor.createExecutorService(
              invocation.getArgumentAt(0, Integer.class),
              invocation.getArgumentAt(1, String.class));
        }
      });
  return inputContext;
}
 
Example 9
Source File: TestEntityDescriptor.java    From tez with Apache License 2.0
public void testSingularWrite(InputDescriptor entityDescriptor, InputDescriptor deserialized, UserPayload payload,
                              String confVal) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  entityDescriptor.write(out);
  out.close();
  ByteArrayOutputStream bos = new ByteArrayOutputStream(out.getData().length);
  bos.write(out.getData());

  Mockito.verify(entityDescriptor).writeSingular(eq(out), any(ByteBuffer.class));
  deserialized.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
  verifyResults(entityDescriptor, deserialized, payload, confVal);
}
 
Example 10
Source File: ProtoSerializer.java    From incubator-retired-blur with Apache License 2.0
public static void main(String[] args) throws ParseException, IOException {

    QueryParser parser = new QueryParser(Version.LUCENE_40, "", new StandardAnalyzer(Version.LUCENE_40));

    Query query = parser.parse("a:v1 b:v2 c:v3~ c:asda*asda");
    
    SuperQuery superQuery = new SuperQuery(query,ScoreType.SUPER,new Term("_primedoc_"));

    QueryWritable queryWritable = new QueryWritable(superQuery);
    DataOutputBuffer buffer = new DataOutputBuffer();
    queryWritable.write(buffer);
    buffer.close();

    System.out.println(new String(buffer.getData(), 0, buffer.getLength()));

    QueryWritable qw = new QueryWritable();

    DataInputBuffer in = new DataInputBuffer();
    in.reset(buffer.getData(), 0, buffer.getLength());
    qw.readFields(in);

    System.out.println("------------");
    
    System.out.println(qw.getQuery());
    
    System.out.println("------------");

    while (true) {
      run(superQuery);
    }
  }