Java Code Examples for org.apache.hadoop.io.DataInputBuffer

The following examples show how to use org.apache.hadoop.io.DataInputBuffer. They are extracted from open source projects; the source project, file, and license are noted above each example.
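All of the examples share the same basic pattern: bytes are produced into an org.apache.hadoop.io.DataOutputBuffer (or some other byte source), and a DataInputBuffer is pointed at those bytes with reset() so they can be read back through the DataInput/InputStream API. The following minimal sketch illustrates that round trip with a Text value; the class name and value are illustrative and not taken from any of the projects below.

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataInputBufferRoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize a Writable into an in-memory output buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // Point a DataInputBuffer at the serialized bytes. The backing array
    // returned by getData() can be larger than the valid payload, so the
    // length must come from out.getLength(), not from the array length.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());

    // Deserialize into a fresh Writable and print it.
    Text copy = new Text();
    copy.readFields(in);
    System.out.println(copy);  // prints "hello"
  }
}

Note that DataInputBuffer.reset() only repositions the buffer over the supplied byte array; it does not copy the data, which is why many of the examples below reuse a single buffer across records.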
Example 1
Source Project: hbase   Source File: TestSerialization.java    License: Apache License 2.0
/**
 * Test RegionInfo serialization
 * @throws Exception
 */
@Test
public void testRegionInfo() throws Exception {
  RegionInfo hri = createRandomRegion("testRegionInfo");

  // test toByteArray()
  byte[] hrib = RegionInfo.toByteArray(hri);
  RegionInfo deserializedHri = RegionInfo.parseFrom(hrib);
  assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
  assertEquals(hri, deserializedHri);

  // test toDelimitedByteArray()
  hrib = RegionInfo.toDelimitedByteArray(hri);
  DataInputBuffer buf = new DataInputBuffer();
  try {
    buf.reset(hrib, hrib.length);
    deserializedHri = RegionInfo.parseFrom(buf);
    assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
    assertEquals(hri, deserializedHri);
  } finally {
    buf.close();
  }
}
 
Example 2
Source Project: big-c   Source File: IFile.java    License: Apache License 2.0
public void nextRawValue(DataInputBuffer value) throws IOException {
  final byte[] valBytes = (value.getData().length < currentValueLength)
    ? new byte[currentValueLength << 1]
    : value.getData();
  int i = readData(valBytes, 0, currentValueLength);
  if (i != currentValueLength) {
    throw new IOException ("Asked for " + currentValueLength + " Got: " + i);
  }
  value.reset(valBytes, currentValueLength);
  
  // Record the bytes read
  bytesRead += currentValueLength;

  ++recNo;
  ++numRecordsRead;
}
 
Example 3
Source Project: incubator-tez   Source File: InMemoryReader.java    License: Apache License 2.0
public void nextRawValue(DataInputBuffer value) throws IOException {
  try {
    int pos = memDataIn.getPosition();
    byte[] data = memDataIn.getData();
    value.reset(data, pos, currentValueLength);

    // Position for the next record
    long skipped = memDataIn.skip(currentValueLength);
    if (skipped != currentValueLength) {
      throw new IOException("Rec# " + recNo +
          ": Failed to skip past value of length: " +
          currentValueLength);
    }
    // Record the bytes read
    bytesRead += currentValueLength;
    ++recNo;
  } catch (IOException ioe) {
    dumpOnError();
    throw ioe;
  }
}
 
Example 4
Source Project: hadoop   Source File: TestPBRecordImpl.java    License: Apache License 2.0
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
    LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
    new LocalResourceStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}
 
Example 5
Source Project: hadoop   Source File: TestPBRecordImpl.java    License: Apache License 2.0
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
    LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
    new LocalizerStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
 
Example 6
Source Project: incubator-retired-blur   Source File: QueryWritableTest.java    License: Apache License 2.0
@Test
public void testTermQuery() throws IOException {
  TermQuery query = new TermQuery(new Term("field", "value"));
  QueryWritable queryWritable = new QueryWritable();
  queryWritable.setQuery(query);
  DataOutputBuffer out = new DataOutputBuffer();
  queryWritable.write(out);
  byte[] data = out.getData();
  int length = out.getLength();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, length);

  QueryWritable newQueryWritable = new QueryWritable();
  newQueryWritable.readFields(in);

  Query termQuery = newQueryWritable.getQuery();

  assertEquals(query, termQuery);

}
 
Example 7
Source Project: hadoop   Source File: TestWritableJobConf.java    License: Apache License 2.0
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
    factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
    factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
 
Example 8
Source Project: big-c   Source File: SerializationTestUtil.java    License: Apache License 2.0
/**
 * A utility that tests serialization/deserialization. 
 * @param conf configuration to use, "io.serializations" is read to 
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
	throws Exception {

  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer 
    = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer 
    = factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
 
Example 9
Source Project: hadoop   Source File: IFile.java    License: Apache License 2.0
public void append(DataInputBuffer key, DataInputBuffer value)
throws IOException {
  int keyLength = key.getLength() - key.getPosition();
  if (keyLength < 0) {
    throw new IOException("Negative key-length not allowed: " + keyLength + 
                          " for " + key);
  }
  
  int valueLength = value.getLength() - value.getPosition();
  if (valueLength < 0) {
    throw new IOException("Negative value-length not allowed: " + 
                          valueLength + " for " + value);
  }

  WritableUtils.writeVInt(out, keyLength);
  WritableUtils.writeVInt(out, valueLength);
  out.write(key.getData(), key.getPosition(), keyLength); 
  out.write(value.getData(), value.getPosition(), valueLength); 

  // Update bytes written
  decompressedBytesWritten += keyLength + valueLength + 
                  WritableUtils.getVIntSize(keyLength) + 
                  WritableUtils.getVIntSize(valueLength);
  ++numRecordsWritten;
}
 
Example 10
Source Project: spork   Source File: BinInterSedes.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private int compareBinInterSedesBag(ByteBuffer bb1, ByteBuffer bb2, byte dt1, byte dt2) throws IOException {
    int s1 = bb1.position();
    int s2 = bb2.position();
    int l1 = bb1.remaining();
    int l2 = bb2.remaining();
    // first compare sizes
    int bsz1 = readSize(bb1, dt1);
    int bsz2 = readSize(bb2, dt2);
    if (bsz1 > bsz2)
        return 1;
    else if (bsz1 < bsz2)
        return -1;
    else {
        DataInputBuffer buffer1 = new DataInputBuffer();
        DataInputBuffer buffer2 = new DataInputBuffer();
        buffer1.reset(bb1.array(), s1, l1);
        buffer2.reset(bb2.array(), s2, l2);
        DataBag bag1 = (DataBag) mSedes.readDatum(buffer1, dt1);
        DataBag bag2 = (DataBag) mSedes.readDatum(buffer2, dt2);
        bb1.position(buffer1.getPosition());
        bb2.position(buffer2.getPosition());
        return bag1.compareTo(bag2);
    }
}
 
Example 11
Source Project: incubator-tez   Source File: ShuffledUnorderedKVReader.java    License: Apache License 2.0
public ShuffledUnorderedKVReader(ShuffleManager shuffleManager, Configuration conf,
    CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength, int ifileBufferSize,
    TezCounter inputRecordCounter)
    throws IOException {
  this.shuffleManager = shuffleManager;

  this.codec = codec;
  this.ifileReadAhead = ifileReadAhead;
  this.ifileReadAheadLength = ifileReadAheadLength;
  this.ifileBufferSize = ifileBufferSize;
  this.inputRecordCounter = inputRecordCounter;

  this.keyClass = ConfigUtils.getIntermediateInputKeyClass(conf);
  this.valClass = ConfigUtils.getIntermediateInputValueClass(conf);

  this.keyIn = new DataInputBuffer();
  this.valIn = new DataInputBuffer();

  SerializationFactory serializationFactory = new SerializationFactory(conf);

  this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
  this.keyDeserializer.open(keyIn);
  this.valDeserializer = serializationFactory.getDeserializer(valClass);
  this.valDeserializer.open(valIn);
}
 
Example 12
Source Project: RDFS   Source File: ReducePartition.java    License: Apache License 2.0
public boolean next(DataInputBuffer key, DataInputBuffer value)
    throws IOException {
  MemoryBlockIndex memBlkIdx = keyValueIterator.next();
  if (memBlkIdx != null) {
    int pos = memBlkIdx.getIndex();
    MemoryBlock memBlk = memBlkIdx.getMemoryBlock();
    int offset = memBlk.offsets[pos];
    int keyLen = memBlk.keyLenArray[pos];
    int valLen = memBlk.valueLenArray[pos];
    dataOutputBuffer.reset();
    dataOutputBuffer.writeInt(keyLen);
    dataOutputBuffer.write(kvbuffer, offset, keyLen);
    dataOutputBuffer.writeInt(valLen);
    dataOutputBuffer.write(kvbuffer, offset + keyLen, valLen);
    key.reset(dataOutputBuffer.getData(), 0, keyLen
        + WritableUtils.INT_LENGTH_BYTES);
    value.reset(dataOutputBuffer.getData(), keyLen
        + WritableUtils.INT_LENGTH_BYTES, valLen
        + WritableUtils.INT_LENGTH_BYTES);
    return true;
  }
  return false;
}
 
Example 13
Source Project: tez   Source File: TestIFile.java    License: Apache License 2.0
@Test(timeout = 5000)
//Test appendValue with DataInputBuffer
public void testAppendValueWithDataInputBuffer() throws IOException {
  List<KVPair> data = KVDataGen.generateTestData(false, rnd.nextInt(100));
  IFile.Writer writer = new IFile.Writer(defaultConf, localFs, outputPath,
      Text.class, IntWritable.class, codec, null, null);

  final DataInputBuffer previousKey = new DataInputBuffer();
  DataInputBuffer key = new DataInputBuffer();
  DataInputBuffer value = new DataInputBuffer();
  for (KVPair kvp : data) {
    populateData(kvp, key, value);

    if ((previousKey != null && BufferUtils.compare(key, previousKey) == 0)) {
      writer.appendValue(value);
    } else {
      writer.append(key, value);
    }
    previousKey.reset(key.getData(), 0, key.getLength());
  }

  writer.close();

  readAndVerifyData(writer.getRawLength(), writer.getCompressedLength(), data, codec);
}
 
Example 14
Source Project: hadoop   Source File: TestJspHelper.java    License: Apache License 2.0
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState
        .values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState
          .read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
 
Example 15
Source Project: big-c   Source File: TestWritableSerialization.java    License: Apache License 2.0
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();

  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();

  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}
 
Example 16
Source Project: hadoop-gpu   Source File: TestWritableJobConf.java    License: Apache License 2.0
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
    factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
    factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
 
Example 17
Source Project: hadoop   Source File: TFile.java    License: Apache License 2.0
/**
 * Constructor
 * 
 * @param reader
 *          The TFile reader object.
 * @param begin
 *          Begin location of the scan.
 * @param end
 *          End location of the scan.
 * @throws IOException
 */
Scanner(Reader reader, Location begin, Location end) throws IOException {
  this.reader = reader;
  // ensure the TFile index is loaded throughout the life of scanner.
  reader.checkTFileDataIndex();
  beginLocation = begin;
  endLocation = end;

  valTransferBuffer = new BytesWritable();
  // TODO: remember the longest key in a TFile, and use it to replace
  // MAX_KEY_SIZE.
  keyBuffer = new byte[MAX_KEY_SIZE];
  keyDataInputStream = new DataInputBuffer();
  valueBufferInputStream = new ChunkDecoder();
  valueDataInputStream = new DataInputStream(valueBufferInputStream);

  if (beginLocation.compareTo(endLocation) >= 0) {
    currentLocation = new Location(endLocation);
  } else {
    currentLocation = new Location(0, 0);
    initBlock(beginLocation.getBlockIndex());
    inBlockAdvance(beginLocation.getRecordIndex());
  }
}
 
Example 18
Source Project: hadoop   Source File: TestDelegationToken.java    License: Apache License 2.0
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner,
    Text renewer, Text realUser) throws IOException {
  TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
      owner, renewer, realUser);
  DataOutputBuffer out = new DataOutputBuffer();
  dtid.writeImpl(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  try {
    TestDelegationTokenIdentifier dtid2 =
        new TestDelegationTokenIdentifier();
    dtid2.readFields(in);
    assertTrue(dtid.equals(dtid2));
    return true;
  } catch(IOException e){
    return false;
  }
}
 
Example 19
Source Project: big-c   Source File: TestPBRecordImpl.java    License: Apache License 2.0
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
    LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
    new LocalResourceStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}
 
Example 20
Source Project: tez   Source File: ValuesIterator.java    License: Apache License 2.0
/** 
 * read the next key - which may be the same as the current key.
 */
private void readNextKey() throws IOException {
  more = in.next();
  if (more) {      
    DataInputBuffer nextKeyBytes = in.getKey();
    if (!in.isSameKey()) {
      keyIn.reset(nextKeyBytes.getData(), nextKeyBytes.getPosition(),
          nextKeyBytes.getLength() - nextKeyBytes.getPosition());
      nextKey = keyDeserializer.deserialize(nextKey);
      // hasMoreValues = is it first key or is key the same?
      hasMoreValues = (key == null) || (comparator.compare(key, nextKey) == 0);
      if (key == null || false == hasMoreValues) {
        // invariant: more=true & there are no more values in an existing key group
        // so this indicates start of new key group
        if(inputKeyCounter != null) {
          inputKeyCounter.increment(1);
        }
        ++keyCtr;
      }
    } else {
      hasMoreValues = in.isSameKey();
    }
  } else {
    hasMoreValues = false;
  }
}
 
Example 21
Source Project: hadoop-gpu   Source File: TestWritableSerialization.java    License: Apache License 2.0
/**
 * A utility that tests serialization/deserialization. 
 * @param <K> the class of the item
 * @param conf configuration to use, "io.serializations" is read to 
 * determine the serialization
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static<K> K testSerialization(Configuration conf, K before) 
  throws Exception {
  
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer 
    = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer 
    = factory.getDeserializer(GenericsUtil.getClass(before));
 
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();
  
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  
  assertEquals(before, after);
  return after;
}
 
Example 22
Source Project: hadoop   Source File: SerializationTestUtil.java    License: Apache License 2.0
/**
 * A utility that tests serialization/deserialization. 
 * @param conf configuration to use, "io.serializations" is read to 
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
	throws Exception {

  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer 
    = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer 
    = factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
 
Example 23
Source Project: gemfirexd-oss   Source File: SequenceFile.java    License: Apache License 2.0
@Override
public void writeUncompressedBytes(DataOutputStream outStream)
  throws IOException {
  if (decompressedStream == null) {
    rawData = new DataInputBuffer();
    decompressedStream = codec.createInputStream(rawData);
  } else {
    decompressedStream.resetState();
  }
  rawData.reset(data, 0, dataSize);

  byte[] buffer = new byte[8192];
  int bytesRead = 0;
  while ((bytesRead = decompressedStream.read(buffer, 0, 8192)) != -1) {
    outStream.write(buffer, 0, bytesRead);
  }
}
 
Example 24
private void validate(KeyValue kv, byte[] row, byte[] family, byte[] qualifier, long ts,
    Type type, byte[] value) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  kv.write(out);
  out.close();
  byte[] data = out.getData();
  // read it back in
  KeyValue read = new KeyValue();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, data.length);
  read.readFields(in);
  in.close();

  // validate that its the same
  assertTrue("Row didn't match!", Bytes.equals(row, read.getRow()));
  assertTrue("Family didn't match!", Bytes.equals(family, read.getFamily()));
  assertTrue("Qualifier didn't match!", Bytes.equals(qualifier, read.getQualifier()));
  assertTrue("Value didn't match!", Bytes.equals(value, read.getValue()));
  assertEquals("Timestamp didn't match", ts, read.getTimestamp());
  assertEquals("Type didn't match", type.getCode(), read.getType());
}
 
Example 25
Source Project: hadoop-gpu   Source File: TFile.java    License: Apache License 2.0
/**
 * Constructor
 * 
 * @param reader
 *          The TFile reader object.
 * @param begin
 *          Begin location of the scan.
 * @param end
 *          End location of the scan.
 * @throws IOException
 */
Scanner(Reader reader, Location begin, Location end) throws IOException {
  this.reader = reader;
  // ensure the TFile index is loaded throughout the life of scanner.
  reader.checkTFileDataIndex();
  beginLocation = begin;
  endLocation = end;

  valTransferBuffer = new BytesWritable();
  // TODO: remember the longest key in a TFile, and use it to replace
  // MAX_KEY_SIZE.
  keyBuffer = new byte[MAX_KEY_SIZE];
  keyDataInputStream = new DataInputBuffer();
  valueBufferInputStream = new ChunkDecoder();
  valueDataInputStream = new DataInputStream(valueBufferInputStream);

  if (beginLocation.compareTo(endLocation) >= 0) {
    currentLocation = new Location(endLocation);
  } else {
    currentLocation = new Location(0, 0);
    initBlock(beginLocation.getBlockIndex());
    inBlockAdvance(beginLocation.getRecordIndex());
  }
}
 
Example 26
Source Project: RDFS   Source File: FreightStreamer.java    License: Apache License 2.0
public TextRecordInputStream(FileStatus f) throws IOException {
  r = new SequenceFile.Reader(fs, f.getPath(), getConf());
  key = ReflectionUtils.newInstance(r.getKeyClass().asSubclass(WritableComparable.class),
                                    getConf());
  val = ReflectionUtils.newInstance(r.getValueClass().asSubclass(Writable.class),
                                    getConf());
  inbuf = new DataInputBuffer();
  outbuf = new DataOutputBuffer();
}
 
Example 27
Source Project: big-c   Source File: BackupStore.java    License: Apache License 2.0
boolean reserveSpace(DataInputBuffer key, DataInputBuffer value)
throws IOException {
  int keyLength = key.getLength() - key.getPosition();
  int valueLength = value.getLength() - value.getPosition();

  int requestedSize = keyLength + valueLength + 
    WritableUtils.getVIntSize(keyLength) +
    WritableUtils.getVIntSize(valueLength);
  return reserveSpace(requestedSize);
}
 
Example 28
Source Project: big-c   Source File: TestViewFsFileStatusHdfs.java    License: Apache License 2.0
@Test
public void testFileStatusSerialziation()
    throws IOException, URISyntaxException {
  long len = fileSystemTestHelper.createFile(fHdfs, testfilename);
  FileStatus stat = vfs.getFileStatus(new Path(testfilename));
  assertEquals(len, stat.getLen());
  // check serialization/deserialization
  DataOutputBuffer dob = new DataOutputBuffer();
  stat.write(dob);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  FileStatus deSer = new FileStatus();
  deSer.readFields(dib);
  assertEquals(len, deSer.getLen());
}
 
Example 29
Source Project: incubator-tez   Source File: TezMerger.java    License: Apache License 2.0
protected boolean lessThan(Object a, Object b) {
  DataInputBuffer key1 = ((Segment)a).getKey();
  DataInputBuffer key2 = ((Segment)b).getKey();
  int s1 = key1.getPosition();
  int l1 = key1.getLength() - s1;
  int s2 = key2.getPosition();
  int l2 = key2.getLength() - s2;

  return comparator.compare(key1.getData(), s1, l1, key2.getData(), s2, l2) < 0;
}
 
Example 30
Source Project: tez   Source File: TezCommonUtils.java    License: Apache License 2.0
public static Credentials parseCredentialsBytes(byte[] credentialsBytes) throws IOException {
  Credentials credentials = new Credentials();
  DataInputBuffer dib = new DataInputBuffer();
  try {
    byte[] tokenBytes = credentialsBytes;
    dib.reset(tokenBytes, tokenBytes.length);
    credentials.readTokenStorageStream(dib);
    return credentials;
  } finally {
    dib.close();
  }
}