org.apache.hadoop.io.file.tfile.TFile.Reader Java Examples

The following examples show how to use org.apache.hadoop.io.file.tfile.TFile.Reader. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Positions a scanner at the record {@code recordIndex} and verifies that
 * the value can be fetched before the key of the same entry.
 *
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // Fetch and check the value first...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);

    // ...then the key of the very same entry.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #2
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the value at {@code recordIndex} (and, when present, the value of
 * the following record) without ever touching the key bytes.
 *
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // try/finally so the scanner and reader are released even when an
  // assertion throws mid-test (the original leaked both on failure).
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #3
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that the value bytes of a single entry cannot be fetched twice.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();

  // try/finally so resources are released even when an assertion throws
  // (the original leaked scanner and reader on failure).
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      scanner.entry().getValue(vbuf);
      // fixed typo: "mlutiple" -> "multiple"
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #4
Source File: TestTFileUnsortedByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that creating a key-bounded scanner on an unsorted TFile fails.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);

  // Keep the reference so the scanner can be closed if creation
  // unexpectedly succeeds (the original leaked it in that case).
  Scanner scanner = null;
  try {
    scanner = reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: key-bounded scanners are not supported on unsorted files
  }
  finally {
    if (scanner != null) {
      scanner.close();
    }
    reader.close();
  }
}
 
Example #5
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that opening a file full of random bytes as a TFile fails with
 * an IOException.
 *
 * @throws IOException on unexpected filesystem errors
 */
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB data
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    // no local needed: construction itself is expected to fail
    // (the original assigned an unused 'reader' variable)
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  }
  catch (IOException e) {
    // noop, expecting exceptions
  }
}
 
Example #6
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative length.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // expected: a negative length must be rejected
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example #7
Source File: TestTFile.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Writes unsorted records with the given codec, then reads them all back.
 *
 * @param codec compression codec name passed to the TFile writer
 * @throws IOException if writing or reading the TFile fails
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(uTfile);
  // Reuse the already-opened stream: the original opened the file a second
  // time for the Reader and left that second stream unclosed, while 'fin'
  // sat unused.
  Reader reader =
      new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
 
Example #8
Source File: TestTFileStreams.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Entry.getKey} rejects a negative buffer offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    // close here instead of after the try (the original had an empty
    // finally and leaked both resources on an unexpected error)
    scanner.close();
    reader.close();
  }
}
 
Example #9
Source File: TestTFile.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Writes unsorted records with the given codec, then reads them all back.
 *
 * @param codec compression codec name passed to the TFile writer
 * @throws IOException if writing or reading the TFile fails
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(uTfile);
  // Reuse the already-opened stream: the original opened the file a second
  // time for the Reader and left that second stream unclosed, while 'fin'
  // sat unused.
  Reader reader =
      new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
 
Example #10
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    // close the scanner before the reader it was created from
    // (the original closed them in the reverse order)
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example #11
Source File: TestTFile.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Writes unsorted records with the given codec, then reads them all back.
 *
 * @param codec compression codec name passed to the TFile writer
 * @throws IOException if writing or reading the TFile fails
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(uTfile);
  // Reuse the already-opened stream: the original opened the file a second
  // time for the Reader and left that second stream unclosed, while 'fin'
  // sat unused.
  Reader reader =
      new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
 
Example #12
Source File: TestTFileByteArrays.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Positions a scanner at the record {@code recordIndex} and verifies that
 * the value can be fetched before the key of the same entry.
 *
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  try {
    // Fetch and check the value first...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);

    // ...then the key of the very same entry.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #13
Source File: TestTFileByteArrays.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the value at {@code recordIndex} (and, when present, the value of
 * the following record) without ever touching the key bytes.
 *
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // try/finally so the scanner and reader are released even when an
  // assertion throws mid-test (the original leaked both on failure).
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #14
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the value at {@code recordIndex} (and, when present, the value of
 * the following record) without ever touching the key bytes.
 *
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);

  // try/finally so the scanner and reader are released even when an
  // assertion throws mid-test (the original leaked both on failure).
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #15
Source File: TestTFileUnsortedByteArrays.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that creating a key-bounded scanner on an unsorted TFile fails.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);

  // Keep the reference so the scanner can be closed if creation
  // unexpectedly succeeds (the original leaked it in that case).
  Scanner scanner = null;
  try {
    scanner = reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: key-bounded scanners are not supported on unsorted files
  }
  finally {
    if (scanner != null) {
      scanner.close();
    }
    reader.close();
  }
}
 
Example #16
Source File: TestTFileStreams.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Entry.getKey} rejects a negative buffer offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    // close here instead of after the try (the original had an empty
    // finally and leaked both resources on an unexpected error)
    scanner.close();
    reader.close();
  }
}
 
Example #17
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the value at {@code recordIndex} (and, when present, the value of
 * the following record) without ever touching the key bytes.
 *
 * @param count total record count used when composing the sorted key
 * @param recordIndex index of the sorted record to inspect
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueWithoutKey(int count, int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScanner(composeSortedKey(KEY, count, recordIndex)
          .getBytes(), null);

  // try/finally so the scanner and reader are released even when an
  // assertion throws mid-test (the original leaked both on failure).
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #18
Source File: TestTFileStreams.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Entry.getKey} rejects a negative buffer offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    // close here instead of after the try (the original had an empty
    // finally and leaked both resources on an unexpected error)
    scanner.close();
    reader.close();
  }
}
 
Example #19
Source File: TestTFile.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Writes unsorted records with the given codec, then reads them all back.
 *
 * @param codec compression codec name passed to the TFile writer
 * @throws IOException if writing or reading the TFile fails
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(uTfile);
  // Reuse the already-opened stream: the original opened the file a second
  // time for the Reader and left that second stream unclosed, while 'fin'
  // sat unused.
  Reader reader =
      new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
 
Example #20
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // close the scanner before the reader it was created from
    // (the original closed them in the reverse order)
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example #21
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that opening a file full of random bytes as a TFile fails with
 * an IOException.
 *
 * @throws IOException on unexpected filesystem errors
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // write a little over 1MB of random bytes
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random random = new Random();
  byte[] chunk = new byte[K];
  for (int i = 0; i < K + 2; i++) {
    random.nextBytes(chunk);
    out.write(chunk);
  }
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // expected: random bytes are not a valid TFile
  }
}
 
Example #22
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that opening a zero-length file as a TFile fails with an
 * EOFException.
 *
 * @throws IOException on unexpected filesystem errors
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // produce a completely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // expected: nothing to read from an empty file
  }
}
 
Example #23
Source File: TestTFileStreams.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Entry.getKey} rejects a negative buffer offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    // close here instead of after the try (the original had an empty
    // finally and leaked both resources on an unexpected error)
    scanner.close();
    reader.close();
  }
}
 
Example #24
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that the value bytes of a single entry cannot be fetched twice.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();

  // try/finally so resources are released even when an assertion throws
  // (the original leaked scanner and reader on failure).
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      scanner.entry().getValue(vbuf);
      // fixed typo: "mlutiple" -> "multiple"
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #25
Source File: TestTFileUnsortedByteArrays.java    From hadoop-gpu with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that creating a key-bounded scanner on an unsorted TFile fails.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);

  // Keep the reference so the scanner can be closed if creation
  // unexpectedly succeeds (the original leaked it in that case).
  Scanner scanner = null;
  try {
    scanner = reader.createScanner("aaa".getBytes(), "zzz".getBytes());
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: key-bounded scanners are not supported on unsorted files
  }
  finally {
    if (scanner != null) {
      scanner.close();
    }
    reader.close();
  }
}
 
Example #26
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that an existing metablock can be read back and that asking for
 * a non-existent metablock fails.
 *
 * @throws IOException if writing or reading the TFile fails
 */
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());

  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  // try/finally so the reader is closed even when an assertion throws
  // (the original leaked it on failure).
  try {
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      // no local needed: the call itself is expected to fail
      reader.getMetaBlock("testY");
      Assert.fail("Error on handling non-existent metablocks.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    reader.close();
  }
}
 
Example #27
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that the value bytes of a single entry cannot be fetched twice.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();

  // try/finally so resources are released even when an assertion throws
  // (the original leaked scanner and reader on failure).
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      scanner.entry().getValue(vbuf);
      // fixed typo: "mlutiple" -> "multiple"
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
 
Example #28
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that opening a zero-length file as a TFile fails with an
 * EOFException.
 *
 * @throws IOException on unexpected filesystem errors
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // produce a completely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // expected: nothing to read from an empty file
  }
}
 
Example #29
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that opening a file full of random bytes as a TFile fails with
 * an IOException.
 *
 * @throws IOException on unexpected filesystem errors
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // write a little over 1MB of random bytes
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random random = new Random();
  byte[] chunk = new byte[K];
  for (int i = 0; i < K + 2; i++) {
    random.nextBytes(chunk);
    out.write(chunk);
  }
  out.close();
  try {
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // expected: random bytes are not a valid TFile
  }
}
 
Example #30
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative offset.
 *
 * @throws IOException if the TFile cannot be opened or read
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // close the scanner before the reader it was created from
    // (the original closed them in the reverse order)
    scanner.close();
    reader.close();
  }
  closeOutput();
}