Java Code Examples for org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#lowerBound()

The following examples show how to use org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#lowerBound(). They are drawn from open-source projects; the source file and originating project are listed above each example.
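All of the examples below share the same call pattern: open the TFile, create a scanner, and call lowerBound() to position the cursor at the first entry whose key is greater than or equal to the given key. The following is a minimal, self-contained sketch of that pattern, not taken from any of the listed projects; the class name LowerBoundSketch, the command-line path argument, and the key bytes are placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.file.tfile.TFile.Reader;
import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;

public class LowerBoundSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Path path = new Path(args[0]);                // path to an existing TFile (placeholder)
    FileSystem fs = path.getFileSystem(conf);

    FSDataInputStream in = fs.open(path);
    Reader reader = new Reader(in, fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    try {
      byte[] key = "someKey".getBytes("UTF-8");   // placeholder key
      // Move the cursor to the first entry whose key is >= the given key.
      scanner.lowerBound(key, 0, key.length);
      if (!scanner.atEnd()) {
        BytesWritable k = new BytesWritable();
        BytesWritable v = new BytesWritable();
        scanner.entry().get(k, v);
        System.out.println("lower bound entry key: "
            + new String(k.copyBytes(), "UTF-8"));
      } else {
        // No entry has a key >= the given key.
        System.out.println("key is past the end of the file");
      }
    } finally {
      scanner.close();
      reader.close();
      in.close();
    }
  }
}

The three-argument overload used in Examples 1-12 takes the key bytes plus an offset and length into that array; the single-argument overload used in Examples 13-16 is equivalent to passing offset 0 and the full array length.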
Example 1
Source File: TestTFileByteArrays.java    From hadoop with Apache License 2.0
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    reader.close();
    scanner.close();
  }
  closeOutput();
}
 
Example 2
Source File: TestTFileByteArrays.java    From hadoop with Apache License 2.0
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example 3
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    reader.close();
    scanner.close();
  }
  closeOutput();
}
 
Example 4
Source File: TestTFileByteArrays.java    From big-c with Apache License 2.0
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example 5
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    reader.close();
    scanner.close();
  }
  closeOutput();
}
 
Example 6
Source File: TestTFileByteArrays.java    From RDFS with Apache License 2.0
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example 7
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    reader.close();
    scanner.close();
  }
  closeOutput();
}
 
Example 8
Source File: TestTFileByteArrays.java    From hadoop-gpu with Apache License 2.0
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();

  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  }
  catch (Exception e) {
    // noop, expecting exceptions
  }
  finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
 
Example 9
Source File: TestTFileSeek.java    From hadoop with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
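    // lowerBound(): move to the first entry whose key is >= the sampled key;
    // if no such entry exists the scanner is atEnd() and the seek counts as a miss.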
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 10
Source File: TestTFileSeek.java    From big-c with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 11
Source File: TestTFileSeek.java    From RDFS with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 12
Source File: TestTFileSeek.java    From hadoop-gpu with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader =
    new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    }
    else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(), NanoTimer.nanoTimeToString(timer.read()
          / options.seekCount), options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));

}
 
Example 13
Source File: TestTFile.java    From hadoop with Apache License 2.0
/**
 * Basic write/read/seek test using the given compression codec.
 */
void basicWithSomeCodec(String codec) throws IOException {
  Path ncTFile = new Path(ROOT, "basic.tfile");
  FSDataOutputStream fout = createFSOutput(ncTFile);
  Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
  writeRecords(writer);
  fout.close();
  FSDataInputStream fin = fs.open(ncTFile);
  Reader reader =
      new Reader(fs.open(ncTFile), fs.getFileStatus(ncTFile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.seekTo(getSomeKey(50));
  assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
  // read the key and see if it matches
  byte[] readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
      readKey));

  scanner.seekTo(new byte[0]);
  byte[] val1 = readValue(scanner);
  scanner.seekTo(new byte[0]);
  byte[] val2 = readValue(scanner);
  assertTrue(Arrays.equals(val1, val2));
  
  // check for lowerBound
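  // (lowerBound moves to the first entry whose key is >= the given key, so key 50 itself is expected)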
  scanner.lowerBound(getSomeKey(50));
  assertTrue("locaton lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(50)));

  // check for upper bound
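  // (upperBound moves to the first entry whose key is strictly greater than the given key, so key 51 is expected)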
  scanner.upperBound(getSomeKey(50));
  assertTrue("location lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(51)));

  scanner.close();
  // test for a range of scanner
  scanner = reader.createScannerByKey(getSomeKey(10), getSomeKey(60));
  readAndCheckbytes(scanner, 10, 50);
  assertFalse(scanner.advance());
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(ncTFile, true);
}
 
Example 14
Source File: TestTFile.java    From big-c with Apache License 2.0
/**
 * Basic write/read/seek test using the given compression codec.
 */
void basicWithSomeCodec(String codec) throws IOException {
  Path ncTFile = new Path(ROOT, "basic.tfile");
  FSDataOutputStream fout = createFSOutput(ncTFile);
  Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
  writeRecords(writer);
  fout.close();
  FSDataInputStream fin = fs.open(ncTFile);
  Reader reader =
      new Reader(fs.open(ncTFile), fs.getFileStatus(ncTFile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.seekTo(getSomeKey(50));
  assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
  // read the key and see if it matches
  byte[] readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
      readKey));

  scanner.seekTo(new byte[0]);
  byte[] val1 = readValue(scanner);
  scanner.seekTo(new byte[0]);
  byte[] val2 = readValue(scanner);
  assertTrue(Arrays.equals(val1, val2));
  
  // check for lowerBound
  scanner.lowerBound(getSomeKey(50));
  assertTrue("locaton lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(50)));

  // check for upper bound
  scanner.upperBound(getSomeKey(50));
  assertTrue("location lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(51)));

  scanner.close();
  // test for a range of scanner
  scanner = reader.createScannerByKey(getSomeKey(10), getSomeKey(60));
  readAndCheckbytes(scanner, 10, 50);
  assertFalse(scanner.advance());
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(ncTFile, true);
}
 
Example 15
Source File: TestTFile.java    From RDFS with Apache License 2.0
/**
 * Basic write/read/seek test using the given compression codec.
 */
void basicWithSomeCodec(String codec) throws IOException {
  Path ncTFile = new Path(ROOT, "basic.tfile");
  FSDataOutputStream fout = createFSOutput(ncTFile);
  Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
  writeRecords(writer);
  fout.close();
  FSDataInputStream fin = fs.open(ncTFile);
  Reader reader =
      new Reader(fs.open(ncTFile), fs.getFileStatus(ncTFile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.seekTo(getSomeKey(50));
  assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
  // read the key and see if it matches
  byte[] readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
      readKey));

  scanner.seekTo(new byte[0]);
  byte[] val1 = readValue(scanner);
  scanner.seekTo(new byte[0]);
  byte[] val2 = readValue(scanner);
  assertTrue(Arrays.equals(val1, val2));
  
  // check for lowerBound
  scanner.lowerBound(getSomeKey(50));
  assertTrue("locaton lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(50)));

  // check for upper bound
  scanner.upperBound(getSomeKey(50));
  assertTrue("location lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(51)));

  scanner.close();
  // test for a range of scanner
  scanner = reader.createScannerByKey(getSomeKey(10), getSomeKey(60));
  readAndCheckbytes(scanner, 10, 50);
  assertFalse(scanner.advance());
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(ncTFile, true);
}
 
Example 16
Source File: TestTFile.java    From hadoop-gpu with Apache License 2.0
/**
 * Basic write/read/seek test using the given compression codec.
 */
void basicWithSomeCodec(String codec) throws IOException {
  Path ncTFile = new Path(ROOT, "basic.tfile");
  FSDataOutputStream fout = createFSOutput(ncTFile);
  Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
  writeRecords(writer);
  fout.close();
  FSDataInputStream fin = fs.open(ncTFile);
  Reader reader =
      new Reader(fs.open(ncTFile), fs.getFileStatus(ncTFile).getLen(), conf);

  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.seekTo(getSomeKey(50));
  assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
  // read the key and see if it matches
  byte[] readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
      readKey));

  scanner.seekTo(new byte[0]);
  byte[] val1 = readValue(scanner);
  scanner.seekTo(new byte[0]);
  byte[] val2 = readValue(scanner);
  assertTrue(Arrays.equals(val1, val2));
  
  // check for lowerBound
  scanner.lowerBound(getSomeKey(50));
  assertTrue("locaton lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(50)));

  // check for upper bound
  scanner.upperBound(getSomeKey(50));
  assertTrue("location lookup failed", scanner.currentLocation
      .compareTo(reader.end()) < 0);
  readKey = readKey(scanner);
  assertTrue("seeked key does not match", Arrays.equals(readKey,
      getSomeKey(51)));

  scanner.close();
  // test for a range of scanner
  scanner = reader.createScanner(getSomeKey(10), getSomeKey(60));
  readAndCheckbytes(scanner, 10, 50);
  assertFalse(scanner.advance());
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(ncTFile, true);
}