Java Code Examples for org.apache.hadoop.io.file.tfile.TFile.Reader#close()
The following examples show how to use
org.apache.hadoop.io.file.tfile.TFile.Reader#close() .
You can vote up the examples you like or vote down the ones you don't like.
You can also go to the original project or source file by following the links above each example, and check out the related API usage in the sidebar.
Example 1
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void testFailureNegativeOffset_2() throws IOException { if (skip) return; closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(); try { scanner.lowerBound("keyX".getBytes(), -1, 4); Assert.fail("Error on handling negative offset."); } catch (Exception e) { // noop, expecting exceptions } finally { reader.close(); scanner.close(); } closeOutput(); }
Example 2
Source File: TestTFileStreams.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void testFailureNegativeOffset() throws IOException { if (skip) return; writeRecords(2, true, true); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(); byte[] buf = new byte[K]; try { scanner.entry().getKey(buf, -1); Assert.fail("Failed to handle key negative offset."); } catch (Exception e) { // noop, expecting exceptions } finally { } scanner.close(); reader.close(); }
Example 3
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0 | 6 votes |
@Test public void testFailureGetNonExistentMetaBlock() throws IOException { if (skip) return; writer.append("keyX".getBytes(), "valueX".getBytes()); // create a new metablock DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName()); outMeta.write(123); outMeta.write("foo".getBytes()); outMeta.close(); closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); DataInputStream mb = reader.getMetaBlock("testX"); Assert.assertNotNull(mb); mb.close(); try { DataInputStream mbBad = reader.getMetaBlock("testY"); Assert.fail("Error on handling non-existent metablocks."); } catch (Exception e) { // noop, expecting exceptions } reader.close(); }
Example 4
Source File: TestTFileByteArrays.java From RDFS with Apache License 2.0 | 6 votes |
@Test public void testFailureGetNonExistentMetaBlock() throws IOException { if (skip) return; writer.append("keyX".getBytes(), "valueX".getBytes()); // create a new metablock DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName()); outMeta.write(123); outMeta.write("foo".getBytes()); outMeta.close(); closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); DataInputStream mb = reader.getMetaBlock("testX"); Assert.assertNotNull(mb); mb.close(); try { DataInputStream mbBad = reader.getMetaBlock("testY"); Assert.fail("Error on handling non-existent metablocks."); } catch (Exception e) { // noop, expecting exceptions } reader.close(); }
Example 5
Source File: TestTFileByteArrays.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Reads the value of the record at {@code recordIndex} before touching its
 * key, then checks both against the expected composed key/value strings.
 *
 * @param recordIndex index of the record to position the scanner on
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueBeforeKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScannerByKey(
      composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    // Fetch the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... and only then the key.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 6
Source File: TestTFileUnsortedByteArrays.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Verifies that creating a key-range scanner on an unsorted TFile fails
 * with a {@code RuntimeException}.
 *
 * @throws IOException if the TFile cannot be opened or closed
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    // If creation unexpectedly succeeds, release the scanner before
    // failing (the original leaked it on this path).
    scanner.close();
    Assert.fail("Failed to catch creating scanner with keys on unsorted file.");
  } catch (RuntimeException e) {
    // expected: key-range scanners are unsupported on unsorted files
  } finally {
    reader.close();
  }
}
Example 7
Source File: TestTFileUnsortedByteArrays.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * Verifies that creating a key-range scanner on an unsorted TFile fails
 * with a {@code RuntimeException}.
 *
 * @throws IOException if the TFile cannot be opened or closed
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    Scanner scanner =
        reader.createScanner("aaa".getBytes(), "zzz".getBytes());
    // If creation unexpectedly succeeds, release the scanner before
    // failing (the original leaked it on this path).
    scanner.close();
    Assert.fail("Failed to catch creating scanner with keys on unsorted file.");
  } catch (RuntimeException e) {
    // expected: key-range scanners are unsupported on unsorted files
  } finally {
    reader.close();
  }
}
Example 8
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0 | 6 votes |
@Test public void testFailureNegativeOffset_2() throws IOException { if (skip) return; closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(); try { scanner.lowerBound("keyX".getBytes(), -1, 4); Assert.fail("Error on handling negative offset."); } catch (Exception e) { // noop, expecting exceptions } finally { reader.close(); scanner.close(); } closeOutput(); }
Example 9
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0 | 6 votes |
@Test public void testFailureNegativeLength_2() throws IOException { if (skip) return; closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(); try { scanner.lowerBound("keyX".getBytes(), 0, -1); Assert.fail("Error on handling negative length."); } catch (Exception e) { // noop, expecting exceptions } finally { scanner.close(); reader.close(); } closeOutput(); }
Example 10
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Reads the value of the record at {@code recordIndex} before touching its
 * key, then checks both against the expected composed key/value strings.
 *
 * @param recordIndex index of the record to position the scanner on
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueBeforeKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScannerByKey(
      composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    // Fetch the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... and only then the key.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 11
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * Reads the value of the record at {@code recordIndex} before touching its
 * key, then checks both against the expected composed key/value strings.
 *
 * @param count       total record count used when composing the sorted key
 * @param recordIndex index of the record to position the scanner on
 * @throws IOException if the TFile cannot be opened or read
 */
private void readValueBeforeKey(int count, int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner(
      composeSortedKey(KEY, count, recordIndex).getBytes(), null);
  try {
    // Fetch the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... and only then the key.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, count, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 12
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0 | 5 votes |
private void readKeyWithoutValue(int count, int recordIndex) throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(composeSortedKey(KEY, count, recordIndex) .getBytes(), null); try { // read the indexed key byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, count, recordIndex)); if (scanner.advance() && !scanner.atEnd()) { // read the next key following the indexed byte[] kbuf2 = new byte[BUF_SIZE]; int klen2 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf2); Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, count, recordIndex + 1)); } } finally { scanner.close(); reader.close(); } }
Example 13
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0 | 5 votes |
private void readKeyManyTimes(int recordIndex) throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex) .getBytes(), null); // read the indexed key byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); scanner.close(); reader.close(); }
Example 14
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0 | 5 votes |
private void readKeyManyTimes(int recordIndex) throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex) .getBytes(), null); // read the indexed key byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); scanner.close(); reader.close(); }
Example 15
Source File: TestTFileUnsortedByteArrays.java From hadoop-gpu with Apache License 2.0 | 5 votes |
public void testScanRange() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Assert.assertFalse(reader.isSorted()); Assert.assertEquals((int) reader.getEntryCount(), 4); Scanner scanner = reader.createScanner(); try { // read key and value byte[] kbuf = new byte[BUF_SIZE]; int klen = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf); Assert.assertEquals(new String(kbuf, 0, klen), "keyZ"); byte[] vbuf = new byte[BUF_SIZE]; int vlen = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf); Assert.assertEquals(new String(vbuf, 0, vlen), "valueZ"); scanner.advance(); // now try get value first vbuf = new byte[BUF_SIZE]; vlen = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf); Assert.assertEquals(new String(vbuf, 0, vlen), "valueM"); kbuf = new byte[BUF_SIZE]; klen = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf); Assert.assertEquals(new String(kbuf, 0, klen), "keyM"); } finally { scanner.close(); reader.close(); } }
Example 16
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0 | 5 votes |
private void readKeyManyTimes(int count, int recordIndex) throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(composeSortedKey(KEY, count, recordIndex) .getBytes(), null); // read the indexed key byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, count, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, count, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, count, recordIndex)); scanner.close(); reader.close(); }
Example 17
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Seeks to the record at {@code recordIndex} and asserts that it lives in
 * the expected compressed block.
 *
 * @param recordIndex        index of the record to seek to
 * @param blockIndexExpected block the record is expected to reside in
 * @throws IOException if the TFile cannot be opened or read
 */
private void checkBlockIndex(int recordIndex, int blockIndexExpected)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
    Assert.assertEquals(blockIndexExpected,
        scanner.currentLocation.getBlockIndex());
  } finally {
    // The original closed outside any finally, leaking both resources
    // when the assertion failed.
    scanner.close();
    reader.close();
  }
}
Example 18
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Verifies key location: existing keys in the first and second blocks are
 * locatable, and a key past the last record resolves to the end location.
 *
 * @throws IOException if the TFile cannot be written, opened, or read
 */
@Test
public void testLocate() throws IOException {
  if (skip)
    return;
  writeRecords(3 * records1stBlock);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    locate(scanner, composeSortedKey(KEY, 2).getBytes());
    locate(scanner, composeSortedKey(KEY, records1stBlock - 1).getBytes());
    locate(scanner, composeSortedKey(KEY, records1stBlock).getBytes());
    // A key beyond every record must locate to the scanner's end.
    Location locX = locate(scanner, "keyX".getBytes());
    Assert.assertEquals(scanner.endLocation, locX);
  } finally {
    // The original closed outside any finally, leaking both resources
    // when an assertion failed.
    scanner.close();
    reader.close();
  }
}
Example 19
Source File: TestTFile.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Writes a TFile with meta blocks, reads them back, and deletes the file.
 *
 * @throws IOException if the TFile cannot be written, opened, or read
 */
public void testMetaBlocks() throws IOException {
  Path mFile = new Path(ROOT, "meta.tfile");
  FSDataOutputStream fout = createFSOutput(mFile);
  Writer writer = new Writer(fout, minBlockSize, "none", null, conf);
  someTestingWithMetaBlock(writer, "none");
  writer.close();
  fout.close();

  FSDataInputStream fin = fs.open(mFile);
  Reader reader = new Reader(fin, fs.getFileStatus(mFile).getLen(), conf);
  try {
    someReadingWithMetaBlock(reader);
  } finally {
    // Close the reader and its stream before deleting the backing file;
    // the original deleted first and leaked both on a read failure.
    reader.close();
    fin.close();
  }
  fs.delete(mFile, true);
}
Example 20
Source File: TestTFile.java From big-c with Apache License 2.0 | 4 votes |
/** * test none codecs */ void basicWithSomeCodec(String codec) throws IOException { Path ncTFile = new Path(ROOT, "basic.tfile"); FSDataOutputStream fout = createFSOutput(ncTFile); Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf); writeRecords(writer); fout.close(); FSDataInputStream fin = fs.open(ncTFile); Reader reader = new Reader(fs.open(ncTFile), fs.getFileStatus(ncTFile).getLen(), conf); Scanner scanner = reader.createScanner(); readAllRecords(scanner); scanner.seekTo(getSomeKey(50)); assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50))); // read the key and see if it matches byte[] readKey = readKey(scanner); assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), readKey)); scanner.seekTo(new byte[0]); byte[] val1 = readValue(scanner); scanner.seekTo(new byte[0]); byte[] val2 = readValue(scanner); assertTrue(Arrays.equals(val1, val2)); // check for lowerBound scanner.lowerBound(getSomeKey(50)); assertTrue("locaton lookup failed", scanner.currentLocation .compareTo(reader.end()) < 0); readKey = readKey(scanner); assertTrue("seeked key does not match", Arrays.equals(readKey, getSomeKey(50))); // check for upper bound scanner.upperBound(getSomeKey(50)); assertTrue("location lookup failed", scanner.currentLocation .compareTo(reader.end()) < 0); readKey = readKey(scanner); assertTrue("seeked key does not match", Arrays.equals(readKey, getSomeKey(51))); scanner.close(); // test for a range of scanner scanner = reader.createScannerByKey(getSomeKey(10), getSomeKey(60)); readAndCheckbytes(scanner, 10, 50); assertFalse(scanner.advance()); scanner.close(); reader.close(); fin.close(); fs.delete(ncTFile, true); }