org.apache.cassandra.io.util.SequentialWriter Java Examples

The following examples show how to use org.apache.cassandra.io.util.SequentialWriter. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that RandomAccessReader.bytesRemaining() tracks the unread byte
 * count through sequential reads and after seek()/skipBytes().
 */
@Test
public void testBytesRemaining() throws IOException {
    SequentialWriter w = createTempFile("brafBytesRemaining");

    // Slightly more than one buffer so reads cross a buffer boundary.
    int toWrite = RandomAccessReader.DEFAULT_BUFFER_SIZE + 10;

    w.write(generateByteArray(toWrite));

    w.sync();

    RandomAccessReader r = RandomAccessReader.open(w, fs);

    // JUnit convention is assertEquals(expected, actual); the original had
    // the arguments reversed, which makes failure messages misleading.
    assertEquals(toWrite, r.bytesRemaining());

    for (int i = 1; i <= r.length(); i++) {
        r.read();
        assertEquals(r.length() - i, r.bytesRemaining());
    }

    r.seek(0);
    r.skipBytes(10);
    assertEquals(r.length() - 10, r.bytesRemaining());

    w.close();
    r.close();
}
 
Example #2
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies file-pointer bookkeeping: the writer pointer advances with
 * writes, and the reader pointer advances with seek/skip/read.
 */
@Test
public void testGetFilePointer() throws IOException {
    final SequentialWriter w = createTempFile("brafGetFilePointer");

    // assertEquals(expected, actual) — the original reversed the arguments.
    assertEquals(0, w.getFilePointer()); // initial position should be 0

    w.write(generateByteArray(20));
    assertEquals(20, w.getFilePointer()); // position 20 after writing 20 bytes

    w.sync();

    RandomAccessReader r = RandomAccessReader.open(w, fs);

    // position should change after skip bytes
    r.seek(0);
    r.skipBytes(15);
    assertEquals(15, r.getFilePointer());

    r.read();
    assertEquals(16, r.getFilePointer());
    r.read(new byte[4]);
    assertEquals(20, r.getFilePointer());

    w.close();
    r.close();
}
 
Example #3
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 6 votes vote down vote up
/**
 * Flushes this level's buffered data to {@code out}: writes the sparse-index
 * marker (-1 when there are no sparse terms, otherwise {@code offset}),
 * appends any pending token trees, pads to the block boundary, and resets
 * internal state for the next block.
 */
@Override
public void flushAndClear(SequentialWriter out) throws IOException
{
    super.flushAndClear(out);

    // -1 signals "no sparse value terms"; otherwise record the offset.
    out.writeInt(sparseValueTerms == 0 ? -1 : offset);

    if (!containers.isEmpty())
    {
        for (TokenTreeBuilder container : containers)
            container.write(out);
    }

    if (sparseValueTerms > 0)
        combinedIndex.finish().write(out);

    alignToBlock(out);

    // reset state so the instance can accumulate the next block
    containers.clear();
    combinedIndex = new TokenTreeBuilder();
    offset = 0;
    sparseValueTerms = 0;
}
 
Example #4
Source File: TokenTreeTest.java    From sasi with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that TokenTreeBuilder.serializedSize() matches the number of
 * bytes actually written to disk.
 */
@Test
public void testSerializedSize() throws Exception
{
    final TokenTreeBuilder builder = new TokenTreeBuilder(tokens).finish();

    final File treeFile = File.createTempFile("token-tree-size-test", "tt");
    treeFile.deleteOnExit();

    final SequentialWriter writer = new SequentialWriter(treeFile, 4096, false);
    builder.write(writer);
    writer.close();

    // Original leaked the reader (never closed) and reversed the
    // assertEquals(expected, actual) argument order.
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    try
    {
        Assert.assertEquals(builder.serializedSize(), (int) reader.bytesRemaining());
    }
    finally
    {
        reader.close();
    }
}
 
Example #5
Source File: TokenTreeTest.java    From sasi with Apache License 2.0 6 votes vote down vote up
/**
 * Ensures that skipping beyond the last token in a serialized tree does
 * not throw.
 */
@Test
public void skipPastEnd() throws Exception
{
    final TokenTreeBuilder builder = new TokenTreeBuilder(simpleTokenMap).finish();

    final File treeFile = File.createTempFile("token-tree-skip-past-test", "tt");
    treeFile.deleteOnExit();

    final SequentialWriter writer = new SequentialWriter(treeFile, 4096, false);
    builder.write(writer);
    writer.close();

    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    try
    {
        final RangeIterator<Long, Token> tokenTree = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);

        // a target beyond the last key must be handled gracefully
        tokenTree.skipTo(simpleTokenMap.lastKey() + 10);
    }
    finally
    {
        reader.close(); // original leaked the reader
    }
}
 
Example #6
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the index level at {@code idx}, lazily creating any missing
 * levels, each backed by a fresh MutableBlock writing to {@code out}.
 */
private MutableLevel<InMemoryPointerTerm> getIndexLevel(int idx, SequentialWriter out)
{
    // Grow the level list until the requested index exists; this single
    // loop covers both the empty-list and the too-short cases.
    while (levels.size() <= idx)
        levels.add(new MutableLevel<>(out, new MutableBlock<InMemoryPointerTerm>()));

    return levels.get(idx);
}
 
Example #7
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 5 votes vote down vote up
/**
 * bytesPastMark() must fail with an AssertionError when the current
 * position is before the mark (a negative distance).
 */
@Test(expected = AssertionError.class)
public void testAssertionErrorWhenBytesPastMarkIsNegative() throws IOException {
    SequentialWriter w = createTempFile("brafAssertionErrorWhenBytesPastMarkIsNegative");
    w.write(new byte[30]);
    w.close();

    RandomAccessReader r = RandomAccessReader.open(w, fs);
    try {
        r.seek(10);
        r.mark();

        // seeking before the mark makes the distance negative
        r.seek(0);
        r.bytesPastMark();
    } finally {
        // Original leaked the reader: the expected AssertionError skipped
        // any close() placed after the throwing call.
        r.close();
    }
}
 
Example #8
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 5 votes vote down vote up
/**
 * Exercises mark()/reset()/bytesPastMark() both with the implicit last
 * mark and with an explicit FileMark handle.
 */
@Test
public void testMarkAndReset() throws IOException {
    SequentialWriter w = createTempFile("brafTestMark");
    w.write(new byte[30]);

    w.close();

    RandomAccessReader file = RandomAccessReader.open(w, fs);

    file.seek(10);
    FileMark mark = file.mark();

    file.seek(file.length());
    assertTrue(file.isEOF());

    // assertEquals(expected, actual) — the original reversed the arguments.
    // reset() returns to the implicit mark at position 10 -> 20 bytes left.
    file.reset();
    assertEquals(20, file.bytesRemaining());

    file.seek(file.length());
    assertTrue(file.isEOF());

    // reset(mark) behaves identically with the explicit handle
    file.reset(mark);
    assertEquals(20, file.bytesRemaining());

    file.seek(file.length());
    assertEquals(20, file.bytesPastMark());
    assertEquals(20, file.bytesPastMark(mark));

    file.reset(mark);
    assertEquals(0, file.bytesPastMark());

    file.close();
}
 
Example #9
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that reads/writes after close() fail, while a freshly opened
 * reader still sees the flushed contents.
 */
@Test
public void testClose() throws IOException {
    final SequentialWriter w = createTempFile("brafClose");

    byte[] data = generateByteArray(RandomAccessReader.DEFAULT_BUFFER_SIZE + 20);

    w.write(data);
    w.close(); // will flush

    final RandomAccessReader r = RandomAccessReader.open(new Path(new File(w.getPath()).getPath()), fs);

    r.close(); // closing to test read after close

    expectException(new Callable<Object>() {
        public Object call() {
            return r.read();
        }
    }, AssertionError.class);

    expectException(new Callable<Object>() {
        public Object call() throws IOException {
            w.write(generateByteArray(1));
            return null;
        }
    }, ClosedChannelException.class);

    RandomAccessReader copy = RandomAccessReader.open(new Path(new File(r.getPath()).getPath()), fs);
    try {
        ByteBuffer contents = copy.readBytes((int) copy.length());

        // assertEquals(expected, actual) — the original reversed the arguments.
        assertEquals(data.length, contents.limit());
        assertEquals(0, ByteBufferUtil.compare(contents, data));
    } finally {
        copy.close(); // original leaked this reader
    }
}
 
Example #10
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies seek() positioning and that out-of-range seeks (past EOF or
 * negative) are rejected with IllegalArgumentException.
 */
@Test
public void testSeek() throws Exception {
    SequentialWriter w = createTempFile("brafSeek");
    byte[] data = generateByteArray(RandomAccessReader.DEFAULT_BUFFER_SIZE + 20);
    w.write(data);
    w.close();

    final RandomAccessReader file = RandomAccessReader.open(w, fs);

    // assertEquals(expected, actual) — the original reversed the arguments.
    file.seek(0);
    assertEquals(0, file.getFilePointer());
    assertEquals(file.length(), file.bytesRemaining());

    file.seek(20);
    assertEquals(20, file.getFilePointer());
    assertEquals(file.length() - 20, file.bytesRemaining());

    // seeking past the end of the file produces IllegalArgumentException
    // (original comment incorrectly said EOFException)
    expectException(new Callable<Object>() {
        public Object call() {
            file.seek(file.length() + 30);
            return null;
        }
    }, IllegalArgumentException.class);

    // negative positions are rejected the same way
    expectException(new Callable<Object>() {
        public Object call() throws IOException {
            file.seek(-1);
            return null;
        }
    }, IllegalArgumentException.class);

    file.close();
}
 
Example #11
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 5 votes vote down vote up
/**
 * Checks that SequentialWriter.length() accounts for buffered (unflushed)
 * data as well as synced data, and that a reader opened after close sees
 * the same final length.
 */
@Test
public void testLength() throws IOException {
    File tmpFile = File.createTempFile("lengthtest", "bin");
    SequentialWriter writer = SequentialWriter.open(tmpFile);
    assertEquals(0, writer.length());

    // A chunk smaller than the buffer stays in memory but still counts.
    byte[] small = generateByteArray(RandomAccessReader.DEFAULT_BUFFER_SIZE / 2);
    writer.write(small);
    assertEquals(small.length, writer.length());

    // Syncing to disk must not change the reported length.
    writer.sync();
    assertEquals(small.length, writer.length());

    // Writing more than the buffer holds forces a flush; length keeps up.
    byte[] large = generateByteArray(RandomAccessReader.DEFAULT_BUFFER_SIZE * 2);
    writer.write(large);
    assertEquals(small.length + large.length, writer.length());

    writer.close();

    // A fresh reader (which uses the cached length) agrees with the writer.
    RandomAccessReader reader = RandomAccessReader.open(new Path(tmpFile.getPath()), fs);
    assertEquals(small.length + large.length, reader.length());
    reader.close();
}
 
Example #12
Source File: TokenTreeTest.java    From sasi with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a TokenTree containing one entry per token in the inclusive range
 * [minToken, maxToken], each mapped to a single offset equal to the token.
 */
private static TokenTree generateTree(final long minToken, final long maxToken) throws IOException
{
    // Plain population instead of double-brace initialization: the anonymous
    // TreeMap subclass idiom generates an extra class per use for no benefit.
    final SortedMap<Long, LongSet> toks = new TreeMap<>();
    for (long i = minToken; i <= maxToken; i++)
    {
        LongSet offsetSet = new LongOpenHashSet();
        offsetSet.add(i);
        toks.put(i, offsetSet);
    }

    final TokenTreeBuilder builder = new TokenTreeBuilder(toks).finish();
    final File treeFile = File.createTempFile("token-tree-get-test", "tt");
    treeFile.deleteOnExit();

    final SequentialWriter writer = new SequentialWriter(treeFile, 4096, false);
    builder.write(writer);
    writer.close();

    RandomAccessReader reader = null;

    try
    {
        reader = RandomAccessReader.open(treeFile);
        return new TokenTree(new MappedBuffer(reader));
    }
    finally
    {
        FileUtils.closeQuietly(reader);
    }
}
 
Example #13
Source File: TokenTreeTest.java    From sasi with Apache License 2.0 5 votes vote down vote up
/**
 * Serializes a token tree to disk, reads it back, and verifies the tree
 * iterator yields exactly the entries of the source map, in order.
 */
@Test
public void buildSerializeAndIterate() throws Exception
{
    final TokenTreeBuilder builder = new TokenTreeBuilder(simpleTokenMap).finish();

    final File treeFile = File.createTempFile("token-tree-iterate-test1", "tt");
    treeFile.deleteOnExit();

    final SequentialWriter writer = new SequentialWriter(treeFile, 4096, false);
    builder.write(writer);
    writer.close();

    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));

    final Iterator<Token> fromTree = tokenTree.iterator(KEY_CONVERTER);
    final Iterator<Map.Entry<Long, LongSet>> expected = simpleTokenMap.entrySet().iterator();
    while (fromTree.hasNext() && expected.hasNext())
    {
        final Token actualToken = fromTree.next();
        final Map.Entry<Long, LongSet> expectedEntry = expected.next();

        Assert.assertEquals(expectedEntry.getKey(), actualToken.get());
        Assert.assertEquals(convert(expectedEntry.getValue()), convert(actualToken));
    }

    // Both iterators must be exhausted together.
    Assert.assertFalse("token iterator not finished", fromTree.hasNext());
    Assert.assertFalse("list iterator not finished", expected.hasNext());

    reader.close();
}
 
Example #14
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the collected offsets (count, then each offset as a short) and
 * the buffered block data to {@code out}, pads to the block boundary,
 * and clears state for reuse.
 */
public void flushAndClear(SequentialWriter out) throws IOException
{
    final int count = offsets.size();
    out.writeInt(count);
    for (int idx = 0; idx < count; idx++)
        out.writeShort(offsets.get(idx));

    buffer.writeFullyTo(out);
    alignToBlock(out);

    offsets.clear();
    buffer.clear();
}
 
Example #15
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 5 votes vote down vote up
/**
 * Adds a data term to the data level; when that emits a pointer term,
 * propagates it upward through successive index levels until one absorbs
 * it without producing a new pointer.
 */
private void addTerm(InMemoryDataTerm term, SequentialWriter out) throws IOException
{
    InMemoryPointerTerm promoted = dataLevel.add(term);

    int levelIdx = 0;
    while (promoted != null)
        promoted = getIndexLevel(levelIdx++, out).add(promoted);
}
 
Example #16
Source File: RandomAccessReader.java    From hadoop-sstable with Apache License 2.0 4 votes vote down vote up
// Test-only helper: opens a reader over the file backing the given writer,
// using the default buffer size. NOTE(review): the meaning of the
// false/null arguments is not visible here — confirm against the primary
// open() overload before relying on them.
@VisibleForTesting
static RandomAccessReader open(SequentialWriter writer, FileSystem fs) {
    return open(new Path(writer.getPath()), DEFAULT_BUFFER_SIZE, false, null, fs);
}
 
Example #17
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 4 votes vote down vote up
/**
 * Level holding data terms. Delegates block bookkeeping to the parent
 * level and starts an empty {@code superBlockTree}.
 * NOTE(review): super-block semantics are defined elsewhere in
 * OnDiskIndexBuilder — confirm there.
 */
public DataBuilderLevel(SequentialWriter out, MutableBlock<InMemoryDataTerm> block)
{
    super(out, block);
    superBlockTree = new TokenTreeBuilder();
}
 
Example #18
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a level that writes to {@code out}; {@code block} becomes the
 * block currently being filled (inProcessBlock).
 */
public MutableLevel(SequentialWriter out, MutableBlock<T> block)
{
    this.out = out;
    this.inProcessBlock = block;
}
 
Example #19
Source File: OnDiskIndexBuilder.java    From sasi with Apache License 2.0 4 votes vote down vote up
/**
 * Advances {@code out} to the next BLOCK_SIZE boundary by skipping padding
 * bytes; does nothing when the writer is already aligned.
 * NOTE(review): the bitmask test assumes BLOCK_SIZE is a power of two —
 * confirm where BLOCK_SIZE is declared.
 */
protected static void alignToBlock(SequentialWriter out) throws IOException
{
    long endOfBlock = out.getFilePointer();
    if ((endOfBlock & (BLOCK_SIZE - 1)) != 0) // align on the block boundary if needed
        out.skipBytes((int) (FBUtilities.align(endOfBlock, BLOCK_SIZE) - endOfBlock));
}
 
Example #20
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies behavior of a read-only reader: EOF detection, rejection of
 * seeks past EOF and of every write() overload, and correct contents via
 * bulk, byte-at-a-time, and array reads.
 */
@Test
public void testReadOnly() throws IOException {
    SequentialWriter file = createTempFile("brafReadOnlyTest");

    byte[] data = new byte[20];
    for (int i = 0; i < data.length; i++)
        data[i] = 'c';

    file.write(data);
    file.sync(); // flushing file to the disk

    // read-only copy of the file, with fixed file length
    final RandomAccessReader copy = RandomAccessReader.open(new Path(file.getPath()), fs);

    copy.seek(copy.length());
    assertTrue(copy.bytesRemaining() == 0 && copy.isEOF());

    // can't seek past the end of the file for read-only files
    expectException(new Callable<Object>() {
        public Object call() {
            copy.seek(copy.length() + 1);
            return null;
        }
    }, IllegalArgumentException.class);

    // Any write() call should fail
    expectException(new Callable<Object>() {
        public Object call() throws IOException {
            copy.write(1);
            return null;
        }
    }, UnsupportedOperationException.class);

    expectException(new Callable<Object>() {
        public Object call() throws IOException {
            copy.write(new byte[1]);
            return null;
        }
    }, UnsupportedOperationException.class);

    expectException(new Callable<Object>() {
        public Object call() throws IOException {
            copy.write(new byte[3], 0, 2);
            return null;
        }
    }, UnsupportedOperationException.class);

    // assertEquals(expected, actual) — the original reversed the arguments.
    copy.seek(0);
    copy.skipBytes(5);

    assertEquals(15, copy.bytesRemaining());
    assertEquals(5, copy.getFilePointer());
    assertTrue(!copy.isEOF());

    copy.seek(0);
    ByteBuffer contents = copy.readBytes((int) copy.length());

    assertEquals(copy.length(), contents.limit());
    assertTrue(ByteBufferUtil.compare(contents, data) == 0);

    copy.seek(0);

    int count = 0;
    while (!copy.isEOF()) {
        assertEquals((byte) 'c', (byte) copy.read());
        count++;
    }

    assertEquals(copy.length(), count);

    copy.seek(0);
    byte[] content = new byte[10];
    copy.read(content);

    assertEquals("cccccccccc", new String(content));

    file.close();
    copy.close();
}
 
Example #21
Source File: BufferedRandomAccessFileTest.java    From hadoop-sstable with Apache License 2.0 4 votes vote down vote up
/**
 * Opens a SequentialWriter over a fresh temporary file that is deleted
 * when the JVM exits.
 */
private SequentialWriter createTempFile(String name) throws IOException {
    File f = File.createTempFile(name, null);
    f.deleteOnExit();
    return SequentialWriter.open(f);
}