net.jpountz.lz4.LZ4Exception Java Examples

The following examples show how to use net.jpountz.lz4.LZ4Exception. Each example is taken from an open-source project; the source file, originating project, and license are noted above it.
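As background, LZ4Exception is the unchecked exception that the lz4-java library throws when compression or decompression cannot proceed, most often because a destination buffer is too small or the input is malformed. Here is a minimal, self-contained sketch that provokes and catches it (the payload and the undersized bound are illustrative assumptions):

import net.jpountz.lz4.LZ4Compressor;
import net.jpountz.lz4.LZ4Exception;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4SafeDecompressor;

import java.nio.charset.StandardCharsets;

public class Lz4ExceptionDemo {
    public static void main(String[] args) {
        LZ4Factory factory = LZ4Factory.fastestInstance();
        LZ4Compressor compressor = factory.fastCompressor();
        LZ4SafeDecompressor decompressor = factory.safeDecompressor();

        byte[] original =
                "a reasonably long payload that LZ4 can shrink, repeated a bit, repeated a bit"
                        .getBytes(StandardCharsets.UTF_8);
        byte[] compressed = compressor.compress(original);

        try {
            // Deliberately undersized bound: the safe decompressor throws
            // LZ4Exception when the decompressed output does not fit.
            decompressor.decompress(compressed, original.length / 2);
        } catch (LZ4Exception e) {
            System.err.println("Decompression failed: " + e.getMessage());
        }
    }
}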
Example #1
Source File: Lz4BlockCompressor.java    From flink with Apache License 2.0
@Override
public int compress(byte[] src, int srcOff, int srcLen, byte[] dst, int dstOff)
		throws InsufficientBufferException {
	try {
		int compressedLength = compressor.compress(
				src,
				srcOff,
				srcLen,
				dst,
				dstOff + HEADER_LENGTH
		);
		writeIntLE(compressedLength, dst, dstOff);
		writeIntLE(srcLen, dst, dstOff + 4);
		return HEADER_LENGTH + compressedLength;
	}
	catch (LZ4Exception | BufferOverflowException | ArrayIndexOutOfBoundsException e) {
		throw new InsufficientBufferException(e);
	}
}
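The writeIntLE helper stores the compressed and original lengths as two little-endian ints ahead of the block, matching the SafeUtils.readIntLE calls in the corresponding Lz4BlockDecompressor (Example #9). The excerpt does not include Flink's actual helper; a plausible sketch is:

// Plausible sketch of the writeIntLE helper used above (not shown in the excerpt).
private static void writeIntLE(int value, byte[] dst, int offset) {
	dst[offset] = (byte) value;
	dst[offset + 1] = (byte) (value >>> 8);
	dst[offset + 2] = (byte) (value >>> 16);
	dst[offset + 3] = (byte) (value >>> 24);
}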
 
Example #2
Source File: LZ4Compressor.java    From stratio-cassandra with Apache License 2.0
public int uncompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset) throws IOException
{
    final int decompressedLength =
            (input[inputOffset] & 0xFF)
            | ((input[inputOffset + 1] & 0xFF) << 8)
            | ((input[inputOffset + 2] & 0xFF) << 16)
            | ((input[inputOffset + 3] & 0xFF) << 24);
    final int compressedLength;
    try
    {
        compressedLength = decompressor.decompress(input, inputOffset + INTEGER_BYTES,
                                                   output, outputOffset, decompressedLength);
    }
    catch (LZ4Exception e)
    {
        throw new IOException(e);
    }

    if (compressedLength != inputLength - INTEGER_BYTES)
    {
        throw new IOException("Compressed lengths mismatch");
    }

    return decompressedLength;
}
 
Example #3
Source File: LZ4Compressor.java    From stratio-cassandra with Apache License 2.0
public int compress(byte[] input, int inputOffset, int inputLength, WrappedArray output, int outputOffset) throws IOException
{
    final byte[] dest = output.buffer;
    dest[outputOffset] = (byte) inputLength;
    dest[outputOffset + 1] = (byte) (inputLength >>> 8);
    dest[outputOffset + 2] = (byte) (inputLength >>> 16);
    dest[outputOffset + 3] = (byte) (inputLength >>> 24);
    final int maxCompressedLength = compressor.maxCompressedLength(inputLength);
    try
    {
        return INTEGER_BYTES + compressor.compress(input, inputOffset, inputLength,
                                                   dest, outputOffset + INTEGER_BYTES, maxCompressedLength);
    }
    catch (LZ4Exception e)
    {
        throw new IOException(e);
    }
}
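Passing maxCompressedLength(inputLength) as the last argument lets the compressor throw LZ4Exception instead of writing past the destination, so the caller must have sized output.buffer for the worst case. A minimal caller-side sizing sketch under that contract (the allocation details are an assumption, not stratio-cassandra's actual caller):

// Worst case: the 4-byte length header plus LZ4's maximum expansion of the input.
int required = INTEGER_BYTES + compressor.maxCompressedLength(inputLength);
if (output.buffer.length - outputOffset < required) {
    output.buffer = java.util.Arrays.copyOf(output.buffer, outputOffset + required);
}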
 
Example #4
Source File: LZ4CompressionCodec.java    From distributedlog with Apache License 2.0
@Override
public byte[] decompress(byte[] data, int offset, int length, OpStatsLogger decompressionStat) {
    Preconditions.checkNotNull(data);
    Preconditions.checkArgument(offset >= 0 && offset < data.length);
    Preconditions.checkArgument(length >= 0);
    Preconditions.checkNotNull(decompressionStat);

    Stopwatch watch = Stopwatch.createStarted();
    // Assume a compression ratio of 1/3, i.e. the output is roughly 3x the compressed length.
    int outLength = length * 3;
    while (true) {
        try {
            byte[] decompressed = safeDecompressor.decompress(data, offset, length, outLength);
            decompressionStat.registerSuccessfulEvent(watch.elapsed(TimeUnit.MICROSECONDS));
            return decompressed;
        } catch (LZ4Exception e) {
            outLength *= 2;
        }
    }
}
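Note that the loop above treats every LZ4Exception as "output buffer too small" and doubles its guess, but the same exception is also thrown for malformed input, so a corrupt record keeps the loop doubling until outLength overflows. A bounded variant that fails fast on corrupt data might look like this (the cap is an illustrative assumption, not distributedlog's actual code):

int outLength = length * 3;                // initial guess: ~1/3 compression ratio
final int maxOutLength = 64 * 1024 * 1024; // assumed safety cap of 64 MiB
while (true) {
    try {
        return safeDecompressor.decompress(data, offset, length, outLength);
    } catch (LZ4Exception e) {
        if (outLength >= maxOutLength) {
            throw e; // almost certainly corrupt input, not an undersized buffer
        }
        outLength *= 2;
    }
}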
 
Example #5
Source File: Lz4CompressUtilsTest.java    From hibernate4-memcached with Apache License 2.0
@Test
public void decompressFast_over_exactDecompressedSize() {
    byte[] compressed = Lz4CompressUtils.compress(ORIGINAL_DATA_BYTES);

    expectedException.expect(LZ4Exception.class);
    Lz4CompressUtils.decompressFast(compressed, ORIGINAL_DATA_BYTES.length + 100);
}
 
Example #6
Source File: Lz4BlockCompressor.java    From flink with Apache License 2.0
@Override
public int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff)
		throws InsufficientBufferException {
	try {
		final int prevSrcOff = src.position() + srcOff;
		final int prevDstOff = dst.position() + dstOff;

		int maxCompressedSize = compressor.maxCompressedLength(srcLen);
		int compressedLength = compressor.compress(
				src,
				prevSrcOff,
				srcLen,
				dst,
				prevDstOff + HEADER_LENGTH,
				maxCompressedSize
		);

		src.position(prevSrcOff + srcLen);

		dst.position(prevDstOff);
		dst.putInt(compressedLength);
		dst.putInt(srcLen);
		dst.position(prevDstOff + compressedLength + HEADER_LENGTH);

		return HEADER_LENGTH + compressedLength;
	}
	catch (LZ4Exception | ArrayIndexOutOfBoundsException | BufferOverflowException e) {
		throw new InsufficientBufferException(e);
	}
}
 
Example #7
Source File: Lz4CompressUtilsTest.java    From hibernate4-memcached with Apache License 2.0
@Test
public void decompressFast_less_exactDecompressedSize() {
    byte[] compressed = Lz4CompressUtils.compress(ORIGINAL_DATA_BYTES);

    expectedException.expect(LZ4Exception.class);
    Lz4CompressUtils.decompressFast(compressed, ORIGINAL_DATA_BYTES.length - 100);
}
 
Example #8
Source File: Lz4CompressUtilsTest.java    From hibernate4-memcached with Apache License 2.0
@Test
public void compress_and_decompressSafe_less_decompressLength() {
    byte[] compressed = Lz4CompressUtils.compress(ORIGINAL_DATA_BYTES);

    expectedException.expect(LZ4Exception.class);
    Lz4CompressUtils.decompressSafe(compressed, ORIGINAL_DATA_BYTES.length - 100);
}
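Taken together, the three Lz4CompressUtilsTest cases above pin down why both of lz4-java's decompressors throw LZ4Exception on a bad size hint: LZ4FastDecompressor requires the exact original length, while LZ4SafeDecompressor accepts any sufficiently large upper bound but rejects one that is too small. The same contract, sketched against lz4-java directly (Lz4CompressUtils is the project's own wrapper):

LZ4Factory factory = LZ4Factory.fastestInstance();
byte[] compressed = factory.fastCompressor().compress(originalBytes);

// Fast decompressor: the length must be exact; both +100 and -100 throw LZ4Exception.
byte[] exact = factory.fastDecompressor().decompress(compressed, originalBytes.length);

// Safe decompressor: the length is an upper bound; +100 succeeds, -100 throws.
byte[] bounded = factory.safeDecompressor().decompress(compressed, originalBytes.length + 100);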
 
Example #9
Source File: Lz4BlockDecompressor.java    From flink with Apache License 2.0
@Override
public int decompress(byte[] src, int srcOff, int srcLen, byte[] dst, int dstOff)
		throws InsufficientBufferException, DataCorruptionException {
	final int compressedLen = SafeUtils.readIntLE(src, srcOff);
	final int originalLen = SafeUtils.readIntLE(src, srcOff + 4);
	validateLength(compressedLen, originalLen);

	if (dst.length - dstOff < originalLen) {
		throw new InsufficientBufferException("Buffer length too small");
	}

	if (src.length - srcOff - HEADER_LENGTH < compressedLen) {
		throw new DataCorruptionException("Source data is not integral for decompression.");
	}

	try {
		final int compressedLen2 = decompressor.decompress(
				src,
				srcOff + HEADER_LENGTH,
				dst,
				dstOff,
				originalLen
		);
		if (compressedLen != compressedLen2) {
			throw new DataCorruptionException("Input is corrupted");
		}
	}
	catch (LZ4Exception e) {
		throw new DataCorruptionException("Input is corrupted", e);
	}

	return originalLen;
}
 
Example #10
Source File: Lz4BlockDecompressor.java    From flink with Apache License 2.0
@Override
public int decompress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff)
		throws DataCorruptionException {
	final int prevSrcOff = src.position() + srcOff;
	final int prevDstOff = dst.position() + dstOff;

	src.order(ByteOrder.LITTLE_ENDIAN);
	final int compressedLen = src.getInt(prevSrcOff);
	final int originalLen = src.getInt(prevSrcOff + 4);
	validateLength(compressedLen, originalLen);

	if (dst.capacity() - prevDstOff < originalLen) {
		throw new InsufficientBufferException("Buffer length too small");
	}

	if (src.limit() - prevSrcOff - HEADER_LENGTH < compressedLen) {
		throw new DataCorruptionException("Source data is not integral for decompression.");
	}

	try {
		final int compressedLen2 = decompressor.decompress(
				src,
				prevSrcOff + HEADER_LENGTH,
				dst,
				prevDstOff,
				originalLen
		);
		if (compressedLen != compressedLen2) {
			throw new DataCorruptionException(
					"Input is corrupted, unexpected compressed length.");
		}
		src.position(prevSrcOff + compressedLen + HEADER_LENGTH);
		dst.position(prevDstOff + originalLen);
	}
	catch (LZ4Exception e) {
		throw new DataCorruptionException("Input is corrupted", e);
	}

	return originalLen;
}
 
Example #11
Source File: Lz4BlockCompressor.java    From flink with Apache License 2.0
@Override
public int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff)
		throws InsufficientBufferException {
	try {
		final int prevSrcOff = src.position() + srcOff;
		final int prevDstOff = dst.position() + dstOff;

		int maxCompressedSize = compressor.maxCompressedLength(srcLen);
		int compressedLength = compressor.compress(
				src,
				prevSrcOff,
				srcLen,
				dst,
				prevDstOff + HEADER_LENGTH,
				maxCompressedSize
		);

		src.position(prevSrcOff + srcLen);

		dst.position(prevDstOff);
		dst.order(ByteOrder.LITTLE_ENDIAN);
		dst.putInt(compressedLength);
		dst.putInt(srcLen);
		dst.position(prevDstOff + compressedLength + HEADER_LENGTH);

		return HEADER_LENGTH + compressedLength;
	}
	catch (LZ4Exception | ArrayIndexOutOfBoundsException | BufferOverflowException e) {
		throw new InsufficientBufferException(e);
	}
}
 
Example #12
Source File: ChannelLZ4Decompressor.java    From datakernel with Apache License 2.0
private static ByteBuf decompress(LZ4FastDecompressor decompressor, StreamingXXHash32 checksum, Header header,
		byte[] bytes, int off) throws ParseException {
	ByteBuf outputBuf = ByteBufPool.allocate(header.originalLen);
	outputBuf.tail(header.originalLen);
	switch (header.compressionMethod) {
		case COMPRESSION_METHOD_RAW:
			System.arraycopy(bytes, off, outputBuf.array(), 0, header.originalLen);
			break;
		case COMPRESSION_METHOD_LZ4:
			try {
				int compressedLen2 = decompressor.decompress(bytes, off, outputBuf.array(), 0, header.originalLen);
				if (header.compressedLen != compressedLen2) {
					throw STREAM_IS_CORRUPTED;
				}
			} catch (LZ4Exception e) {
				throw new ParseException(ChannelLZ4Decompressor.class, "Stream is corrupted", e);
			}
			break;
		default:
			throw STREAM_IS_CORRUPTED;
	}
	checksum.reset();
	checksum.update(outputBuf.array(), 0, header.originalLen);
	if (checksum.getValue() != header.check) {
		throw STREAM_IS_CORRUPTED;
	}
	return outputBuf;
}
 
Example #13
Source File: Lz4FrameEncoder.java    From netty-4.1.22 with Apache License 2.0
private void flushBufferedData(ByteBuf out) {
    int flushableBytes = buffer.readableBytes();
    if (flushableBytes == 0) {
        return;
    }
    checksum.reset();
    checksum.update(buffer, buffer.readerIndex(), flushableBytes);
    final int check = (int) checksum.getValue();

    final int bufSize = compressor.maxCompressedLength(flushableBytes) + HEADER_LENGTH;
    out.ensureWritable(bufSize);
    final int idx = out.writerIndex();
    int compressedLength;
    try {
        ByteBuffer outNioBuffer = out.internalNioBuffer(idx + HEADER_LENGTH, out.writableBytes() - HEADER_LENGTH);
        int pos = outNioBuffer.position();
        // We always want to start at position 0 as we take care of reusing the buffer in the encode(...) loop.
        compressor.compress(buffer.internalNioBuffer(buffer.readerIndex(), flushableBytes), outNioBuffer);
        compressedLength = outNioBuffer.position() - pos;
    } catch (LZ4Exception e) {
        throw new CompressionException(e);
    }
    final int blockType;
    if (compressedLength >= flushableBytes) {
        blockType = BLOCK_TYPE_NON_COMPRESSED;
        compressedLength = flushableBytes;
        out.setBytes(idx + HEADER_LENGTH, buffer, 0, flushableBytes);
    } else {
        blockType = BLOCK_TYPE_COMPRESSED;
    }

    out.setLong(idx, MAGIC_NUMBER);
    out.setByte(idx + TOKEN_OFFSET, (byte) (blockType | compressionLevel));
    out.setIntLE(idx + COMPRESSED_LENGTH_OFFSET, compressedLength);
    out.setIntLE(idx + DECOMPRESSED_LENGTH_OFFSET, flushableBytes);
    out.setIntLE(idx + CHECKSUM_OFFSET, check);
    out.writerIndex(idx + HEADER_LENGTH + compressedLength);
    buffer.clear();
}
 
Example #14
Source File: Lz4BlockDecompressor.java    From flink with Apache License 2.0
@Override
public int decompress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff)
		throws DataCorruptionException {
	final int prevSrcOff = src.position() + srcOff;
	final int prevDstOff = dst.position() + dstOff;

	final int compressedLen = src.getInt(prevSrcOff);
	final int originalLen = src.getInt(prevSrcOff + 4);
	validateLength(compressedLen, originalLen);

	if (dst.capacity() - prevDstOff < originalLen) {
		throw new InsufficientBufferException("Buffer length too small");
	}

	if (src.limit() - prevSrcOff - HEADER_LENGTH < compressedLen) {
		throw new DataCorruptionException("Source data is not integral for decompression.");
	}

	try {
		final int compressedLen2 = decompressor.decompress(
				src,
				prevSrcOff + HEADER_LENGTH,
				dst,
				prevDstOff,
				originalLen
		);
		if (compressedLen != compressedLen2) {
			throw new DataCorruptionException(
					"Input is corrupted, unexpected compressed length.");
		}
		src.position(prevSrcOff + compressedLen + HEADER_LENGTH);
		dst.position(prevDstOff + originalLen);
	}
	catch (LZ4Exception e) {
		throw new DataCorruptionException("Input is corrupted", e);
	}

	return originalLen;
}
 
Example #15
Source File: NormalSketch.java    From vespa with Apache License 2.0
/**
 * Serializes the Sketch.
 *
 * Serialization format
 * ==================
 * Original size:     4 bytes
 * Compressed size:   4 bytes
 * Compressed data:   N * 1 bytes
 *
 * Invariant:
 *      compressed size <= original size
 *
 * Special case:
 *      compressed size == original size => data is uncompressed
 *
 * @param buf Serializer
 */
@Override
protected void onSerialize(Serializer buf) {
    super.onSerialize(buf);
    buf.putInt(null, data.length);
    try {
        LZ4Compressor c = lz4Factory.highCompressor();
        byte[] compressedData = new byte[data.length];
        int compressedSize = c.compress(data, compressedData);
        serializeDataArray(compressedData, compressedSize, buf);
    } catch (LZ4Exception e) {
        // LZ4Compressor.compress will throw this exception if it is unable to compress
        // into compressedData (when compressed size >= original size)
        serializeDataArray(data, data.length, buf);
    }
}
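Given the documented format, the matching read path has to compare the two sizes to decide whether the payload was stored raw. A hedged sketch of that deserialization, where buf stands for the matching Deserializer and the accessor names are assumptions, not Vespa's actual onDeserialize:

// Hedged sketch of reading the format documented above.
int originalSize = buf.getInt(null);
int compressedSize = buf.getInt(null);
byte[] payload = buf.getBytes(null, compressedSize);   // assumed accessor
if (compressedSize == originalSize) {
    data = payload;                                    // special case: stored uncompressed
} else {
    data = new byte[originalSize];
    lz4Factory.fastDecompressor().decompress(payload, 0, data, 0, originalSize);
}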
 
Example #16
Source File: KafkaLZ4BlockInputStream.java    From joyqueue with Apache License 2.0
/**
 * Decompresses (if necessary) buffered data, optionally computes and validates an
 * XXHash32 checksum, and writes the result to a buffer.
 *
 * @throws IOException if the stream ends prematurely, a block checksum does not
 *                     match, or decompression fails
 */
private void readBlock() throws IOException {
    int blockSize = KafkaBufferUtils.readUnsignedIntLE(in);

    // Check for EndMark
    if (blockSize == 0) {
        finished = true;
        // TODO implement content checksum, update flg.validate()
        return;
    } else if (blockSize > maxBlockSize) {
        throw new IOException(String.format("Block size %s exceeded max: %s", blockSize, maxBlockSize));
    }

    boolean compressed = (blockSize & KafkaLZ4BlockOutputStream.LZ4_FRAME_INCOMPRESSIBLE_MASK) == 0;
    byte[] bufferToRead;
    if (compressed) {
        bufferToRead = compressedBuffer;
    } else {
        blockSize &= ~KafkaLZ4BlockOutputStream.LZ4_FRAME_INCOMPRESSIBLE_MASK;
        bufferToRead = buffer;
        bufferSize = blockSize;
    }

    if (in.read(bufferToRead, 0, blockSize) != blockSize) {
        throw new IOException(PREMATURE_EOS);
    }

    // verify checksum
    if (flg.isBlockChecksumSet() && KafkaBufferUtils.readUnsignedIntLE(in) != checksum.hash(bufferToRead, 0, blockSize, 0)) {
        throw new IOException(BLOCK_HASH_MISMATCH);
    }

    if (compressed) {
        try {
            bufferSize = decompressor.decompress(compressedBuffer, 0, blockSize, buffer, 0, maxBlockSize);
        } catch (LZ4Exception e) {
            throw new IOException(e);
        }
    }

    bufferOffset = 0;
}
 