net.jpountz.lz4.LZ4Factory Java Examples
The following examples show how to use net.jpountz.lz4.LZ4Factory. Each example is drawn from an open-source project; the project and its license are noted above the code.
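As an orientation before the examples, here is a minimal, self-contained round trip through the same API. It is a sketch that uses only calls which recur in the examples below; the class name is arbitrary and error handling is omitted.

import net.jpountz.lz4.LZ4Compressor;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;

import java.nio.charset.StandardCharsets;

public class Lz4RoundTrip {
    public static void main(String[] args) {
        LZ4Factory factory = LZ4Factory.fastestInstance();
        byte[] data = "hello, lz4".getBytes(StandardCharsets.UTF_8);

        // Compress into a buffer sized for the worst case.
        LZ4Compressor compressor = factory.fastCompressor();
        byte[] compressed = new byte[compressor.maxCompressedLength(data.length)];
        int compressedLength = compressor.compress(data, 0, data.length, compressed, 0, compressed.length);

        // Decompress; the fast decompressor needs the original (uncompressed) length.
        LZ4FastDecompressor decompressor = factory.fastDecompressor();
        byte[] restored = new byte[data.length];
        decompressor.decompress(compressed, 0, restored, 0, data.length);
    }
}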
Example #1
Source File: LZ4CompressedColumnReader.java From kylin with Apache License 2.0
public LZ4CompressedColumnReader(ByteBuffer dataBuffer, int columnDataStartOffset, int columnDataLength, int rowCount) {
    this.rowCount = rowCount;
    this.dataBuffer = dataBuffer;
    int footStartOffset = columnDataStartOffset + columnDataLength - 8;
    dataBuffer.position(footStartOffset);
    this.numValInBlock = dataBuffer.getInt();
    this.valLen = dataBuffer.getInt();
    this.blockDataReader = new GeneralColumnDataReader(dataBuffer, columnDataStartOffset, columnDataLength - 8);
    this.currBlockNum = -1;
    // this.readBuffer = new byte[valLen];
    this.deCompressor = LZ4Factory.fastestInstance().safeDecompressor();
    this.maxBufferLength = numValInBlock * valLen;
    this.decompressedBuffer = ByteBuffer.allocate(maxBufferLength);
}
Example #2
Source File: ChannelLZ4DecompressorTest.java From datakernel with Apache License 2.0
@Test
public void testTruncatedData() {
    ChannelLZ4Compressor compressor = ChannelLZ4Compressor.create(LZ4Factory.fastestInstance().fastCompressor());
    ChannelLZ4Decompressor decompressor = ChannelLZ4Decompressor.create();

    ByteBufQueue queue = new ByteBufQueue();
    await(ChannelSupplier.of(ByteBufStrings.wrapAscii("TestData")).transformWith(compressor)
            .streamTo(ChannelConsumer.ofConsumer(queue::add)));

    // add trailing zero bytes
    queue.add(ByteBuf.wrapForReading(new byte[10]));

    Throwable e = awaitException(ChannelSupplier.of(queue.takeRemaining())
            .transformWith(decompressor)
            .streamTo(ChannelConsumer.ofConsumer(data -> System.out.println(data.asString(UTF_8)))));

    assertSame(UNEXPECTED_DATA_EXCEPTION, e);
}
Example #3
Source File: LZ4CompressedColumnReader.java From kylin-on-parquet-v2 with Apache License 2.0
public LZ4CompressedColumnReader(ByteBuffer dataBuffer, int columnDataStartOffset, int columnDataLength, int rowCount) {
    this.rowCount = rowCount;
    this.dataBuffer = dataBuffer;
    int footStartOffset = columnDataStartOffset + columnDataLength - 8;
    dataBuffer.position(footStartOffset);
    this.numValInBlock = dataBuffer.getInt();
    this.valLen = dataBuffer.getInt();
    this.blockDataReader = new GeneralColumnDataReader(dataBuffer, columnDataStartOffset, columnDataLength - 8);
    this.currBlockNum = -1;
    // this.readBuffer = new byte[valLen];
    this.deCompressor = LZ4Factory.fastestInstance().safeDecompressor();
    this.maxBufferLength = numValInBlock * valLen;
    this.decompressedBuffer = ByteBuffer.allocate(maxBufferLength);
}
Example #4
Source File: Lz4Compress.java From compress with MIT License
@Override
public byte[] uncompress(byte[] data) throws IOException {
    LZ4Factory factory = LZ4Factory.fastestInstance();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    LZ4FastDecompressor decompresser = factory.fastDecompressor();
    LZ4BlockInputStream lzis = new LZ4BlockInputStream(new ByteArrayInputStream(data), decompresser);
    int count;
    byte[] buffer = new byte[2048];
    while ((count = lzis.read(buffer)) != -1) {
        baos.write(buffer, 0, count);
    }
    lzis.close();
    return baos.toByteArray();
}
Example #5
Source File: Lz4FrameEncoder.java From netty-4.1.22 with Apache License 2.0
/**
 * Creates a new customizable LZ4 encoder.
 *
 * @param factory        user customizable {@link LZ4Factory} instance
 *                       which may be JNI bindings to the original C implementation, a pure Java implementation
 *                       or a Java implementation that uses the {@link sun.misc.Unsafe}
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 * @param blockSize      the maximum number of bytes to try to compress at once,
 *                       must be >= 64 and <= 32 M
 * @param checksum       the {@link Checksum} instance to use to check data for integrity
 * @param maxEncodeSize  the maximum size for an encode (compressed) buffer
 */
public Lz4FrameEncoder(LZ4Factory factory, boolean highCompressor, int blockSize,
                       Checksum checksum, int maxEncodeSize) {
    if (factory == null) {
        throw new NullPointerException("factory");
    }
    if (checksum == null) {
        throw new NullPointerException("checksum");
    }
    compressor = highCompressor ? factory.highCompressor() : factory.fastCompressor();
    this.checksum = ByteBufChecksum.wrapChecksum(checksum);
    compressionLevel = compressionLevel(blockSize);
    this.blockSize = blockSize;
    this.maxEncodeSize = ObjectUtil.checkPositive(maxEncodeSize, "maxEncodeSize");
    finished = false;
}
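A typical way to satisfy this constructor is sketched below. It is not taken from the Netty sources; the class name, seed constant, and size choices are assumptions, and the checksum construction mirrors the test helper in Example #13.

import io.netty.handler.codec.compression.Lz4FrameEncoder;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.xxhash.XXHashFactory;

import java.util.zip.Checksum;

public final class EncoderFactory {
    // Hypothetical seed: any fixed value works, as long as encoder and decoder agree on it.
    private static final int SEED = 0x9747b28c;

    public static Lz4FrameEncoder newFastEncoder() {
        Checksum checksum = XXHashFactory.fastestInstance().newStreamingHash32(SEED).asChecksum();
        return new Lz4FrameEncoder(
                LZ4Factory.fastestInstance(), // JNI bindings when available, else pure Java
                false,                        // fast compressor instead of the high compressor
                1 << 16,                      // 64 KB blocks, within the documented 64 .. 32M range
                checksum,
                1 << 20);                     // assumed 1 MB ceiling for one encoded buffer
    }
}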
Example #6
Source File: KafkaLZ4BlockOutputStream.java From joyqueue with Apache License 2.0
/**
 * Create a new {@link OutputStream} that will compress data using the LZ4 algorithm.
 *
 * @param out           The output stream to compress
 * @param blockSize     Default: 4. The block size used during compression. 4=64kb, 5=256kb, 6=1mb, 7=4mb.
 *                      All other values will generate an exception
 * @param blockChecksum Default: false. When true, a XXHash32 checksum is computed and appended
 *                      to the stream for every block of data
 * @throws IOException
 */
public KafkaLZ4BlockOutputStream(OutputStream out, int blockSize, boolean blockChecksum) throws IOException {
    super(out);
    compressor = LZ4Factory.fastestInstance().fastCompressor();
    checksum = XXHashFactory.fastestInstance().hash32();
    bd = new BD(blockSize);
    flg = new FLG(blockChecksum);
    bufferOffset = 0;
    maxBlockSize = bd.getBlockMaximumSize();
    buffer = new byte[maxBlockSize];
    compressedBuffer = new byte[compressor.maxCompressedLength(maxBlockSize)];
    finished = false;
    writeHeader();
}
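Since this constructor produces a regular OutputStream wrapper, usage is plain stream code. A minimal sketch, assuming the joyqueue class above is on the classpath (the file name and payload are made up):

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class WriteLz4Frame {
    public static void main(String[] args) throws IOException {
        byte[] payload = "some payload".getBytes(StandardCharsets.UTF_8);
        // blockSize 4 = 64 KB blocks, no per-block checksum, per the javadoc above
        try (KafkaLZ4BlockOutputStream out =
                new KafkaLZ4BlockOutputStream(new FileOutputStream("data.lz4"), 4, false)) {
            out.write(payload);
        }
    }
}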
Example #7
Source File: FSInputLZ4CompressedColumnReader.java From kylin with Apache License 2.0
public FSInputLZ4CompressedColumnReader(FSDataInputStream fsInputStream, int columnDataStartOffset,
        int columnDataLength, int rowCount) throws IOException {
    this.rowCount = rowCount;
    this.fsInputStream = fsInputStream;
    int footStartOffset = columnDataStartOffset + columnDataLength - 8;
    fsInputStream.seek(footStartOffset);
    this.numValInBlock = fsInputStream.readInt();
    this.valLen = fsInputStream.readInt();
    fsInputStream.seek(columnDataStartOffset);
    this.currBlockNum = -1;
    this.deCompressor = LZ4Factory.fastestInstance().safeDecompressor();
    this.maxDecompressedLength = numValInBlock * valLen;
}
Example #8
Source File: LZ4CompressedColumnWriter.java From kylin with Apache License 2.0
public LZ4CompressedColumnWriter(int valLen, int rowCnt, int compressBlockSize, OutputStream output) {
    this.valLen = valLen;
    this.numValInBlock = compressBlockSize / valLen;
    this.blockCnt = rowCnt / numValInBlock;
    if (rowCnt % numValInBlock != 0) {
        blockCnt++;
    }
    this.compressor = LZ4Factory.fastestInstance().highCompressor();
    this.writeBuffer = ByteBuffer.allocate(numValInBlock * valLen);
    this.dataOutput = new DataOutputStream(output);
    this.blockDataWriter = new GeneralColumnDataWriter(blockCnt, dataOutput);
}
Example #9
Source File: LZ4CompressorTest.java From kylin with Apache License 2.0
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.out.println("args[0] must be data file path");
        return;
    }
    LZ4Factory factory = LZ4Factory.fastestInstance();

    byte[] data = Files.toByteArray(new File(args[0]));
    final int decompressedLength = data.length;

    // compress data
    LZ4Compressor compressor = factory.fastCompressor();
    long start = System.currentTimeMillis();
    int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
    byte[] compressed = new byte[maxCompressedLength];
    int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);
    System.out.println("compress take:" + (System.currentTimeMillis() - start));
    System.out.println(compressedLength);

    // decompress data
    // - method 1: when the decompressed length is known
    LZ4FastDecompressor decompressor = factory.fastDecompressor();
    start = System.currentTimeMillis();
    byte[] restored = new byte[decompressedLength];
    int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
    System.out.println("decompress take:" + (System.currentTimeMillis() - start));
    System.out.println(decompressedLength);
    // compressedLength == compressedLength2

    // - method 2: when the compressed length is known (a little slower);
    //   the destination buffer needs to be over-sized
    LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
    int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
}
Example #10
Source File: Lz4Compress.java From compress with MIT License
@Override
public byte[] compress(byte[] data) throws IOException {
    LZ4Factory factory = LZ4Factory.fastestInstance();
    ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
    LZ4Compressor compressor = factory.fastCompressor();
    LZ4BlockOutputStream compressedOutput = new LZ4BlockOutputStream(byteOutput, 2048, compressor);
    compressedOutput.write(data);
    compressedOutput.close();
    return byteOutput.toByteArray();
}
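Examples #4 and #10 are the two halves of the same codec, so a round trip through them looks like the sketch below. Instantiating Lz4Compress with a no-arg constructor is an assumption about the class, not something shown in the excerpts.

import java.io.IOException;
import java.util.Arrays;

public class Lz4CompressRoundTrip {
    public static void main(String[] args) throws IOException {
        byte[] original = "round trip me".getBytes();
        Lz4Compress codec = new Lz4Compress(); // assumes a public no-arg constructor
        byte[] compressed = codec.compress(original);
        byte[] restored = codec.uncompress(compressed);
        System.out.println(Arrays.equals(original, restored)); // expected: true
    }
}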
Example #11
Source File: CompressedBuffedReaderTest.java From ClickHouse-Native-JDBC with Apache License 2.0
private byte[] compressedData(byte[] bytes) {
    LZ4Compressor lz4Compressor = LZ4Factory.safeInstance().fastCompressor();
    byte[] compressData = lz4Compressor.compress(bytes);
    // frame layout: 16-byte checksum placeholder, 9-byte header, then the compressed payload
    byte[] data = new byte[compressData.length + 9 + 16];
    data[16] = (byte) (0x82 & 0xFF); // compression method byte; 0x82 marks LZ4
    System.arraycopy(compressData, 0, data, 9 + 16, compressData.length);
    System.arraycopy(littleEndian(compressData.length + 9), 0, data, 17, 4); // compressed size incl. header
    System.arraycopy(littleEndian(bytes.length), 0, data, 21, 4);            // uncompressed size
    return data;
}
Example #12
Source File: KafkaLZ4BlockInputStream.java From joyqueue with Apache License 2.0
/**
 * Create a new {@link InputStream} that will decompress data using the LZ4 algorithm.
 *
 * @param in The stream to decompress
 * @throws IOException
 */
public KafkaLZ4BlockInputStream(InputStream in, boolean ignoreFlagDescriptorChecksum) throws IOException {
    super(in);
    this.ignoreFlagDescriptorChecksum = ignoreFlagDescriptorChecksum;
    decompressor = LZ4Factory.fastestInstance().safeDecompressor();
    checksum = XXHashFactory.fastestInstance().hash32();
    readHeader();
    maxBlockSize = bd.getBlockMaximumSize();
    buffer = new byte[maxBlockSize];
    compressedBuffer = new byte[maxBlockSize];
    bufferOffset = 0;
    bufferSize = 0;
    finished = false;
}
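The read side matching the output-stream example above is symmetrical; again a sketch under the same assumptions (passing false keeps frame-descriptor checksum verification enabled, going by the flag's name):

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class ReadLz4Frame {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        try (KafkaLZ4BlockInputStream in =
                new KafkaLZ4BlockInputStream(new FileInputStream("data.lz4"), false)) {
            int n;
            while ((n = in.read(chunk)) != -1) {
                restored.write(chunk, 0, n);
            }
        }
        System.out.println(restored.size() + " bytes decompressed");
    }
}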
Example #13
Source File: Lz4FrameEncoderTest.java From netty-4.1.22 with Apache License 2.0
private Lz4FrameEncoder newEncoder(int blockSize, int maxEncodeSize) {
    Checksum checksum = XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum();
    Lz4FrameEncoder encoder = new Lz4FrameEncoder(LZ4Factory.fastestInstance(), true, blockSize, checksum, maxEncodeSize);
    encoder.handlerAdded(ctx);
    return encoder;
}
Example #14
Source File: LZ4CompressorTest.java From kylin-on-parquet-v2 with Apache License 2.0
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.out.println("args[0] must be data file path");
        return;
    }
    LZ4Factory factory = LZ4Factory.fastestInstance();

    byte[] data = Files.toByteArray(new File(args[0]));
    final int decompressedLength = data.length;

    // compress data
    LZ4Compressor compressor = factory.fastCompressor();
    long start = System.currentTimeMillis();
    int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
    byte[] compressed = new byte[maxCompressedLength];
    int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);
    System.out.println("compress take:" + (System.currentTimeMillis() - start));
    System.out.println(compressedLength);

    // decompress data
    // - method 1: when the decompressed length is known
    LZ4FastDecompressor decompressor = factory.fastDecompressor();
    start = System.currentTimeMillis();
    byte[] restored = new byte[decompressedLength];
    int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
    System.out.println("decompress take:" + (System.currentTimeMillis() - start));
    System.out.println(decompressedLength);
    // compressedLength == compressedLength2

    // - method 2: when the compressed length is known (a little slower);
    //   the destination buffer needs to be over-sized
    LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
    int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
}
Example #15
Source File: LZ4CompressedColumnWriter.java From kylin-on-parquet-v2 with Apache License 2.0
public LZ4CompressedColumnWriter(int valLen, int rowCnt, int compressBlockSize, OutputStream output) {
    this.valLen = valLen;
    this.numValInBlock = compressBlockSize / valLen;
    this.blockCnt = rowCnt / numValInBlock;
    if (rowCnt % numValInBlock != 0) {
        blockCnt++;
    }
    this.compressor = LZ4Factory.fastestInstance().highCompressor();
    this.writeBuffer = ByteBuffer.allocate(numValInBlock * valLen);
    this.dataOutput = new DataOutputStream(output);
    this.blockDataWriter = new GeneralColumnDataWriter(blockCnt, dataOutput);
}
Example #16
Source File: FSInputLZ4CompressedColumnReader.java From kylin-on-parquet-v2 with Apache License 2.0
public FSInputLZ4CompressedColumnReader(FSDataInputStream fsInputStream, int columnDataStartOffset,
        int columnDataLength, int rowCount) throws IOException {
    this.rowCount = rowCount;
    this.fsInputStream = fsInputStream;
    int footStartOffset = columnDataStartOffset + columnDataLength - 8;
    fsInputStream.seek(footStartOffset);
    this.numValInBlock = fsInputStream.readInt();
    this.valLen = fsInputStream.readInt();
    fsInputStream.seek(columnDataStartOffset);
    this.currBlockNum = -1;
    this.deCompressor = LZ4Factory.fastestInstance().safeDecompressor();
    this.maxDecompressedLength = numValInBlock * valLen;
}
Example #17
Source File: SinParser.java From Flashtool with GNU General Public License v3.0
public String getDataTypePriv() throws IOException {
    RandomAccessFile fin = new RandomAccessFile(sinfile, "r");
    byte[] res = null;
    byte[] rescomp = null;
    if (dataHeader.mmcfLen > 0) {
        Object block = dataBlocks.firstElement();
        if (block instanceof AddrBlock) {
            res = new byte[(int) ((AddrBlock) block).dataLen];
            fin.seek(getDataOffset() + ((AddrBlock) block).dataOffset);
            fin.read(res);
            fin.close();
        } else {
            rescomp = new byte[(int) ((LZ4ABlock) block).compDataLen];
            fin.seek(getDataOffset() + ((LZ4ABlock) block).dataOffset);
            fin.read(rescomp);
            fin.close();
            LZ4Factory factory = LZ4Factory.fastestInstance();
            LZ4FastDecompressor decomp = factory.fastDecompressor();
            res = decomp.decompress(rescomp, (int) ((LZ4ABlock) block).uncompDataLen);
        }
    } else {
        res = new byte[blocks.blocks[0].length];
        fin.seek(getDataOffset());
        fin.read(res);
        fin.close();
    }
    return getDataTypePriv(res);
}
Example #18
Source File: CompressionUtil.java From consulo with Apache License 2.0
protected static LZ4FastDecompressor decompressor() {
    return LZ4Factory.fastestJavaInstance().fastDecompressor();
}
Example #19
Source File: CompressionUtil.java From consulo with Apache License 2.0
private static LZ4Compressor compressor() {
    return LZ4Factory.fastestJavaInstance().fastCompressor();
}
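Note the use of fastestJavaInstance() in Examples #18 and #19 rather than fastestInstance(): to my understanding it restricts the choice to the pure-Java implementations (the unsafe-based one where available, otherwise the safe one) and never loads the JNI bindings, which is a sensible choice inside an IDE process.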
Example #20
Source File: ODAGStashLZ4Wrapper.java From Arabesque with Apache License 2.0
public ODAGStashLZ4Wrapper() {
    lz4factory = LZ4Factory.fastestInstance();
    reset();
}
Example #21
Source File: LZ4ObjectCache.java From Arabesque with Apache License 2.0
public LZ4ObjectCache() {
    lz4factory = LZ4Factory.fastestInstance();
    reset();
}
Example #22
Source File: LZ4TileCacheCompressor.java From recast4j with zlib License
@Override
public byte[] decompress(byte[] buf, int offset, int len, int outputlen) {
    return LZ4Factory.fastestInstance().fastDecompressor().decompress(buf, offset, outputlen);
}
Example #23
Source File: LZ4TileCacheCompressor.java From recast4j with zlib License
@Override
public byte[] compress(byte[] buf) {
    return LZ4Factory.fastestInstance().highCompressor().compress(buf);
}
Example #24
Source File: LZ4.java From PlayerSQL with GNU General Public License v2.0
public static byte[] compress(byte[] input) {
    return LZ4Factory.fastestInstance().highCompressor().compress(input);
}
Example #25
Source File: LZ4.java From PlayerSQL with GNU General Public License v2.0
public static byte[] decompress(byte[] input, int uncompressedLength) {
    return LZ4Factory.fastestInstance().fastDecompressor().decompress(input, uncompressedLength);
}
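The fast decompressor needs the exact uncompressed length, so callers of a pair like Examples #24 and #25 typically store that length next to the compressed bytes. A sketch of one common way to do it; the 4-byte length prefix is an assumption for illustration, not part of the PlayerSQL format:

import java.nio.ByteBuffer;

public class LengthPrefixedLz4 {
    // Prepend the uncompressed length so decompress() can size its output buffer.
    public static byte[] pack(byte[] raw) {
        byte[] compressed = LZ4.compress(raw);
        return ByteBuffer.allocate(4 + compressed.length)
                .putInt(raw.length)
                .put(compressed)
                .array();
    }

    public static byte[] unpack(byte[] packed) {
        ByteBuffer buf = ByteBuffer.wrap(packed);
        int uncompressedLength = buf.getInt();
        byte[] compressed = new byte[buf.remaining()];
        buf.get(compressed);
        return LZ4.decompress(compressed, uncompressedLength);
    }
}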
Example #26
Source File: Lz4BlockCompressor.java From flink with Apache License 2.0
public Lz4BlockCompressor() {
    this.compressor = LZ4Factory.fastestInstance().fastCompressor();
}