Java Code Examples for io.netty.buffer.ByteBufAllocator#heapBuffer()

The following examples show how to use io.netty.buffer.ByteBufAllocator#heapBuffer(). The original project and source file for each example are noted above it.
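
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the basic allocate/write/release pattern around heapBuffer(). The class name and the 256-byte initial capacity are illustrative placeholders.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.CharsetUtil;

public final class HeapBufferExample {
    public static void main(String[] args) {
        // Any ByteBufAllocator works; the default allocator is used here purely for illustration.
        ByteBufAllocator alloc = ByteBufAllocator.DEFAULT;

        // Allocate a heap buffer with an initial capacity of 256 bytes.
        ByteBuf buf = alloc.heapBuffer(256);
        try {
            buf.writeBytes("hello".getBytes(CharsetUtil.UTF_8));
            // Heap buffers are backed by a byte[], so array() and arrayOffset() are available.
            System.out.println(buf.toString(CharsetUtil.UTF_8));
        } finally {
            // ByteBuf is reference counted; release it when done.
            buf.release();
        }
    }
}
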
Example 1
Source File: SpdyHeaderBlockZlibEncoder.java    From netty-4.1.22 with Apache License 2.0
private ByteBuf encode(ByteBufAllocator alloc, int len) {
    ByteBuf compressed = alloc.heapBuffer(len);
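    // Track buffer ownership: the finally block releases it unless it was successfully returned to the caller.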
    boolean release = true;
    try {
        while (compressInto(compressed)) {
            // Although unlikely, it's possible that the compressed size is larger than the decompressed size
            compressed.ensureWritable(compressed.capacity() << 1);
        }
        release = false;
        return compressed;
    } finally {
        if (release) {
            compressed.release();
        }
    }
}
 
Example 2
Source File: UnixChannelUtilTest.java    From netty-4.1.22 with Apache License 2.0
private static void testIsBufferCopyNeededForWrite(ByteBufAllocator alloc) {
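    // isBufferCopyNeededForWrite: a direct buffer can be written natively as-is, while a heap buffer must first be copied into direct memory.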
    ByteBuf byteBuf = alloc.directBuffer();
    assertFalse(isBufferCopyNeededForWrite(byteBuf, IOV_MAX));
    assertFalse(isBufferCopyNeededForWrite(byteBuf.asReadOnly(), IOV_MAX));
    assertTrue(byteBuf.release());

    byteBuf = alloc.heapBuffer();
    assertTrue(isBufferCopyNeededForWrite(byteBuf, IOV_MAX));
    assertTrue(isBufferCopyNeededForWrite(byteBuf.asReadOnly(), IOV_MAX));
    assertTrue(byteBuf.release());

    assertCompositeByteBufIsBufferCopyNeededForWrite(alloc, 2, 0, false);
    assertCompositeByteBufIsBufferCopyNeededForWrite(alloc, IOV_MAX + 1, 0, true);
    assertCompositeByteBufIsBufferCopyNeededForWrite(alloc, 0, 2, true);
    assertCompositeByteBufIsBufferCopyNeededForWrite(alloc, 1, 1, true);
}
 
Example 3
Source File: ChunkedFile.java    From netty-4.1.22 with Apache License 2.0
@Override
public ByteBuf readChunk(ByteBufAllocator allocator) throws Exception {
    long offset = this.offset;
    if (offset >= endOffset) {
        return null;
    }

    int chunkSize = (int) Math.min(this.chunkSize, endOffset - offset);
    // Check if the buffer is backed by a byte array. If so, we can optimize it a bit and save a copy.

    ByteBuf buf = allocator.heapBuffer(chunkSize);
    boolean release = true;
    try {
        file.readFully(buf.array(), buf.arrayOffset(), chunkSize);
        buf.writerIndex(chunkSize);
        this.offset = offset + chunkSize;
        release = false;
        return buf;
    } finally {
        if (release) {
            buf.release();
        }
    }
}
 
Example 4
Source File: SpdyHeaderBlockZlibEncoder.java    From netty4.0.27Learn with Apache License 2.0
private ByteBuf encode(ByteBufAllocator alloc, int len) {
    ByteBuf compressed = alloc.heapBuffer(len);
    boolean release = true;
    try {
        while (compressInto(compressed)) {
            // Although unlikely, it's possible that the compressed size is larger than the decompressed size
            compressed.ensureWritable(compressed.capacity() << 1);
        }
        release = false;
        return compressed;
    } finally {
        if (release) {
            compressed.release();
        }
    }
}
 
Example 5
Source File: ServletOutputStream.java    From spring-boot-protocol with Apache License 2.0
/**
 * Allocates a buffer of the requested length.
 * @param allocator the buffer allocator to use
 * @param len the required byte length
 * @return ByteBuf
 */
protected ByteBuf allocByteBuf(ByteBufAllocator allocator, int len){
    ByteBuf ioByteBuf;
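    // Large payloads prefer a direct buffer, but fall back to heap when used direct memory would reach 80% of the limit; small payloads always use a heap buffer.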
    if(len > responseWriterChunkMaxHeapByteLength){
        if(PlatformDependent.usedDirectMemory() + len >= PlatformDependent.maxDirectMemory() * 0.8F){
            ioByteBuf = allocator.heapBuffer(len);
        }else {
            ioByteBuf = allocator.directBuffer(len);
        }
    }else {
        ioByteBuf = allocator.heapBuffer(len);
    }
    return ioByteBuf;
}
 
Example 6
Source File: AbstractSslHandlerThroughputBenchmark.java    From netty-4.1.22 with Apache License 2.0
@Override
ByteBuf newBuffer(ByteBufAllocator allocator, int size) {
    return allocator.heapBuffer(size);
}
 
Example 7
Source File: SpdyHeaderBlockJZlibEncoder.java    From netty-4.1.22 with Apache License 2.0
private ByteBuf encode(ByteBufAllocator alloc) {
    boolean release = true;
    ByteBuf out = null;
    try {
        int oldNextInIndex = z.next_in_index;
        int oldNextOutIndex = z.next_out_index;

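        // Worst-case deflate output size: roughly the input length plus 0.1% plus 12 bytes.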
        int maxOutputLength = (int) Math.ceil(z.next_in.length * 1.001) + 12;
        out = alloc.heapBuffer(maxOutputLength);
        z.next_out = out.array();
        z.next_out_index = out.arrayOffset() + out.writerIndex();
        z.avail_out = maxOutputLength;

        int resultCode;
        try {
            resultCode = z.deflate(JZlib.Z_SYNC_FLUSH);
        } finally {
            out.skipBytes(z.next_in_index - oldNextInIndex);
        }
        if (resultCode != JZlib.Z_OK) {
            throw new CompressionException("compression failure: " + resultCode);
        }

        int outputLength = z.next_out_index - oldNextOutIndex;
        if (outputLength > 0) {
            out.writerIndex(out.writerIndex() + outputLength);
        }
        release = false;
        return out;
    } finally {
        // Dereference the external references explicitly to tell the VM that
        // the allocated byte arrays are temporary so that the call stack
        // can be utilized.
        // I'm not sure if the modern VMs do this optimization though.
        z.next_in = null;
        z.next_out = null;
        if (release && out != null) {
            out.release();
        }
    }
}
 
Example 8
Source File: SpdyHeaderBlockZlibDecoder.java    From netty-4.1.22 with Apache License 2.0
private void ensureBuffer(ByteBufAllocator alloc) {
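    // Lazily allocate the output buffer on first use and guarantee room for at least one more byte.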
    if (decompressed == null) {
        decompressed = alloc.heapBuffer(DEFAULT_BUFFER_CAPACITY);
    }
    decompressed.ensureWritable(1);
}
 
Example 9
Source File: HttpPostRequestDecoderTest.java    From netty-4.1.22 with Apache License 2.0
@Test
public void testNoZeroOut() throws Exception {
    final String boundary = "E832jQp_Rq2ErFmAduHSR8YlMSm0FCY";

    final DefaultHttpDataFactory aMemFactory = new DefaultHttpDataFactory(false);

    DefaultHttpRequest aRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1,
                                                         HttpMethod.POST,
                                                         "http://localhost");
    aRequest.headers().set(HttpHeaderNames.CONTENT_TYPE,
                           "multipart/form-data; boundary=" + boundary);
    aRequest.headers().set(HttpHeaderNames.TRANSFER_ENCODING,
                           HttpHeaderValues.CHUNKED);

    HttpPostRequestDecoder aDecoder = new HttpPostRequestDecoder(aMemFactory, aRequest);

    final String aData = "some data would be here. the data should be long enough that it " +
                         "will be longer than the original buffer length of 256 bytes in " +
                         "the HttpPostRequestDecoder in order to trigger the issue. Some more " +
                         "data just to be on the safe side.";

    final String body =
            "--" + boundary + "\r\n" +
            "Content-Disposition: form-data; name=\"root\"\r\n" +
            "Content-Type: text/plain\r\n" +
            "\r\n" +
            aData +
            "\r\n" +
            "--" + boundary + "--\r\n";

    byte[] aBytes = body.getBytes();

    int split = 125;

    ByteBufAllocator aAlloc = new UnpooledByteBufAllocator(true);
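    // heapBuffer(initialCapacity, maxCapacity): passing equal values yields fixed-capacity buffers.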
    ByteBuf aSmallBuf = aAlloc.heapBuffer(split, split);
    ByteBuf aLargeBuf = aAlloc.heapBuffer(aBytes.length - split, aBytes.length - split);

    aSmallBuf.writeBytes(aBytes, 0, split);
    aLargeBuf.writeBytes(aBytes, split, aBytes.length - split);

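    // Offer the body in two chunks so the decoder has to grow past its initial 256-byte buffer.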
    aDecoder.offer(new DefaultHttpContent(aSmallBuf));
    aDecoder.offer(new DefaultHttpContent(aLargeBuf));

    aDecoder.offer(LastHttpContent.EMPTY_LAST_CONTENT);

    assertTrue("Should have a piece of data", aDecoder.hasNext());

    InterfaceHttpData aDecodedData = aDecoder.next();
    assertEquals(InterfaceHttpData.HttpDataType.Attribute, aDecodedData.getHttpDataType());

    Attribute aAttr = (Attribute) aDecodedData;
    assertEquals(aData, aAttr.getValue());

    aDecodedData.release();
    aDecoder.destroy();
}
 
Example 10
Source File: SpdyHeaderBlockJZlibEncoder.java    From netty4.0.27Learn with Apache License 2.0
private ByteBuf encode(ByteBufAllocator alloc) {
    boolean release = true;
    ByteBuf out = null;
    try {
        int oldNextInIndex = z.next_in_index;
        int oldNextOutIndex = z.next_out_index;

        int maxOutputLength = (int) Math.ceil(z.next_in.length * 1.001) + 12;
        out = alloc.heapBuffer(maxOutputLength);
        z.next_out = out.array();
        z.next_out_index = out.arrayOffset() + out.writerIndex();
        z.avail_out = maxOutputLength;

        int resultCode;
        try {
            resultCode = z.deflate(JZlib.Z_SYNC_FLUSH);
        } finally {
            out.skipBytes(z.next_in_index - oldNextInIndex);
        }
        if (resultCode != JZlib.Z_OK) {
            throw new CompressionException("compression failure: " + resultCode);
        }

        int outputLength = z.next_out_index - oldNextOutIndex;
        if (outputLength > 0) {
            out.writerIndex(out.writerIndex() + outputLength);
        }
        release = false;
        return out;
    } finally {
        // Dereference the external references explicitly to tell the VM that
        // the allocated byte arrays are temporary so that the call stack
        // can be utilized.
        // I'm not sure if the modern VMs do this optimization though.
        z.next_in = null;
        z.next_out = null;
        if (release && out != null) {
            out.release();
        }
    }
}
 
Example 11
Source File: SpdyHeaderBlockZlibDecoder.java    From netty4.0.27Learn with Apache License 2.0
private void ensureBuffer(ByteBufAllocator alloc) {
    if (decompressed == null) {
        decompressed = alloc.heapBuffer(DEFAULT_BUFFER_CAPACITY);
    }
    decompressed.ensureWritable(1);
}
 
Example 12
Source File: SpdyHeaderBlockRawEncoder.java    From netty4.0.27Learn with Apache License 2.0
@Override
public ByteBuf encode(ByteBufAllocator alloc, SpdyHeadersFrame frame) throws Exception {
    Set<String> names = frame.headers().names();
    int numHeaders = names.size();
    if (numHeaders == 0) {
        return Unpooled.EMPTY_BUFFER;
    }
    if (numHeaders > SPDY_MAX_NV_LENGTH) {
        throw new IllegalArgumentException(
                "header block contains too many headers");
    }
    ByteBuf headerBlock = alloc.heapBuffer();
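    // Name/value block layout: a header count, then for each header a length-prefixed name followed by a length-prefixed, NUL-separated value list.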
    writeLengthField(headerBlock, numHeaders);
    for (String name: names) {
        byte[] nameBytes = name.getBytes("UTF-8");
        writeLengthField(headerBlock, nameBytes.length);
        headerBlock.writeBytes(nameBytes);
        int savedIndex = headerBlock.writerIndex();
        int valueLength = 0;
        writeLengthField(headerBlock, valueLength);
        for (String value: frame.headers().getAll(name)) {
            byte[] valueBytes = value.getBytes("UTF-8");
            if (valueBytes.length > 0) {
                headerBlock.writeBytes(valueBytes);
                headerBlock.writeByte(0);
                valueLength += valueBytes.length + 1;
            }
        }
        if (valueLength != 0) {
            valueLength --;
        }
        if (valueLength > SPDY_MAX_NV_LENGTH) {
            throw new IllegalArgumentException(
                    "header exceeds allowable length: " + name);
        }
        if (valueLength > 0) {
            setLengthField(headerBlock, savedIndex, valueLength);
            headerBlock.writerIndex(headerBlock.writerIndex() - 1);
        }
    }
    return headerBlock;
}
 
Example 13
Source File: HttpPostRequestDecoderTest.java    From netty4.0.27Learn with Apache License 2.0
@Test
public void testNoZeroOut() throws Exception {
    final String boundary = "E832jQp_Rq2ErFmAduHSR8YlMSm0FCY";

    final DefaultHttpDataFactory aMemFactory = new DefaultHttpDataFactory(false);

    DefaultHttpRequest aRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1,
                                                         HttpMethod.POST,
                                                         "http://localhost");
    aRequest.headers().set(HttpHeaders.Names.CONTENT_TYPE,
                           "multipart/form-data; boundary=" + boundary);
    aRequest.headers().set(HttpHeaders.Names.TRANSFER_ENCODING,
                           HttpHeaders.Values.CHUNKED);

    HttpPostRequestDecoder aDecoder = new HttpPostRequestDecoder(aMemFactory, aRequest);

    final String aData = "some data would be here. the data should be long enough that it " +
                         "will be longer than the original buffer length of 256 bytes in " +
                         "the HttpPostRequestDecoder in order to trigger the issue. Some more " +
                         "data just to be on the safe side.";

    final String body =
            "--" + boundary + "\r\n" +
            "Content-Disposition: form-data; name=\"root\"\r\n" +
            "Content-Type: text/plain\r\n" +
            "\r\n" +
            aData +
            "\r\n" +
            "--" + boundary + "--\r\n";

    byte[] aBytes = body.getBytes();

    int split = 125;

    ByteBufAllocator aAlloc = new UnpooledByteBufAllocator(true);
    ByteBuf aSmallBuf = aAlloc.heapBuffer(split, split);
    ByteBuf aLargeBuf = aAlloc.heapBuffer(aBytes.length - split, aBytes.length - split);

    aSmallBuf.writeBytes(aBytes, 0, split);
    aLargeBuf.writeBytes(aBytes, split, aBytes.length - split);

    aDecoder.offer(releaseLater(new DefaultHttpContent(aSmallBuf)));
    aDecoder.offer(releaseLater(new DefaultHttpContent(aLargeBuf)));

    aDecoder.offer(LastHttpContent.EMPTY_LAST_CONTENT);

    assertTrue("Should have a piece of data", aDecoder.hasNext());

    InterfaceHttpData aDecodedData = aDecoder.next();
    assertEquals(InterfaceHttpData.HttpDataType.Attribute, aDecodedData.getHttpDataType());

    Attribute aAttr = (Attribute) aDecodedData;
    assertEquals(aData, aAttr.getValue());

    aDecodedData.release();
    aDecoder.destroy();
}
 
Example 14
Source File: MoreByteBufUtils.java    From Velocity with MIT License
/**
 * Creates a {@link ByteBuf} that will have the best performance with the specified
 * {@code nativeStuff}.
 *
 * @param alloc the {@link ByteBufAllocator} to use
 * @param nativeStuff the native we are working with
 * @return a buffer compatible with the native
 */
public static ByteBuf preferredBuffer(ByteBufAllocator alloc, Native nativeStuff) {
  return nativeStuff.isNative() ? alloc.directBuffer() : alloc.heapBuffer();
}
 
Example 15
Source File: MoreByteBufUtils.java    From Velocity with MIT License
/**
 * Creates a {@link ByteBuf} that will have the best performance with the specified
 * {@code nativeStuff}.
 *
 * @param alloc the {@link ByteBufAllocator} to use
 * @param nativeStuff the native we are working with
 * @param initialCapacity the initial capacity to allocate
 * @return a buffer compatible with the native
 */
public static ByteBuf preferredBuffer(ByteBufAllocator alloc, Native nativeStuff,
    int initialCapacity) {
  return nativeStuff.isNative() ? alloc.directBuffer(initialCapacity) : alloc
      .heapBuffer(initialCapacity);
}