Java Code Examples for java.nio.ByteBuffer#wrap()

The following examples show how to use java.nio.ByteBuffer#wrap(). Each example is drawn from an open-source project; the source file, project, and license are noted in the header above each example.
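Both overloads appear throughout the examples: ByteBuffer.wrap(byte[]) creates a buffer backed by the whole array (position 0, limit and capacity equal to the array length), while ByteBuffer.wrap(byte[], offset, length) backs the buffer with the same array but starts at position offset with limit offset + length. In either case the array is shared, not copied, so writes through the buffer are visible in the array. A minimal, self-contained sketch (the class name WrapSketch and the sample string are illustrative, not taken from any of the projects below):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class WrapSketch {
    public static void main(String[] args) {
        byte[] backing = "hello world".getBytes(StandardCharsets.UTF_8);

        // Wrap the whole array: position = 0, limit = capacity = backing.length (11).
        ByteBuffer whole = ByteBuffer.wrap(backing);
        System.out.println(whole.remaining()); // 11

        // Wrap a sub-range: position = 6, limit = 6 + 5, capacity still 11.
        // The backing array is shared, so writing through the buffer mutates it.
        ByteBuffer part = ByteBuffer.wrap(backing, 6, 5);
        part.put((byte) 'W'); // overwrites the 'w' at index 6
        System.out.println(new String(backing, StandardCharsets.UTF_8)); // hello World
    }
}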
Example 1
Source File: UnsubAckMessageTest.java    From xenqtt with Apache License 2.0
@Test
public void testMessageIds() throws Exception {

	for (int i = 0; i < 0xffff; i++) {
		buf = ByteBuffer.wrap(new byte[] { (byte) 0xb0, 0x02, (byte) (i >> 8), (byte) (i & 0xff) });
		msg = new UnsubAckMessage(i);
		assertEquals(buf, msg.buffer);
		assertEquals(i, msg.getMessageId());
	}

	for (int i = 0; i < 0xffff; i++) {
		buf = ByteBuffer.wrap(new byte[] { (byte) 0xb0, 0x02, (byte) (i >> 8), (byte) (i & 0xff) });
		msg = new UnsubAckMessage(buf, 0);
		assertEquals(i, msg.getMessageId());
	}
}
 
Example 2
Source File: Request.java    From cloudstack with Apache License 2.0
public ByteBuffer[] toBytes() {
    final ByteBuffer[] buffers = new ByteBuffer[2];
    ByteBuffer tmp;

    if (_content == null) {
        _content = s_gson.toJson(_cmds, _cmds.getClass());
    }
    tmp = ByteBuffer.wrap(_content.getBytes());
    int capacity = tmp.capacity();
    /* Check if we need to compress the data */
    if (capacity >= 8192) {
        tmp = doCompress(tmp, capacity);
        _flags |= FLAG_COMPRESSED;
    }
    buffers[1] = tmp;
    buffers[0] = serializeHeader(capacity);

    return buffers;
}
 
Example 3
Source File: AbstractInternalHDRPercentiles.java    From Elasticsearch with Apache License 2.0
@Override
protected void doReadFrom(StreamInput in) throws IOException {
    valueFormatter = ValueFormatterStreams.readOptional(in);
    keys = new double[in.readInt()];
    for (int i = 0; i < keys.length; ++i) {
        keys[i] = in.readDouble();
    }
    long minBarForHighestToLowestValueRatio = in.readLong();
    final int serializedLen = in.readVInt();
    byte[] bytes = new byte[serializedLen];
    in.readBytes(bytes, 0, serializedLen);
    ByteBuffer stateBuffer = ByteBuffer.wrap(bytes);
    try {
        state = DoubleHistogram.decodeFromCompressedByteBuffer(stateBuffer, minBarForHighestToLowestValueRatio);
    } catch (DataFormatException e) {
        throw new IOException("Failed to decode DoubleHistogram for aggregation [" + name + "]", e);
    }
    keyed = in.readBoolean();
}
 
Example 4
Source File: UnsubscribeMessageTest.java    From xenqtt with Apache License 2.0
@Test
public void testInboundCtor() {
	String[] topics = new String[] { "alpha", "beta", "delta", "gamma" };
	UnsubscribeMessage message = new UnsubscribeMessage(ByteBuffer.wrap(PAYLOAD), 29, 0);

	assertSame(MessageType.UNSUBSCRIBE, message.getMessageType());
	assertSame(QoS.AT_LEAST_ONCE, message.getQoS());
	assertEquals(1, message.getQoSLevel());
	assertFalse(message.isDuplicate());
	assertFalse(message.isRetain());

	assertEquals(1, message.getMessageId());
	assertArrayEquals(topics, message.getTopics());

	assertArrayEquals(PAYLOAD, message.buffer.array());
}
 
Example 5
Source File: FileManager.java    From logging-log4j2 with Apache License 2.0
/**
 * Creates a FileManager.
 * @param name The name of the File.
 * @param data The FactoryData
 * @return The FileManager for the File.
 */
@Override
public FileManager createManager(final String name, final FactoryData data) {
    final File file = new File(name);
    try {
        FileUtils.makeParentDirs(file);
        final int actualSize = data.bufferedIo ? data.bufferSize : Constants.ENCODER_BYTE_BUFFER_SIZE;
        final ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[actualSize]);
        final FileOutputStream fos = data.createOnDemand ? null : new FileOutputStream(file, data.append);
        final boolean writeHeader = file.exists() && file.length() == 0;
        final FileManager fm = new FileManager(data.getLoggerContext(), name, fos, data.append, data.locking,
                data.createOnDemand, data.advertiseURI, data.layout,
                data.filePermissions, data.fileOwner, data.fileGroup, writeHeader, byteBuffer);
        if (fos != null && fm.attributeViewEnabled) {
            fm.defineAttributeView(file.toPath());
        }
        return fm;
    } catch (final IOException ex) {
        LOGGER.error("FileManager (" + name + ") " + ex, ex);
    }
    return null;
}
 
Example 6
Source File: TestBlockReaderLocal.java    From big-c with Apache License 2.0
@Override
public void doTest(BlockReaderLocal reader, byte original[])
    throws IOException {
  Assert.assertTrue(!reader.getVerifyChecksum());
  ByteBuffer buf = ByteBuffer.wrap(new byte[TEST_LENGTH]);
  reader.skip(1);
  readFully(reader, buf, 1, 9);
  assertArrayRegionsEqual(original, 1, buf.array(), 1, 9);
  readFully(reader, buf, 10, 100);
  assertArrayRegionsEqual(original, 10, buf.array(), 10, 100);
  reader.forceAnchorable();
  readFully(reader, buf, 110, 700);
  assertArrayRegionsEqual(original, 110, buf.array(), 110, 700);
  reader.forceUnanchorable();
  reader.skip(1); // skip from offset 810 to offset 811
  readFully(reader, buf, 811, 5);
  assertArrayRegionsEqual(original, 811, buf.array(), 811, 5);
}
 
Example 7
Source File: CompletionHandlerRelease.java    From jdk8u_jdk with GNU General Public License v2.0
@Test
public void testRead() throws Exception {
    try (Server server = new Server();
         AsynchronousSocketChannel ch =
             AsynchronousSocketChannel.open(GROUP)) {
        ch.connect(server.address()).get();

        try (AsynchronousSocketChannel sc = server.accept().get()) {
            ByteBuffer src = ByteBuffer.wrap("hello".getBytes("UTF-8"));
            sc.setOption(SO_SNDBUF, src.remaining());
            sc.write(src).get();

            CountDownLatch latch = new CountDownLatch(1);
            Handler<Integer,Object> handler =
                new Handler<Integer,Object>("read", latch);
            ReferenceQueue queue = new ReferenceQueue<WeakReference>();
            WeakReference<Object> ref =
                new WeakReference<Object>(handler, queue);

            ByteBuffer dst = ByteBuffer.allocate(64);
            ch.read(dst, null, handler);

            try { latch.await(); } catch (InterruptedException ignore) { }

            handler = null;
            waitForRefToClear(ref, queue);
        }
    }
}
 
Example 8
Source File: Text.java    From RDFS with Apache License 2.0
/**
 * Finds any occurrence of <code>what</code> in the backing
 * buffer, starting at position <code>start</code>. The starting
 * position is measured in bytes and the return value is in
 * terms of byte position in the buffer. The backing buffer is
 * not converted to a string for this operation.
 * @return byte position of the first occurrence of the search
 *         string in the UTF-8 buffer or -1 if not found
 */
public int find(String what, int start) {
  try {
    ByteBuffer src = ByteBuffer.wrap(this.bytes,0,this.length);
    ByteBuffer tgt = encode(what);
    byte b = tgt.get();
    src.position(start);
        
    while (src.hasRemaining()) {
      if (b == src.get()) { // matching first byte
        src.mark(); // save position in loop
        tgt.mark(); // save position in target
        boolean found = true;
        int pos = src.position()-1;
        while (tgt.hasRemaining()) {
          if (!src.hasRemaining()) { // src expired first
            tgt.reset();
            src.reset();
            found = false;
            break;
          }
          if (!(tgt.get() == src.get())) {
            tgt.reset();
            src.reset();
            found = false;
            break; // no match
          }
        }
        if (found) return pos;
      }
    }
    return -1; // not found
  } catch (CharacterCodingException e) {
    // can't get here
    e.printStackTrace();
    return -1;
  }
}
 
Example 9
Source File: GfsSeekableByteChannel.java    From ParallelGit with Apache License 2.0
@Override
public int write(ByteBuffer src) throws ClosedChannelException {
  checkClosed();
  checkWriteAccess();
  synchronized(this) {
    if(buffer.remaining() < src.remaining()) {
      int position = buffer.position();
      byte[] bytes = new byte[position + src.remaining()];
      arraycopy(buffer.array(), buffer.arrayOffset(), bytes, 0, position);
      buffer = ByteBuffer.wrap(bytes);
      buffer.position(position);
    }
    return copyBytes(buffer, src);
  }
}
 
Example 10
Source File: ByteArray.java    From CloverETL-Engine with GNU Lesser General Public License v2.1
public ByteArray append(float value) {
	int newlen = FLOAT_SIZE_BYTES + count;
	ensureCapacity(newlen);
	ByteBuffer buf = ByteBuffer.wrap(this.value, count, FLOAT_SIZE_BYTES);
	buf.putFloat(value);
	count = newlen;
	return this;
}
 
Example 11
Source File: HistoricalBlockManager.java    From nyzoVerifier with The Unlicense
private static void buildOffsetFile() {

    // This is a brute-force process for finding which offset file to build. Just before a consolidated file is
    // written by the block-file consolidator, its corresponding offset file is deleted to ensure that stale offset
    // files do not exist. This process checks all consolidated files backward from the frozen edge. When a
    // consolidated file without an offset file is found, the offset file is built.
    long offsetFileHeight = -1L;
    for (long height = BlockManager.getFrozenEdgeHeight(); height >= 0 && offsetFileHeight < 0;
         height -= BlockManager.blocksPerFile) {
        if (BlockManager.consolidatedFileForBlockHeight(height).exists() && !offsetFileForHeight(height).exists()) {
            offsetFileHeight = height;
        }
    }

    if (offsetFileHeight >= 0) {
        // Calculate the offsets.
        File consolidatedFile = BlockManager.consolidatedFileForBlockHeight(offsetFileHeight);
        int[] offsets = blockOffsetsForConsolidatedFile(consolidatedFile);

        // Write the offsets to the file.
        byte[] offsetBytes = new byte[offsets.length * 4];
        ByteBuffer offsetBuffer = ByteBuffer.wrap(offsetBytes);
        for (int offset : offsets) {
            offsetBuffer.putInt(offset);
        }
        try {
            Files.write(Paths.get(offsetFileForHeight(offsetFileHeight).getAbsolutePath()), offsetBytes);
        } catch (Exception ignored) { }
    }
}
 
Example 12
Source File: BitPackedBuffer.java    From scelight with Apache License 2.0
/**
 * Creates a new {@link BitPackedBuffer}.
 * 
 * @param data raw byte data
 * @param typeInfos type info array
 * @param bigEndian tells if byte order is big endian
 */
public BitPackedBuffer( final byte[] data, final TypeInfo[] typeInfos, final boolean bigEndian ) {
	this.data = data;
	this.typeInfos = typeInfos;
	this.bigEndian = bigEndian;
	
	wrapper = ByteBuffer.wrap( data );
}
 
Example 13
Source File: ObjectFileScrubbersTest.java    From buck with Apache License 2.0
@Test
public void testPutLittleEndianLongPositive() {
  long value = 0x123456789ABCDEF0L;
  byte[] buffer = new byte[8];
  ByteBuffer bufferWrapper = ByteBuffer.wrap(buffer);
  ObjectFileScrubbers.putLittleEndianLong(bufferWrapper, value);
  assertThat(buffer[0], equalTo((byte) 0xF0));
  assertThat(buffer[1], equalTo((byte) 0xDE));
  assertThat(buffer[2], equalTo((byte) 0xBC));
  assertThat(buffer[3], equalTo((byte) 0x9A));
  assertThat(buffer[4], equalTo((byte) 0x78));
  assertThat(buffer[5], equalTo((byte) 0x56));
  assertThat(buffer[6], equalTo((byte) 0x34));
  assertThat(buffer[7], equalTo((byte) 0x12));
}
 
Example 14
Source File: Identicon.java    From Identiconizer with Apache License 2.0
private static byte[] makeTextBlock(String text) {
    byte[] block = new byte[text.length() + 1];
    ByteBuffer blockBuffer = ByteBuffer.wrap(block);
    // http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html
    // put the text as the chunk's keyword
    blockBuffer.put(text.getBytes());
    // followed by a null separator
    blockBuffer.put((byte) 0);
    // we leave the chunk's text empty

    return block;
}
 
Example 15
Source File: DumpDoubleColumnBinaryMaker.java    From multiple-dimension-spread with Apache License 2.0
@Override
public ColumnBinary toBinary(final ColumnBinaryMakerConfig commonConfig , final ColumnBinaryMakerCustomConfigNode currentConfigNode , final IColumn column ) throws IOException{
  ColumnBinaryMakerConfig currentConfig = commonConfig;
  if( currentConfigNode != null ){
    currentConfig = currentConfigNode.getCurrentConfig();
  }
  byte[] binaryRaw = new byte[ getBinaryLength( column.size() ) ];
  ByteBuffer lengthBuffer = ByteBuffer.wrap( binaryRaw );
  lengthBuffer.putInt( column.size() );
  lengthBuffer.putInt( column.size() * Double.BYTES );

  ByteBuffer nullFlagBuffer = ByteBuffer.wrap( binaryRaw , Integer.BYTES * 2 , column.size() );
  DoubleBuffer doubleBuffer = ByteBuffer.wrap( binaryRaw , ( Integer.BYTES * 2 + column.size() ) , ( column.size() * Double.BYTES ) ).asDoubleBuffer();

  int rowCount = 0;
  for( int i = 0 ; i < column.size() ; i++ ){
    ICell cell = column.get(i);
    if( cell.getType() == ColumnType.NULL ){
      nullFlagBuffer.put( (byte)1 );
      doubleBuffer.put( (double)0 );
    }
    else{
      rowCount++;
      PrimitiveCell byteCell = (PrimitiveCell) cell;
      nullFlagBuffer.put( (byte)0 );
      doubleBuffer.put( byteCell.getRow().getDouble() );
    }
  }

  byte[] binary = currentConfig.compressorClass.compress( binaryRaw , 0 , binaryRaw.length );

  return new ColumnBinary( this.getClass().getName() , currentConfig.compressorClass.getClass().getName() , column.getColumnName() , ColumnType.DOUBLE , rowCount , binaryRaw.length , rowCount * Double.BYTES , -1 , binary , 0 , binary.length , null );
}
 
Example 16
Source File: TJSONProtocol.java    From dapeng-soa with Apache License 2.0
@Override
public ByteBuffer readBinary() throws TException {
  return ByteBuffer.wrap(readJSONBase64());
}
 
Example 17
Source File: ConversionUtils.java    From usergrid with Apache License 2.0
public static ByteBuffer bytebuffer( Object obj ) {
    if ( obj instanceof ByteBuffer ) {
        return ( ( ByteBuffer ) obj ).duplicate();
    }
    return ByteBuffer.wrap( bytes( obj ) );
}
 
Example 18
Source File: LumberjackFrameHandler.java    From nifi with Apache License 2.0
public void handle(final LumberjackFrame frame, final ChannelResponder<SocketChannel> responder, final String sender)
        throws IOException, InterruptedException {

    final Map<String, String> metadata = EventFactoryUtil.createMapWithSender(sender.toString());
    metadata.put(LumberjackMetadata.SEQNUMBER_KEY, String.valueOf(frame.getSeqNumber()));
    String line = "";

    /* If frameType is a data Frame, Handle the Lumberjack data payload, iterating over it and extracting
    keys and values into metadata.

    All keys are inserted into metadata with the exception of line that gets added into the body of the event
    */
    if (frame.getFrameType() == 0x44) {
        ByteBuffer currentData = ByteBuffer.wrap(frame.getPayload());
        long pairCount = currentData.getInt() & 0x00000000ffffffffL;
        Map<String,String> fields = new HashMap<>();
        for (int i = 0; i < pairCount; i++) {
            long keyLength = currentData.getInt() & 0x00000000ffffffffL;
            byte[] bytes = new byte[(int) keyLength];
            currentData.get(bytes);
            String key = new String(bytes);
            long valueLength = currentData.getInt() & 0x00000000ffffffffL;
            bytes = new byte[(int) valueLength];
            currentData.get(bytes);
            String value = new String(bytes);

            if (key.equals("line")) {
                line = value;
            } else {
                fields.put(key, value);
            }
        }
        // Serialize the fields into a String to push it via metadata
        Gson serialFields = new Gson();

        metadata.put("lumberjack.fields", serialFields.toJson(fields).toString());

        // queue the raw event blocking until space is available, reset the buffer
        final E event = eventFactory.create(line.getBytes(), metadata, responder);
        events.offer(event);
    } else if (frame.getFrameType() == 0x4A ) {
        logger.error("Data type was JSON. JSON payload aren't yet supported, pending the documentation of Lumberjack protocol v2");
    }
}
 
Example 19
Source File: AggRecord.java    From kinesis-aggregation with Apache License 2.0
/**
 * Convert the aggregated data in this record into a single PutRecordRequest.
 * This method has no side effects (i.e. it will not clear the current contents
 * of the aggregated record).
 * 
 * @param streamName
 *            The Kinesis stream name where this PutRecordRequest will be sent.
 * @return A PutRecordRequest containing all the current data in this aggregated
 *         record.
 */
public PutRecordRequest toPutRecordRequest(String streamName) {
	byte[] recordBytes = toRecordBytes();
	ByteBuffer bb = ByteBuffer.wrap(recordBytes);
	return new PutRecordRequest().withStreamName(streamName).withExplicitHashKey(getExplicitHashKey())
			.withPartitionKey(getPartitionKey()).withData(bb);
}
 
Example 20
Source File: Node.java    From bt with The Unlicense
/**
 * Saves the routing table to a file.
 *
 * @param saveTo the file to save to
 * @throws IOException
 */
void saveTable(Path saveTo) throws IOException {
	// don't persist in test mode
	if(!Files.isDirectory(saveTo.getParent()))
		return;
	
	Key currentRootID = getRootID();
	
	// called in an uninitialized state, no point in overwriting the table
	if(currentRootID == null)
		return;
	
	ByteBuffer tableBuffer = AnonAllocator.allocate(50*1024*1024);
	
	
	Map<String,Object> tableMap = new TreeMap<>();

	RoutingTable table = routingTableCOW;
	
	Stream<Map<String, Object>> main = table.stream().map(RoutingTableEntry::getBucket).flatMap(b -> b.entriesStream().map(KBucketEntry::toBencoded));
	Stream<Map<String, Object>> replacements = table.stream().map(RoutingTableEntry::getBucket).flatMap(b -> b.replacementsStream().map(KBucketEntry::toBencoded));
	
	tableMap.put("mainEntries", main);
	tableMap.put("replacements", replacements);
	
	ByteBuffer doubleBuf = ByteBuffer.wrap(new byte[8]);
	doubleBuf.putDouble(0, dht.getEstimator().getRawDistanceEstimate());
	tableMap.put("log2estimate", doubleBuf);
	
	tableMap.put("timestamp", System.currentTimeMillis());
	tableMap.put("oldKey", currentRootID.getHash());
	
	new BEncoder().encodeInto(tableMap, tableBuffer);
	
	Path tempFile = Files.createTempFile(saveTo.getParent(), "saveTable", "tmp");
	
	try(SeekableByteChannel chan = Files.newByteChannel(tempFile, StandardOpenOption.WRITE)) {
		chan.write(tableBuffer);
		chan.close();
		Files.move(tempFile, saveTo, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
	}

}