Java Code Examples for com.google.protobuf.ByteString#Output

The following examples show how to use com.google.protobuf.ByteString#Output. Each example is drawn from an open-source project; the source file, project, and license are noted above the snippet.
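ByteString.Output is an OutputStream obtained from ByteString.newOutput(); bytes written to it are buffered internally and can be snapshotted into an immutable ByteString with toByteString(). As a quick orientation for the examples below, here is a minimal sketch of that pattern (the class name ByteStringOutputDemo is purely illustrative):

import com.google.protobuf.ByteString;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ByteStringOutputDemo {
  public static void main(String[] args) throws IOException {
    // newOutput(int) pre-sizes the internal buffer; the stream still grows as needed
    ByteString.Output out = ByteString.newOutput(16);
    out.write(0x01);                                       // write a single byte
    out.write("hello".getBytes(StandardCharsets.UTF_8));   // write a byte array
    ByteString bytes = out.toByteString();                 // snapshot as an immutable ByteString
    System.out.println(bytes.size());                      // prints 6
  }
}
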
Example 1
Source File: ContractFunctionParams.java    From hedera-sdk-java with Apache License 2.0
static ByteString int256(long val, int bitWidth, boolean signed) {
    // don't try to get wider than a `long` as it should just be filled with padding
    bitWidth = Math.min(bitWidth, 64);
    final ByteString.Output output = ByteString.newOutput(bitWidth / 8);

    // write bytes in big-endian order
    for (int i = bitWidth - 8; i >= 0; i -= 8) {
        // widening conversion sign-extends so we don't have to do anything special when
        // truncating a previously widened value
        final byte u8 = (byte) (val >> i);
        output.write(u8);
    }

    // byte padding will sign-extend appropriately
    return leftPad32(output.toByteString(), signed && val < 0);
}
 
Example 2
Source File: GenQueryOutputStream.java    From bazel with Apache License 2.0
private void maybeStartCompression(int additionalBytes) throws IOException {
  if (!compressionEnabled) {
    return;
  }

  if (compressed) {
    return;
  }

  if (bytesWritten + additionalBytes < COMPRESSION_THRESHOLD) {
    return;
  }

  // Compress everything buffered so far and route all future writes through GZIP.
  ByteString.Output compressedBytesOut = ByteString.newOutput();
  GZIPOutputStream gzipOut = new GZIPOutputStream(compressedBytesOut);
  bytesOut.writeTo(gzipOut);
  bytesOut = compressedBytesOut;
  out = gzipOut;
  compressed = true;
}
 
Example 3
Source File: RocksDBMapMutationSet.java    From snowblossom with Apache License 2.0
private ByteString getDBKey(ByteString key, ByteString value)
{
  try
  {
    ByteString.Output out = ByteString.newOutput(100);
    out.write(name_bytes);
    out.write(key.toByteArray());
    out.write(sep);
    out.write(value.toByteArray());
    ByteString w = out.toByteString();
    return w;
  }
  catch(java.io.IOException e)
  {
    throw new RuntimeException(e);
  }
}
 
Example 4
Source File: RocksDBMapMutationSet.java    From jelectrum with MIT License
private ByteString getDBKey(ByteString key)
{
  try
  {
    ByteString.Output out = ByteString.newOutput(100);
    out.write(name_bytes);
    out.write(key.toByteArray());
    out.write(sep);
    ByteString w = out.toByteString();
    return w;
  }
  catch(java.io.IOException e)
  {
    throw new RuntimeException(e);
  }
}
 
Example 5
Source File: SimpleUtxoMgr.java    From jelectrum with MIT License
public static ByteString getKey(ByteString scriptHash, Sha256Hash tx_id, int idx)
{
  try
  {
    // key layout: script hash (32 bytes) || tx id (32 bytes) || output index (4 bytes, little-endian)
    ByteString.Output key_out = ByteString.newOutput(32+32+4);

    key_out.write(scriptHash.toByteArray());
    key_out.write(tx_id.getBytes());

    ByteBuffer bb = ByteBuffer.allocate(4);
    bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
    bb.putInt(idx);
    key_out.write(bb.array());
    return key_out.toByteString();
  }
  catch(java.io.IOException e)
  {
    throw new RuntimeException(e);
  }

}
 
Example 6
Source File: TezCommonUtils.java    From tez with Apache License 2.0
@Private
public static ByteString compressByteArrayToByteString(byte[] inBytes, Deflater deflater) throws IOException {
  deflater.reset();
  ByteString.Output os = ByteString.newOutput();
  DeflaterOutputStream compressOs = null;
  try {
    compressOs = new DeflaterOutputStream(os, deflater);
    compressOs.write(inBytes);
    compressOs.finish();
    ByteString byteString = os.toByteString();
    return byteString;
  } finally {
    if (compressOs != null) {
      compressOs.close();
    }
  }
}
 
Example 7
Source File: FlinkStateInternals.java    From flink-dataflow with Apache License 2.0
@Override
public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException {
	if (value != null) {
		// serialize the coder.
		byte[] coder = InstantiationUtil.serializeObject(elemCoder);

		// encode the value into a ByteString
		ByteString.Output stream = ByteString.newOutput();
		elemCoder.encode(value, stream, Coder.Context.OUTER);
		ByteString data = stream.toByteString();

		checkpointBuilder.addValueBuilder()
			.setTag(stateKey)
			.setData(coder)
			.setData(data);
	}
}
 
Example 8
Source File: FlinkStateInternals.java    From flink-dataflow with Apache License 2.0
@Override
public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException {
	if (!isClear) {
		// serialize the coder.
		byte[] coder = InstantiationUtil.serializeObject(accumCoder);

		// serialize the combiner.
		byte[] combiner = InstantiationUtil.serializeObject(combineFn);

		// encode the accumulator into a ByteString
		ByteString.Output stream = ByteString.newOutput();
		accumCoder.encode(accum, stream, Coder.Context.OUTER);
		ByteString data = stream.toByteString();

		// put the flag that the next serialized element is an accumulator
		checkpointBuilder.addAccumulatorBuilder()
			.setTag(stateKey)
			.setData(coder)
			.setData(combiner)
			.setData(data);
	}
}
 
Example 9
Source File: MRHelpers.java    From incubator-tez with Apache License 2.0
@Private
public static <T extends org.apache.hadoop.mapreduce.InputSplit> MRSplitProto createSplitProto(
    T newSplit, SerializationFactory serializationFactory)
    throws IOException, InterruptedException {
  MRSplitProto.Builder builder = MRSplitProto
      .newBuilder();
  
  builder.setSplitClassName(newSplit.getClass().getName());

  @SuppressWarnings("unchecked")
  Serializer<T> serializer = serializationFactory
      .getSerializer((Class<T>) newSplit.getClass());
  ByteString.Output out = ByteString
      .newOutput(SPLIT_SERIALIZED_LENGTH_ESTIMATE);
  serializer.open(out);
  serializer.serialize(newSplit);
  // TODO MR Compat: Check against max block locations per split.
  ByteString splitBs = out.toByteString();
  builder.setSplitBytes(splitBs);

  return builder.build();
}
 
Example 10
Source File: GenQueryOutputStream.java    From bazel with Apache License 2.0
@Override
public ByteString getBytes() throws IOException {
  ByteString.Output out = ByteString.newOutput(size);
  try (GZIPInputStream gzipIn = new GZIPInputStream(compressedData.newInput())) {
    ByteStreams.copy(gzipIn, out);
  }
  return out.toByteString();
}
 
Example 11
Source File: TezUtils.java    From tez with Apache License 2.0
/**
 * Convert a Configuration to compressed ByteString using Protocol buffer
 *
 * @param conf
 *          : Configuration to be converted
 * @return PB ByteString (compressed)
 * @throws java.io.IOException
 */
public static ByteString createByteStringFromConf(Configuration conf) throws IOException {
  Objects.requireNonNull(conf, "Configuration must be specified");
  ByteString.Output os = ByteString.newOutput();
  SnappyOutputStream compressOs = new SnappyOutputStream(os);
  try {
    writeConfInPB(compressOs, conf);
  } finally {
    if (compressOs != null) {
      compressOs.close();
    }
  }
  return os.toByteString();
}
 
Example 12
Source File: TestUtils.java    From bazel with Apache License 2.0
public static <T> ByteString toBytes(SerializationContext serializationContext, T value)
    throws IOException, SerializationException {
  ByteString.Output output = ByteString.newOutput();
  CodedOutputStream codedOut = CodedOutputStream.newInstance(output);
  serializationContext.serialize(value, codedOut);
  codedOut.flush();
  return output.toByteString();
}
 
Example 13
Source File: TezUtils.java    From incubator-tez with Apache License 2.0
/**
 * Convert a Configuration to compressed ByteString using Protocol buffer
 * 
 * @param conf
 *          : Configuration to be converted
 * @return PB ByteString (compressed)
 * @throws IOException
 */
public static ByteString createByteStringFromConf(Configuration conf) throws IOException {
  Preconditions.checkNotNull(conf, "Configuration must be specified");
  ByteString.Output os = ByteString.newOutput();
  DeflaterOutputStream compressOs = new DeflaterOutputStream(os,
      new Deflater(Deflater.BEST_SPEED));
  try {
    writeConfInPB(compressOs, conf);
  } finally {
    if (compressOs != null) {
      compressOs.close();
    }
  }
  return os.toByteString();
}
 
Example 14
Source File: CASFileCacheTest.java    From bazel-buildfarm with Apache License 2.0
@Test
public void duplicateExpiredEntrySuppressesDigestExpiration()
    throws IOException, InterruptedException {
  Blob expiringBlob;
  try (ByteString.Output out = ByteString.newOutput(512)) {
    for (int i = 0; i < 512; i++) {
      out.write(0);
    }
    expiringBlob = new Blob(out.toByteString(), DIGEST_UTIL);
  }
  blobs.put(expiringBlob.getDigest(), expiringBlob.getData());
  decrementReference(
      fileCache.put(expiringBlob.getDigest(), /* isExecutable=*/ false)); // expected eviction
  blobs.clear();
  decrementReference(
      fileCache.put(
          expiringBlob.getDigest(),
          /* isExecutable=*/ true)); // should be fed from storage directly, not through delegate

  fileCache.put(new Blob(ByteString.copyFromUtf8("Hello, World"), DIGEST_UTIL));

  verifyZeroInteractions(onExpire);
  // assert expiration of non-executable digest
  String expiringKey = fileCache.getKey(expiringBlob.getDigest(), /* isExecutable=*/ false);
  assertThat(storage.containsKey(expiringKey)).isFalse();
  assertThat(Files.exists(fileCache.getPath(expiringKey))).isFalse();
}
 
Example 15
Source File: ProbableIntersectionCursorState.java    From fdb-record-layer with Apache License 2.0 5 votes vote down vote up
@Override
@Nonnull
public BloomFilterCursorContinuation getContinuation() {
    ByteString.Output bloomOutput = ByteString.newOutput();
    try {
        bloomFilter.writeTo(bloomOutput);
    } catch (IOException e) {
        throw new RecordCoreException("unable to serialize bloom filter", e);
    }
    return new BloomFilterCursorContinuation(super.getContinuation(), bloomOutput.toByteString());
}
 
Example 16
Source File: BuildOptions.java    From bazel with Apache License 2.0
@Override
public void serialize(
    SerializationContext context,
    OptionsDiffForReconstruction diff,
    CodedOutputStream codedOut)
    throws SerializationException, IOException {
  OptionsDiffCache cache = context.getDependency(OptionsDiffCache.class);
  ByteString bytes = cache.getBytesFromOptionsDiff(diff);
  if (bytes == null) {
    context = context.getNewNonMemoizingContext();
    ByteString.Output byteStringOut = ByteString.newOutput();
    CodedOutputStream bytesOut = CodedOutputStream.newInstance(byteStringOut);
    context.serialize(diff.differingOptions, bytesOut);
    context.serialize(diff.extraFirstFragmentClasses, bytesOut);
    context.serialize(diff.extraSecondFragments, bytesOut);
    bytesOut.writeByteArrayNoTag(diff.baseFingerprint);
    context.serialize(diff.checksum, bytesOut);
    context.serialize(diff.differingStarlarkOptions, bytesOut);
    context.serialize(diff.extraFirstStarlarkOptions, bytesOut);
    context.serialize(diff.extraSecondStarlarkOptions, bytesOut);
    bytesOut.flush();
    byteStringOut.flush();
    int optionsDiffSize = byteStringOut.size();
    bytes = byteStringOut.toByteString();
    cache.putBytesFromOptionsDiff(diff, bytes);
    logger.atFine().log(
        "Serialized OptionsDiffForReconstruction %s. Diff took %d bytes.",
        diff, optionsDiffSize);
  }
  codedOut.writeBytesNoTag(bytes);
}
 
Example 17
Source File: BinTools.java    From bazel with Apache License 2.0
@Override
public ByteString getBytes() throws IOException {
  ByteString.Output out = ByteString.newOutput();
  writeTo(out);
  return out.toByteString();
}
 
Example 18
Source File: ByteStreamServiceTest.java    From bazel-buildfarm with Apache License 2.0
@Test
public void readSlicesLargeChunksFromInstance() throws Exception {
  // pick a large chunk size
  long size = CHUNK_SIZE * 10 + CHUNK_SIZE - 47;
  ByteString content;
  try (ByteString.Output out =
      ByteString.newOutput(
          ByteStreamService.CHUNK_SIZE * 10 + ByteStreamService.CHUNK_SIZE - 47)) {
    for (long i = 0; i < size; i++) {
      out.write((int) (i & 0xff));
    }
    content = out.toByteString();
  }
  Digest digest = DIGEST_UTIL.compute(content);
  String resourceName = "blobs/" + DigestUtil.toString(digest);
  ReadRequest request = ReadRequest.newBuilder().setResourceName(resourceName).build();

  Instance instance = mock(Instance.class);
  when(instances.getFromBlob(eq(resourceName))).thenReturn(instance);
  doAnswer(answerVoid((blobDigest, offset, limit, chunkObserver, metadata) -> {}))
      .when(instance)
      .getBlob(
          eq(digest),
          eq(request.getReadOffset()),
          eq((long) content.size()),
          any(ServerCallStreamObserver.class),
          eq(RequestMetadata.getDefaultInstance()));
  Channel channel = InProcessChannelBuilder.forName(fakeServerName).directExecutor().build();
  ByteStreamStub service = ByteStreamGrpc.newStub(channel);
  CountingReadObserver readObserver = new CountingReadObserver();
  service.read(request, readObserver);
  ArgumentCaptor<ServerCallStreamObserver<ByteString>> observerCaptor =
      ArgumentCaptor.forClass(ServerCallStreamObserver.class);
  verify(instance, times(1))
      .getBlob(
          eq(digest),
          eq(request.getReadOffset()),
          eq((long) content.size()),
          observerCaptor.capture(),
          eq(RequestMetadata.getDefaultInstance()));

  StreamObserver<ByteString> responseObserver = observerCaptor.getValue();
  // supply entire content
  responseObserver.onNext(content);
  responseObserver.onCompleted();

  assertThat(readObserver.isCompleted()).isTrue();
  assertThat(readObserver.getData()).isEqualTo(content);
  List<Integer> sizes = readObserver.getSizesList();
  assertThat(sizes.size()).isEqualTo(11); // 10 + 1 incomplete chunk
  assertThat(Iterables.filter(sizes, (responseSize) -> responseSize > CHUNK_SIZE)).isEmpty();
}
 
Example 19
Source File: CASFileCacheTest.java    From bazel-buildfarm with Apache License 2.0
@Test
public void expireInterruptCausesExpirySequenceHalt() throws IOException, InterruptedException {
  Blob expiringBlob;
  try (ByteString.Output out = ByteString.newOutput(1024)) {
    for (int i = 0; i < 1024; i++) {
      out.write(0);
    }
    expiringBlob = new Blob(out.toByteString(), DIGEST_UTIL);
    fileCache.put(expiringBlob);
  }
  Digest expiringDigest = expiringBlob.getDigest();

  // set the delegate to throw interrupted on write output creation
  Write interruptingWrite =
      new Write() {
        boolean canReset = false;

        @Override
        public long getCommittedSize() {
          throw new UnsupportedOperationException();
        }

        @Override
        public boolean isComplete() {
          throw new UnsupportedOperationException();
        }

        @Override
        public FeedbackOutputStream getOutput(
            long deadlineAfter, TimeUnit deadlineAfterUnits, Runnable onReadyHandler)
            throws IOException {
          canReset = true;
          throw new IOException(new InterruptedException());
        }

        @Override
        public void reset() {
          if (!canReset) {
            throw new UnsupportedOperationException();
          }
        }

        @Override
        public void addListener(Runnable onCompleted, Executor executor) {
          throw new UnsupportedOperationException();
        }
      };
  when(delegate.getWrite(eq(expiringDigest), any(UUID.class), any(RequestMetadata.class)))
      .thenReturn(interruptingWrite);

  // FIXME we should have a guarantee that we did not iterate over another expiration
  InterruptedException sequenceException = null;
  try {
    fileCache.put(new Blob(ByteString.copyFromUtf8("Hello, World"), DIGEST_UTIL));
    fail("should not get here");
  } catch (InterruptedException e) {
    sequenceException = e;
  }
  assertThat(sequenceException).isNotNull();

  verify(delegate, times(1))
      .getWrite(eq(expiringDigest), any(UUID.class), any(RequestMetadata.class));
}
 
Example 20
Source File: StringActionInput.java    From bazel with Apache License 2.0
@Override
public ByteString getBytes() throws IOException {
  ByteString.Output out = ByteString.newOutput();
  writeTo(out);
  return out.toByteString();
}