Java Code Examples for com.google.cloud.pubsub.v1.Subscriber#startAsync()

The following examples show how to use com.google.cloud.pubsub.v1.Subscriber#startAsync(). Each example is taken from an open source project; the source file, originating project, and license are noted above the code.
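
Subscriber implements the ApiService lifecycle, so startAsync() is normally paired with awaitRunning() to block until the streaming pull is up, and stopAsync() with awaitTerminated() for a clean shutdown. The minimal sketch below illustrates that lifecycle before the project-specific examples; the project and subscription IDs are placeholders, not taken from any of the examples that follow.

import com.google.cloud.pubsub.v1.AckReplyConsumer;
import com.google.cloud.pubsub.v1.MessageReceiver;
import com.google.cloud.pubsub.v1.Subscriber;
import com.google.pubsub.v1.ProjectSubscriptionName;
import com.google.pubsub.v1.PubsubMessage;
import java.util.concurrent.TimeUnit;

public class SubscriberLifecycleSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder project and subscription IDs.
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of("my-project", "my-subscription");

    MessageReceiver receiver =
        (PubsubMessage message, AckReplyConsumer consumer) -> {
          System.out.println("Received: " + message.getData().toStringUtf8());
          consumer.ack();
        };

    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();

    // startAsync() returns immediately; awaitRunning() blocks until the subscriber is up.
    subscriber.startAsync().awaitRunning();

    // Receive messages for a while, then shut down and wait for termination.
    Thread.sleep(TimeUnit.SECONDS.toMillis(30));
    subscriber.stopAsync().awaitTerminated();
  }
}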
Example 1
Source File: PubsubIntegrationTest.java    From gcp-ingestion with Mozilla Public License 2.0
private List<String> receiveLines(int expectedMessageCount) throws Exception {
  List<String> received = new CopyOnWriteArrayList<>();
  ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectId,
      subscriptionId);

  MessageReceiver receiver = (PubsubMessage message, AckReplyConsumer consumer) -> {
    try {
      String encoded = Json.asString(new org.apache.beam.sdk.io.gcp.pubsub.PubsubMessage(
          message.getData().toByteArray(), message.getAttributesMap()));
      received.add(encoded);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
    consumer.ack();
  };
  Subscriber subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();
  subscriber.startAsync();
  while (received.size() < expectedMessageCount) {
    Thread.sleep(100);
  }
  subscriber.stopAsync();

  return received;
}
 
Example 2
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
public Subscriber subscribe(String subscription,
		Consumer<BasicAcknowledgeablePubsubMessage> messageConsumer) {
	Assert.notNull(messageConsumer, "The messageConsumer can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription,
					(message, ackReplyConsumer) -> messageConsumer.accept(
							new PushedAcknowledgeablePubsubMessage(
									PubSubSubscriptionUtils.toProjectSubscriptionName(subscription,
											this.subscriberFactory.getProjectId()),
									message,
									ackReplyConsumer)));
	subscriber.startAsync();
	return subscriber;
}
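
Because the template starts the Subscriber before returning it, the caller owns the shutdown. A minimal caller might look like the sketch below; the template instance and the "orders" subscription name are assumptions for illustration, and BasicAcknowledgeablePubsubMessage#ack() is used to acknowledge each message.

public Subscriber listenForOrders(PubSubSubscriberTemplate template) {
	Subscriber subscriber = template.subscribe("orders", message -> {
		// getPubsubMessage() exposes the underlying Pub/Sub message.
		System.out.println("Payload: " + message.getPubsubMessage().getData().toStringUtf8());
		message.ack();
	});
	// The caller is responsible for stopping it later, e.g.
	// subscriber.stopAsync().awaitTerminated();
	return subscriber;
}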
 
Example 3
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
public <T> Subscriber subscribeAndConvert(String subscription,
		Consumer<ConvertedBasicAcknowledgeablePubsubMessage<T>> messageConsumer, Class<T> payloadType) {
	Assert.notNull(messageConsumer, "The messageConsumer can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription,
					(message, ackReplyConsumer) -> messageConsumer.accept(
							new ConvertedPushedAcknowledgeablePubsubMessage<>(
									PubSubSubscriptionUtils.toProjectSubscriptionName(subscription,
											this.subscriberFactory.getProjectId()),
									message,
									this.getMessageConverter().fromPubSubMessage(message, payloadType),
									ackReplyConsumer)));
	subscriber.startAsync();
	return subscriber;
}
 
Example 4
Source File: PubsubHelper.java    From flink with Apache License 2.0
public Subscriber subscribeToSubscription(String project, String subscription, MessageReceiver messageReceiver) {
	ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(project, subscription);
	Subscriber subscriber =
		Subscriber
			.newBuilder(subscriptionName, messageReceiver)
			.setChannelProvider(channelProvider)
			.setCredentialsProvider(NoCredentialsProvider.create())
			.build();
	subscriber.startAsync();
	return subscriber;
}
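
The custom channelProvider plus NoCredentialsProvider in this helper is the usual combination for pointing the client at a Pub/Sub emulator instead of the production endpoint. The original file does not show how channelProvider is built; one plausible sketch, with a placeholder emulator address, is:

// Uses io.grpc.ManagedChannelBuilder, com.google.api.gax.grpc.GrpcTransportChannel,
// and com.google.api.gax.rpc.FixedTransportChannelProvider.
private TransportChannelProvider emulatorChannelProvider(String hostPort) {
	// hostPort is a placeholder such as "localhost:8085" where the emulator listens.
	ManagedChannel channel = ManagedChannelBuilder.forTarget(hostPort).usePlaintext().build();
	return FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel));
}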
 
Example 5
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
@Deprecated
public Subscriber subscribe(String subscription, MessageReceiver messageReceiver) {
	Assert.hasText(subscription, "The subscription can't be null or empty.");
	Assert.notNull(messageReceiver, "The messageReceiver can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription, messageReceiver);
	subscriber.startAsync();
	return subscriber;
}
 
Example 6
Source File: ClientLibraryIT.java    From kafka-pubsub-emulator with Apache License 2.0
/**
 * Verifies that a Publisher can publish messages and a Subscriber will receive exactly those same
 * messages.
 */
@Test(timeout = 30000)
public void publishAndStreamingPull() throws Exception {
  String messagePrefix = CLIENT_LIBRARY_TOPIC + System.currentTimeMillis() + "-";
  int messages = 5000;
  Set<String> messagesSet = new TreeSet<>();
  for (int i = 0; i < messages; i++) {
    messagesSet.add(messagePrefix + i);
  }
  CountDownLatch publisherCountDownLatch = new CountDownLatch(messages);
  CountDownLatch subscriberCountDownLatch = new CountDownLatch(messages);
  Set<String> publishedIds = new ConcurrentSkipListSet<>();
  Set<String> receivedIds = new ConcurrentSkipListSet<>();
  LongAdder duplicates = new LongAdder();
  Publisher publisher = getPublisher(CLIENT_LIBRARY_TOPIC);

  Subscriber subscriber =
      getSubscriber(
          SUBSCRIPTION,
          (message, consumer) -> {
            consumer.ack();
            if (receivedIds.contains(message.getMessageId())) {
              duplicates.increment();
            } else {
              receivedIds.add(message.getMessageId());
            }
            subscriberCountDownLatch.countDown();
          });

  for (String data : messagesSet) {
    publish(
        publisher,
        PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8(data)).build(),
        (throwable) -> LOGGER.severe("Publisher Error " + throwable.getMessage()),
        (result) -> {
          publishedIds.add(result);
          publisherCountDownLatch.countDown();
        });
  }
  LOGGER.info(format("Waiting for %d messages to be published by Publisher", messages));
  publisherCountDownLatch.await();
  LOGGER.info("Shutting down Publisher");
  publisher.shutdown();
  assertEquals(messages, publishedIds.size());

  subscriber.startAsync();
  LOGGER.info(format("Waiting for %d messages to be received by Subscriber", messages));
  subscriberCountDownLatch.await();
  LOGGER.info("Shutting down Subscriber");
  // Shouldn't be necessary, but it seems to ensure all acks get sent
  Thread.sleep(2000);
  subscriber.stopAsync().awaitTerminated();
  // Shouldn't be necessary, but allows time for consumer offset information to be committed
  Thread.sleep(3000);
  assertEquals(receivedIds, publishedIds);
  assertEquals(0, duplicates.intValue());

  Consumer<String, ByteBuffer> validator =
      getValidationConsumer(CLIENT_LIBRARY_TOPIC, SUBSCRIPTION);
  validator
      .assignment()
      .forEach(
          tp -> {
            Long endOffset = validator.endOffsets(Collections.singleton(tp)).getOrDefault(tp, 0L);
            OffsetAndMetadata committed = validator.committed(tp);
            assertEquals(endOffset, Long.valueOf(committed.offset()));
          });
}
 
Example 7
Source File: SecurityGrpcIT.java    From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publishAndSubscribeOnSecureServer() throws Exception {
  // Sanity check
  assertTrue(USE_SSL);
  assertTrue(BaseIT.USE_SSL);

  String messageData = "security-test-" + System.currentTimeMillis();

  CountDownLatch messageToPublish = new CountDownLatch(1);
  CountDownLatch messageToSubscribe = new CountDownLatch(1);

  List<String> publishedIds = new ArrayList<>();

  Map<String, Integer> receivedIds = new ConcurrentHashMap<>();
  Publisher publisher = getPublisher(SSL_TOPIC);

  Subscriber subscriber =
      getSubscriber(
          SUBSCRIPTION,
          (message, consumer) -> {
            consumer.ack();
            if (messageData.equals(message.getData().toStringUtf8())) {
              receivedIds.put(message.getMessageId(), 1);
              messageToSubscribe.countDown();
            }
          });

  publish(
      publisher,
      PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8(messageData)).build(),
      (throwable) -> messageToPublish.countDown(),
      (result) -> {
        publishedIds.add(result);
        messageToPublish.countDown();
      });

  messageToPublish.await(3, TimeUnit.SECONDS);
  publisher.shutdown();
  assertEquals(1, publishedIds.size());

  subscriber.startAsync();
  messageToSubscribe.await(3, TimeUnit.SECONDS);
  Thread.sleep(100);
  subscriber.stopAsync().awaitTerminated();
  assertTrue(publishedIds.containsAll(receivedIds.keySet()));
}
 
Example 8
Source File: InspectGcsFile.java    From java-docs-samples with Apache License 2.0
public static void inspectGcsFile(
    String projectId, String gcsUri, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {
    // Specify the GCS file to be inspected.
    CloudStorageOptions cloudStorageOptions =
        CloudStorageOptions.newBuilder()
            .setFileSet(FileSet.newBuilder().setUrl(gcsUri))
            .build();

    StorageConfig storageConfig =
        StorageConfig.newBuilder().setCloudStorageOptions(cloudStorageOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for a complete list of info types
    List<InfoType> infoTypes =
        Stream.of("PHONE_NUMBER", "EMAIL_ADDRESS", "CREDIT_CARD_NUMBER")
            .map(it -> InfoType.newBuilder().setName(it).build())
            .collect(Collectors.toList());

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addAllInfoTypes(infoTypes)
            .setIncludeQuote(true)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
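
This sample (and the remaining DLP samples below) delegates to a handleMessage helper that is not reproduced on this page; its exact body lives in the original source file. A plausible sketch, assuming the Pub/Sub notification carries the job name in a "DlpJobName" attribute, is:

private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  // Ack and resolve the future only for the notification that matches this job.
  String attribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(attribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}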
 
Example 9
Source File: InspectBigQueryTableWithSampling.java    From java-docs-samples with Apache License 2.0
public static void inspectBigQueryTableWithSampling(
    String projectId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {
    // Specify the BigQuery table to be inspected.
    BigQueryTable tableReference =
        BigQueryTable.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("usa_names")
            .setTableId("usa_1910_current")
            .build();

    BigQueryOptions bigQueryOptions =
        BigQueryOptions.newBuilder()
            .setTableReference(tableReference)
            .setRowsLimit(1000)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .addIdentifyingFields(FieldId.newBuilder().setName("name"))
            .build();

    StorageConfig storageConfig =
        StorageConfig.newBuilder().setBigQueryOptions(bigQueryOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for a complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setIncludeQuote(true)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
 
Example 10
Source File: InspectGcsFileWithSampling.java    From java-docs-samples with Apache License 2.0
public static void inspectGcsFileWithSampling(
    String projectId, String gcsUri, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {
    // Specify the GCS file to be inspected and sampling configuration
    CloudStorageOptions cloudStorageOptions =
        CloudStorageOptions.newBuilder()
            .setFileSet(FileSet.newBuilder().setUrl(gcsUri))
            .setBytesLimitPerFile(200)
            .addFileTypes(FileType.TEXT_FILE)
            .setFilesLimitPercent(90)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .build();

    StorageConfig storageConfig =
        StorageConfig.newBuilder().setCloudStorageOptions(cloudStorageOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for a complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setExcludeInfoTypes(true)
            .setIncludeQuote(true)
            .setMinLikelihood(Likelihood.POSSIBLE)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
 
Example 11
Source File: RiskAnalysisNumericalStats.java    From java-docs-samples with Apache License 2.0
public static void numericalStatsAnalysis(
    String projectId, String datasetId, String tableId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {

  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlpServiceClient = DlpServiceClient.create()) {

    // Specify the BigQuery table to analyze
    BigQueryTable bigQueryTable =
        BigQueryTable.newBuilder()
            .setTableId(tableId)
            .setDatasetId(datasetId)
            .setProjectId(projectId)
            .build();

    // This represents the name of the column to analyze, which must contain numerical data
    String columnName = "Age";

    // Configure the privacy metric for the job
    FieldId fieldId = FieldId.newBuilder().setName(columnName).build();
    NumericalStatsConfig numericalStatsConfig =
        NumericalStatsConfig.newBuilder().setField(fieldId).build();
    PrivacyMetric privacyMetric =
        PrivacyMetric.newBuilder().setNumericalStatsConfig(numericalStatsConfig).build();

    // Create action to publish job status notifications over Google Cloud Pub/Sub
    ProjectTopicName topicName = ProjectTopicName.of(projectId, topicId);
    PublishToPubSub publishToPubSub =
        PublishToPubSub.newBuilder().setTopic(topicName.toString()).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the risk analysis job to perform
    RiskAnalysisJobConfig riskAnalysisJobConfig =
        RiskAnalysisJobConfig.newBuilder()
            .setSourceTable(bigQueryTable)
            .setPrivacyMetric(privacyMetric)
            .addActions(action)
            .build();

    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setRiskJob(riskAnalysisJobConfig)
            .build();

    // Send the request to the API using the client
    DlpJob dlpJob = dlpServiceClient.createDlpJob(createDlpJobRequest);

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Build a request to get the completed job
    GetDlpJobRequest getDlpJobRequest =
        GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();

    // Retrieve completed job status
    DlpJob completedJob = dlpServiceClient.getDlpJob(getDlpJobRequest);
    System.out.println("Job status: " + completedJob.getState());

    // Get the result and parse through and process the information
    NumericalStatsResult result = completedJob.getRiskDetails().getNumericalStatsResult();

    System.out.printf(
        "Value range : [%.3f, %.3f]\n",
        result.getMinValue().getFloatValue(), result.getMaxValue().getFloatValue());

    int percent = 1;
    Double lastValue = null;
    for (Value quantileValue : result.getQuantileValuesList()) {
      Double currentValue = quantileValue.getFloatValue();
      if (lastValue == null || !lastValue.equals(currentValue)) {
        System.out.printf("Value at %s %% quantile : %.3f%n", percent, currentValue);
      }
      lastValue = currentValue;
      percent++;
    }
  }
}