com.google.cloud.pubsub.v1.Subscriber Java Examples

The following examples show how to use com.google.cloud.pubsub.v1.Subscriber. Each example is taken from an open-source project; the source file and license are noted above each snippet.
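All of the examples below follow the same basic lifecycle: build a Subscriber from a ProjectSubscriptionName and a MessageReceiver callback, start it asynchronously, and stop it when you are done. The sketch below shows that minimal pattern on its own; the project and subscription IDs are placeholders.

import com.google.cloud.pubsub.v1.AckReplyConsumer;
import com.google.cloud.pubsub.v1.MessageReceiver;
import com.google.cloud.pubsub.v1.Subscriber;
import com.google.pubsub.v1.ProjectSubscriptionName;
import com.google.pubsub.v1.PubsubMessage;

public class MinimalSubscriberExample {
  public static void main(String[] args) {
    // Placeholder IDs; substitute your own project and subscription.
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of("my-project-id", "my-subscription-id");

    // The receiver is invoked once per message; ack so the message is not redelivered.
    MessageReceiver receiver = (PubsubMessage message, AckReplyConsumer consumer) -> {
      System.out.println("Received: " + message.getData().toStringUtf8());
      consumer.ack();
    };

    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();
    subscriber.startAsync().awaitRunning();
    try {
      // Listen for a while; real code would tie this to its own lifecycle.
      Thread.sleep(30_000);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    } finally {
      subscriber.stopAsync().awaitTerminated();
    }
  }
}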
Example #1
Source File: PubsubIntegrationTest.java    From gcp-ingestion with Mozilla Public License 2.0
private List<String> receiveLines(int expectedMessageCount) throws Exception {
  List<String> received = new CopyOnWriteArrayList<>();
  ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectId,
      subscriptionId);

  MessageReceiver receiver = ((PubsubMessage message, AckReplyConsumer consumer) -> {
    try {
      String encoded = Json.asString(new org.apache.beam.sdk.io.gcp.pubsub.PubsubMessage(
          message.getData().toByteArray(), message.getAttributesMap()));
      received.add(encoded);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
    consumer.ack();
  });
  Subscriber subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();
  subscriber.startAsync();
  while (received.size() < expectedMessageCount) {
    Thread.sleep(100);
  }
  subscriber.stopAsync();

  return received;
}
 
Example #2
Source File: Demo.java    From kafka-pubsub-emulator with Apache License 2.0
private void subscribe(String subscription) throws InterruptedException {
  System.out.println("Subscribing to messages from " + subscription + "...Press Ctrl-C to exit");
  final CountDownLatch subscribeLatch = new CountDownLatch(1);
  Runtime.getRuntime().addShutdownHook(new Thread(() -> subscribeLatch.countDown()));

  Subscriber subscriber = Subscriber.newBuilder(subscription,
      (message, ackReplyConsumer) -> {
        ackReplyConsumer.ack();
        System.out.println(
            "Received " + message.getData().toStringUtf8() + " with message-id " + message
                .getMessageId());
      })
      .setCredentialsProvider(new NoCredentialsProvider())
      .setChannelProvider(getChannelProvider())
      .build();
  subscriber.startAsync().awaitRunning();
  subscribeLatch.await();
}
 
Example #3
Source File: Subscriptions.java    From java-docs-samples with Apache License 2.0
public static int pubSub(String subId, long timeoutSeconds, String projectId)
    throws InterruptedException {
  // String subId = "my-occurrence-subscription";
  // long timeoutSeconds = 20;
  // String projectId = "my-project-id";
  Subscriber subscriber = null;
  MessageReceiverExample receiver = new MessageReceiverExample();

  try {
    // Subscribe to the requested Pub/Sub channel
    ProjectSubscriptionName subName = ProjectSubscriptionName.of(projectId, subId);
    subscriber = Subscriber.newBuilder(subName, receiver).build();
    subscriber.startAsync().awaitRunning();
    // Sleep to listen for messages
    TimeUnit.SECONDS.sleep(timeoutSeconds);
  } finally {
    // Stop listening to the channel
    if (subscriber != null) {
      subscriber.stopAsync();
    }
  }
  // Print and return the number of Pub/Sub messages received
  System.out.println(receiver.messageCount);
  return receiver.messageCount;
}
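The MessageReceiverExample used above is declared elsewhere in Subscriptions.java and is not shown here. Judging from how it is used (its messageCount field is read after the subscriber stops), it is a small MessageReceiver that counts and acknowledges each message; a sketch along those lines (details of the real class may differ):

static class MessageReceiverExample implements MessageReceiver {
  // Read directly by the calling code as receiver.messageCount.
  volatile int messageCount = 0;

  @Override
  public synchronized void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
    messageCount++;
    // Acknowledge so the message is not redelivered.
    consumer.ack();
  }
}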
 
Example #4
Source File: CPSSubscriberTask.java    From pubsub with Apache License 2.0
private CPSSubscriberTask(StartRequest request, MetricsHandler metricsHandler, int workerCount) {
  this.metricsHandler = metricsHandler;
  ProjectSubscriptionName subscription =
      ProjectSubscriptionName.of(
          request.getProject(), request.getPubsubOptions().getSubscription());
  try {
    this.subscriber =
        Subscriber.newBuilder(subscription, this)
            .setParallelPullCount(workerCount)
            .setFlowControlSettings(
                FlowControlSettings.newBuilder()
                    .setMaxOutstandingElementCount(Long.MAX_VALUE)
                    .setMaxOutstandingRequestBytes(BYTES_PER_WORKER * workerCount)
                    .build())
            .build();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
 
Example #5
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
public <T> Subscriber subscribeAndConvert(String subscription,
		Consumer<ConvertedBasicAcknowledgeablePubsubMessage<T>> messageConsumer, Class<T> payloadType) {
	Assert.notNull(messageConsumer, "The messageConsumer can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription,
					(message, ackReplyConsumer) -> messageConsumer.accept(
							new ConvertedPushedAcknowledgeablePubsubMessage<>(
									PubSubSubscriptionUtils.toProjectSubscriptionName(subscription,
											this.subscriberFactory.getProjectId()),
									message,
									this.getMessageConverter().fromPubSubMessage(message, payloadType),
									ackReplyConsumer)));
	subscriber.startAsync();
	return subscriber;
}
 
Example #6
Source File: PubSubTemplateDocumentationTests.java    From spring-cloud-gcp with Apache License 2.0
@Test
public void subscribeSimpleTest() {
	pubSubTest((PubSubTemplate pubSubTemplate, String subscriptionName, String topicName) -> {
		pubSubTemplate.publish(topicName, "message");

		Logger logger = new Logger();
		//tag::subscribe[]
		Subscriber subscriber = pubSubTemplate.subscribe(subscriptionName, (message) -> {
			logger.info("Message received from " + subscriptionName + " subscription: "
					+ message.getPubsubMessage().getData().toStringUtf8());
			message.ack();
		});
		//end::subscribe[]

		List<String> messages = logger.getMessages();
		Awaitility.await().atMost(5, TimeUnit.SECONDS).until(() -> !messages.isEmpty());
		assertThat(messages)
				.containsExactly("Message received from " + subscriptionName + " subscription: message");
	});
}
 
Example #7
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
public Subscriber subscribe(String subscription,
		Consumer<BasicAcknowledgeablePubsubMessage> messageConsumer) {
	Assert.notNull(messageConsumer, "The messageConsumer can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription,
					(message, ackReplyConsumer) -> messageConsumer.accept(
							new PushedAcknowledgeablePubsubMessage(
									PubSubSubscriptionUtils.toProjectSubscriptionName(subscription,
											this.subscriberFactory.getProjectId()),
									message,
									ackReplyConsumer)));
	subscriber.startAsync();
	return subscriber;
}
 
Example #8
Source File: PubSubSubscriberTemplate.java    From spring-cloud-gcp with Apache License 2.0
@Override
@Deprecated
public Subscriber subscribe(String subscription, MessageReceiver messageReceiver) {
	Assert.hasText(subscription, "The subscription can't be null or empty.");
	Assert.notNull(messageReceiver, "The messageReceiver can't be null.");

	Subscriber subscriber =
			this.subscriberFactory.createSubscriber(subscription, messageReceiver);
	subscriber.startAsync();
	return subscriber;
}
 
Example #9
Source File: PubsubBenchWrapperImpl.java    From google-cloud-java with Apache License 2.0
public void recv(PubsubRecv request, StreamObserver<EmptyResponse> responseObserver) {
  System.out.println("recv has been called");

  ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(
      "some-project", request.getSubName());
  
  Subscriber subscriber = null;
  try {
    InstantiatingExecutorProvider executorProvider =
      InstantiatingExecutorProvider.newBuilder().setExecutorThreadCount(1).build();

    subscriber =
        Subscriber.newBuilder(subscriptionName, new SimpleReceiver())
            .setExecutorProvider(executorProvider)
            .build();
    subscriber.startAsync().awaitRunning();

    // Allow the subscriber to run indefinitely unless an unrecoverable error occurs.
    subscriber.awaitTerminated();
  } catch (IllegalStateException e) {
    System.out.println("Subscriber unexpectedly stopped: " + e);
  }

  EmptyResponse reply = EmptyResponse.newBuilder().build();
  responseObserver.onNext(reply);
  responseObserver.onCompleted();
}
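The SimpleReceiver passed to the builder is also defined elsewhere in the benchmark wrapper. Since the benchmark only exercises the receive path, a receiver of this shape typically does nothing but acknowledge; a minimal sketch (the real class may do slightly more):

static class SimpleReceiver implements MessageReceiver {
  @Override
  public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
    // Acknowledge immediately; the benchmark only measures delivery.
    consumer.ack();
  }
}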
 
Example #10
Source File: CheckPubSubEmulatorTest.java    From flink with Apache License 2.0
@Test
public void testPub() throws Exception {
	List<PubsubMessage> receivedMessages = new ArrayList<>();
	Subscriber subscriber = pubsubHelper
		.subscribeToSubscription(
			PROJECT_NAME,
			SUBSCRIPTION_NAME,
			(message, consumer) -> {
				receivedMessages.add(message);
				consumer.ack();
			}
		);
	subscriber.awaitRunning(5, MINUTES);

	Publisher publisher = pubsubHelper.createPublisher(PROJECT_NAME, TOPIC_NAME);
	publisher
		.publish(PubsubMessage
			.newBuilder()
			.setData(ByteString.copyFromUtf8("Hello World"))
			.build())
		.get();

	LOG.info("Waiting a while to receive the message...");

	waitUntil(() -> receivedMessages.size() > 0);

	assertEquals(1, receivedMessages.size());
	assertEquals("Hello World", receivedMessages.get(0).getData().toStringUtf8());

	LOG.info("Received message. Shutting down ...");

	subscriber.stopAsync().awaitTerminated(5, MINUTES);
	publisher.shutdown();
}
 
Example #11
Source File: PubsubHelper.java    From flink with Apache License 2.0
public Subscriber subscribeToSubscription(String project, String subscription, MessageReceiver messageReceiver) {
	ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(project, subscription);
	Subscriber subscriber =
		Subscriber
			.newBuilder(subscriptionName, messageReceiver)
			.setChannelProvider(channelProvider)
			.setCredentialsProvider(NoCredentialsProvider.create())
			.build();
	subscriber.startAsync();
	return subscriber;
}
 
Example #12
Source File: GooglePubsubSubscriber.java    From echo with Apache License 2.0
public synchronized void start() {
  this.subscriber =
      Subscriber.newBuilder(
              ProjectSubscriptionName.of(project, subscriptionName), messageReceiver)
          .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
          .build();

  subscriber.addListener(
      new GooglePubsubFailureHandler(this, formatSubscriptionName(project, subscriptionName)),
      MoreExecutors.directExecutor());
  subscriber.startAsync().awaitRunning();
  log.info(
      "Google Pubsub subscriber started for {}",
      formatSubscriptionName(project, subscriptionName));
}
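The GooglePubsubFailureHandler registered here is defined elsewhere in echo. Because Subscriber is an ApiService, the listener is an ApiService.Listener whose failed callback fires when the subscriber transitions to a failed state. The real handler restarts the subscriber; a stripped-down sketch that only logs, with a simplified constructor for illustration, looks like this:

// Simplified stand-in for a failure listener; the actual handler in echo also restarts the subscriber.
// Requires com.google.api.core.ApiService.
static class LoggingFailureHandler extends ApiService.Listener {
  private final String subscriptionName;

  LoggingFailureHandler(String subscriptionName) {
    this.subscriptionName = subscriptionName;
  }

  @Override
  public void failed(ApiService.State from, Throwable failure) {
    // Invoked when the subscriber enters the FAILED state; from is the previous state.
    System.err.println("Subscriber for " + subscriptionName + " failed: " + failure);
  }
}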
 
Example #13
Source File: PubSubSource.java    From datacollector with Apache License 2.0
@Override
public void destroy() {
  try {
    LOG.debug("Stopping subscribers");
    subscribers.forEach(Subscriber::stopAsync);
    subscribers.forEach(Subscriber::awaitTerminated);

    LOG.debug("Stopping message processors");
    messageProcessors.forEach(MessageProcessor::stop);
  } finally {
    LOG.info("Stopped {} processing threads", conf.maxThreads);
    subscribers.clear();
    messageProcessors.clear();
  }

  if (executor == null) {
    return;
  }

  LOG.debug("Shutting down executor service");
  executor.shutdown();
  try {
    executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
  } catch (InterruptedException e) {
    LOG.warn("Orderly shutdown interrupted.");
    // Restore the interrupt status rather than interrupting unconditionally in a finally block.
    Thread.currentThread().interrupt();
  }
}
 
Example #14
Source File: PubSubTemplateTests.java    From spring-cloud-gcp with Apache License 2.0
@Test
public void testSubscribe() {
	Subscriber subscriber = this.pubSubTemplate.subscribe("testSubscription",
			(message) -> { });
	assertThat(subscriber).isEqualTo(this.mockSubscriber);
	verify(this.mockSubscriber, times(1)).startAsync();
}
 
Example #15
Source File: DefaultSubscriberFactoryTests.java    From spring-cloud-gcp with Apache License 2.0
@Test
public void testNewSubscriber() {
	DefaultSubscriberFactory factory = new DefaultSubscriberFactory(() -> "angeldust");
	factory.setCredentialsProvider(this.credentialsProvider);

	Subscriber subscriber = factory.createSubscriber("midnight cowboy", (message, consumer) -> { });

	assertThat(subscriber.getSubscriptionNameString())
			.isEqualTo("projects/angeldust/subscriptions/midnight cowboy");
}
 
Example #16
Source File: PubsubHelper.java    From flink with Apache License 2.0
public Subscriber subscribeToSubscription(String project, String subscription, MessageReceiver messageReceiver) {
	ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(project, subscription);
	Subscriber subscriber =
		Subscriber
			.newBuilder(subscriptionName, messageReceiver)
			.setChannelProvider(channelProvider)
			.setCredentialsProvider(NoCredentialsProvider.create())
			.build();
	subscriber.startAsync();
	return subscriber;
}
 
Example #17
Source File: CheckPubSubEmulatorTest.java    From flink with Apache License 2.0
@Test
public void testPub() throws Exception {
	List<PubsubMessage> receivedMessages = new ArrayList<>();
	Subscriber subscriber = pubsubHelper
		.subscribeToSubscription(
			PROJECT_NAME,
			SUBSCRIPTION_NAME,
			(message, consumer) -> receivedMessages.add(message)
		);

	Publisher publisher = pubsubHelper.createPublisher(PROJECT_NAME, TOPIC_NAME);
	publisher
		.publish(PubsubMessage
			.newBuilder()
			.setData(ByteString.copyFromUtf8("Hello World"))
			.build())
		.get();

	LOG.info("Waiting a while to receive the message...");

	waitUntill(() -> receivedMessages.size() > 0);

	assertEquals(1, receivedMessages.size());
	assertEquals("Hello World", receivedMessages.get(0).getData().toStringUtf8());

	try {
		subscriber.stopAsync().awaitTerminated(100, MILLISECONDS);
	} catch (TimeoutException tme) {
		// Yeah, whatever. Don't care about clean shutdown here.
	}
	publisher.shutdown();
}
 
Example #18
Source File: PubSubManager.java    From smallrye-reactive-messaging with Apache License 2.0
private static Subscriber buildSubscriber(final PubSubConfig config, final PubSubMessageReceiver messageReceiver) {
    final ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(config.getProjectId(),
            config.getSubscription());

    final Subscriber.Builder subscriberBuilder = Subscriber.newBuilder(subscriptionName, messageReceiver);

    buildCredentialsProvider(config).ifPresent(subscriberBuilder::setCredentialsProvider);
    buildTransportChannelProvider(config).ifPresent(subscriberBuilder::setChannelProvider);

    return subscriberBuilder.build();
}
 
Example #19
Source File: PubSubClient.java    From daq with Apache License 2.0
public PubSubClient(String instName, String topicId) {
  try {
    ProjectTopicName projectTopicName = ProjectTopicName.of(PROJECT_ID, topicId);
    String name = String.format(SUBSCRIPTION_NAME_FORMAT, instName);
    ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(PROJECT_ID, name);
    System.out.println("Resetting and connecting to pubsub subscription " + subscriptionName);
    resetSubscription(projectTopicName, subscriptionName);
    subscriber = Subscriber.newBuilder(subscriptionName, new MessageProcessor()).build();
    subscriber.startAsync().awaitRunning();
    active.set(true);
  } catch (Exception e) {
    throw new RuntimeException(String.format(CONNECT_ERROR_FORMAT, PROJECT_ID, topicId), e);
  }
}
 
Example #20
Source File: PerformanceBenchmark.java    From kafka-pubsub-emulator with Apache License 2.0
private Subscriber getSubscriber(
    CountDownLatch publisherCountDown, CountDownLatch subscriberCountDown) {
  return Subscriber.newBuilder(
      subscription,
      (message, consumer) -> {
        consumer.ack();
        bytesReceived.add(messageSizeBytes);
        if (!receivedIds.containsKey(message.getMessageId())) {
          receivedIds.put(message.getMessageId(), 1);
        } else {
          int current = receivedIds.get(message.getMessageId());
          receivedIds.put(message.getMessageId(), ++current);
        }
        try {
          // If publishing is done but subscribing is not
          if (publisherCountDown.await(1, TimeUnit.NANOSECONDS)
              && !subscriberCountDown.await(1, TimeUnit.NANOSECONDS)
              && publishedLatencies.keySet().containsAll(receivedIds.keySet())) {
            subscriberCountDown.countDown();
          }
        } catch (InterruptedException ignored) {
        }
      })
      .setChannelProvider(getChannelProvider())
      .setCredentialsProvider(credentialsProvider)
      .build();
}
 
Example #21
Source File: PubSubExtendedBindingsPropertiesTests.java    From spring-cloud-gcp with Apache License 2.0
@Bean
public PubSubTemplate pubSubTemplate() {
	PublisherFactory publisherFactory = Mockito.mock(PublisherFactory.class);

	SubscriberFactory subscriberFactory = Mockito.mock(SubscriberFactory.class);
	when(subscriberFactory.getProjectId()).thenReturn("test-project");
	when(subscriberFactory.createSubscriberStub())
			.thenReturn(Mockito.mock(SubscriberStub.class));
	when(subscriberFactory.createSubscriber(anyString(), any()))
			.thenReturn(Mockito.mock(Subscriber.class));

	return new PubSubTemplate(
			new PubSubPublisherTemplate(publisherFactory),
			new PubSubSubscriberTemplate(subscriberFactory));
}
 
Example #22
Source File: WebController.java    From spring-cloud-gcp with Apache License 2.0
@GetMapping("/subscribe")
public RedirectView subscribe(@RequestParam("subscription") String subscriptionName) {
	Subscriber subscriber = this.pubSubTemplate.subscribe(subscriptionName, (message) -> {
		LOGGER.info("Message received from " + subscriptionName + " subscription: "
				+ message.getPubsubMessage().getData().toStringUtf8());
		message.ack();
	});

	this.allSubscribers.add(subscriber);
	return buildStatusView("Subscribed.");
}
 
Example #23
Source File: Pubsub.java    From gcp-ingestion with Mozilla Public License 2.0
/** Constructor. */
public <T> Read(String subscriptionName, Function<PubsubMessage, CompletableFuture<T>> output,
    Function<Subscriber.Builder, Subscriber.Builder> config,
    Function<PubsubMessage, PubsubMessage> decompress) {
  ProjectSubscriptionName subscription = ProjectSubscriptionName.parse(subscriptionName);
  subscriber = config.apply(Subscriber.newBuilder(subscription,
      // Synchronous CompletableFuture methods are executed by the thread that completes the
      // future, or the current thread if the future is already complete. Use that here to
      // minimize memory usage by doing as much work as immediately possible.
      (message, consumer) -> CompletableFuture.completedFuture(message).thenApply(decompress)
          .thenCompose(output).whenComplete((result, exception) -> {
            if (exception == null) {
              consumer.ack();
            } else {
              // exception is always a CompletionException caused by another exception
              if (exception.getCause() instanceof BatchException) {
                // only log batch exception once
                ((BatchException) exception.getCause()).handle((batchExc) -> LOG.error(
                    String.format("failed to deliver %d messages", batchExc.size),
                    batchExc.getCause()));
              } else {
                // log exception specific to this message
                LOG.error("failed to deliver message", exception.getCause());
              }
              consumer.nack();
            }
          })))
      .build();
}
 
Example #24
Source File: DefaultSubscriberFactory.java    From spring-cloud-gcp with Apache License 2.0
@Override
public Subscriber createSubscriber(String subscriptionName, MessageReceiver receiver) {
	Subscriber.Builder subscriberBuilder = Subscriber.newBuilder(
			PubSubSubscriptionUtils.toProjectSubscriptionName(subscriptionName, this.projectId), receiver);

	if (this.channelProvider != null) {
		subscriberBuilder.setChannelProvider(this.channelProvider);
	}

	if (this.executorProvider != null) {
		subscriberBuilder.setExecutorProvider(this.executorProvider);
	}

	if (this.credentialsProvider != null) {
		subscriberBuilder.setCredentialsProvider(this.credentialsProvider);
	}

	if (this.headerProvider != null) {
		subscriberBuilder.setHeaderProvider(this.headerProvider);
	}

	if (this.systemExecutorProvider != null) {
		subscriberBuilder.setSystemExecutorProvider(this.systemExecutorProvider);
	}

	if (this.flowControlSettings != null) {
		subscriberBuilder.setFlowControlSettings(this.flowControlSettings);
	}

	if (this.maxAckExtensionPeriod != null) {
		subscriberBuilder.setMaxAckExtensionPeriod(this.maxAckExtensionPeriod);
	}

	if (this.parallelPullCount != null) {
		subscriberBuilder.setParallelPullCount(this.parallelPullCount);
	}

	return subscriberBuilder.build();
}
 
Example #25
Source File: BaseIT.java    From kafka-pubsub-emulator with Apache License 2.0
protected Subscriber getSubscriber(String subscription, MessageReceiver receiver) {
  return Subscriber.newBuilder(ProjectSubscriptionName.of(PROJECT, subscription), receiver)
      .setChannelProvider(getChannelProvider())
      .setCredentialsProvider(NO_CREDENTIALS_PROVIDER)
      .build();
}
 
Example #26
Source File: PubSubSource.java    From datacollector with Apache License 2.0
@Override
public void produce(Map<String, String> lastOffsets, int maxBatchSize) throws StageException {
  SynchronousQueue<MessageReplyConsumerBundle> workQueue = new SynchronousQueue<>();

  ProjectSubscriptionName subscriptionName = ProjectSubscriptionName
      .of(
          conf.credentials.projectId,
          conf.subscriptionId
      );

  executor = Executors.newFixedThreadPool(getNumberOfThreads());

  int batchSize = Math.min(maxBatchSize, conf.basic.maxBatchSize);
  if (!getContext().isPreview() && conf.basic.maxBatchSize > maxBatchSize) {
    getContext().reportError(Errors.PUBSUB_10, maxBatchSize);
  }

  for (int i = 0; i < conf.maxThreads; i++) {
    MessageProcessor messageProcessor = new MessageProcessorImpl(
        getContext(),
        batchSize,
        conf.basic.maxWaitTime,
        parserFactory,
        workQueue
    );
    executor.submit(messageProcessor);
    messageProcessors.add(messageProcessor);
  }

  ExecutorProvider executorProvider = InstantiatingExecutorProvider.newBuilder()
      .setExecutorThreadCount(conf.advanced.numThreadsPerSubscriber)
      .build();

  InstantiatingGrpcChannelProvider channelProvider = getChannelProvider();

  FlowControlSettings flowControlSettings = getFlowControlSettings();

  for (int i = 0; i < conf.advanced.numSubscribers; i++) {
    Subscriber s = Subscriber.newBuilder(subscriptionName, new MessageReceiverImpl(workQueue))
        .setCredentialsProvider(credentialsProvider)
        .setExecutorProvider(executorProvider)
        .setChannelProvider(channelProvider)
        .setFlowControlSettings(flowControlSettings)
        .build();
    s.addListener(new Subscriber.Listener() {
      @Override
      public void failed(Subscriber.State from, Throwable failure) {
        LOG.error("Exception thrown in Subscriber: {}", failure.toString(), failure);
        LOG.error("Subscriber state: {}", from.toString());
        Throwables.propagate(failure);
      }
    }, MoreExecutors.directExecutor());
    subscribers.add(s);
  }

  try {
    subscribers.forEach(Subscriber::startAsync);
  } finally {
    LOG.info("Started {} subscribers.", conf.maxThreads);
  }

  while (!getContext().isStopped()) {
    ThreadUtil.sleep(1000);
  }
}
 
Example #27
Source File: BrokerFailureIT.java    From kafka-pubsub-emulator with Apache License 2.0
/** Evaluate that a Publisher/Subscriber can survive a broker failure. */
@Test(timeout = 300000)
public void testBrokerUpDownUp() throws Exception {
  ByteString message1 = ByteString.copyFromUtf8("message-1");
  ByteString message2 = ByteString.copyFromUtf8("message-2");
  ByteString messageError = ByteString.copyFromUtf8("message-error");

  CountDownLatch publish1 = new CountDownLatch(1);
  CountDownLatch publish2 = new CountDownLatch(1);
  CountDownLatch error = new CountDownLatch(1);
  CountDownLatch receive1 = new CountDownLatch(1);
  CountDownLatch receive2 = new CountDownLatch(1);

  Set<String> publishedIds = new ConcurrentSkipListSet<>();
  Set<String> receivedIds = new ConcurrentSkipListSet<>();

  Publisher publisher = getPublisher(BROKER_FAILURE_TOPIC);
  Subscriber subscriber =
      getSubscriber(
          SUBSCRIPTION,
          (message, consumer) -> {
            consumer.ack();
            receivedIds.add(message.getMessageId());
            if (message.getData().equals(message1)) {
              receive1.countDown();
            } else if (message.getData().equals(message2)) {
              receive2.countDown();
            }
          });
  subscriber.startAsync().awaitRunning();

  // First, publish and subscribe with everything working as expected
  publish(
      publisher,
      PubsubMessage.newBuilder().setData(message1).build(),
      (throwable) -> LOGGER.warning("Unexpected error during Publish"),
      (result) -> {
        publishedIds.add(result);
        publish1.countDown();
      });

  LOGGER.info("Awaiting successful Publish/Subscribe");
  publish1.await();
  receive1.await();
  assertEquals(1, publishedIds.size());
  assertEquals(publishedIds, receivedIds);

  // Shutdown and try to publish, which should fail
  LOGGER.info("Shutting down brokers...");
  for (int i = 0; i < KAFKA_RULE.getReplicationFactor(); i++) {
    KAFKA_RULE.shutdown(i);
  }

  publish(
      publisher,
      PubsubMessage.newBuilder().setData(messageError).build(),
      (throwable) -> error.countDown(),
      (result) -> LOGGER.warning("Unexpected successful Publish"));
  LOGGER.info("Awaiting error during Publish");
  error.await();

  LOGGER.info("Restarting brokers and waiting 30s...");
  for (int i = 0; i < KAFKA_RULE.getReplicationFactor(); i++) {
    KAFKA_RULE.start(i);
  }
  // This allows Kafka time to stabilize topic leadership; otherwise, results can be tough to validate
  Thread.sleep(30000);

  publish(
      publisher,
      PubsubMessage.newBuilder().setData(message2).build(),
      (throwable) -> LOGGER.warning("Unexpected error during Publish"),
      (result) -> {
        publishedIds.add(result);
        publish2.countDown();
      });
  LOGGER.info("Awaiting successful Publish/Subscribe after restart");
  publish2.await();
  receive2.await();
  assertEquals(2, publishedIds.size());
  assertEquals(publishedIds, receivedIds);
  subscriber.stopAsync().awaitTerminated();
}
 
Example #28
Source File: RiskAnalysisNumericalStats.java    From java-docs-samples with Apache License 2.0
public static void numericalStatsAnalysis(
    String projectId, String datasetId, String tableId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {

  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlpServiceClient = DlpServiceClient.create()) {

    // Specify the BigQuery table to analyze
    BigQueryTable bigQueryTable =
        BigQueryTable.newBuilder()
            .setTableId(tableId)
            .setDatasetId(datasetId)
            .setProjectId(projectId)
            .build();

    // This represents the name of the column to analyze, which must contain numerical data
    String columnName = "Age";

    // Configure the privacy metric for the job
    FieldId fieldId = FieldId.newBuilder().setName(columnName).build();
    NumericalStatsConfig numericalStatsConfig =
        NumericalStatsConfig.newBuilder().setField(fieldId).build();
    PrivacyMetric privacyMetric =
        PrivacyMetric.newBuilder().setNumericalStatsConfig(numericalStatsConfig).build();

    // Create action to publish job status notifications over Google Cloud Pub/Sub
    ProjectTopicName topicName = ProjectTopicName.of(projectId, topicId);
    PublishToPubSub publishToPubSub =
        PublishToPubSub.newBuilder().setTopic(topicName.toString()).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the risk analysis job to perform
    RiskAnalysisJobConfig riskAnalysisJobConfig =
        RiskAnalysisJobConfig.newBuilder()
            .setSourceTable(bigQueryTable)
            .setPrivacyMetric(privacyMetric)
            .addActions(action)
            .build();

    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setRiskJob(riskAnalysisJobConfig)
            .build();

    // Send the request to the API using the client
    DlpJob dlpJob = dlpServiceClient.createDlpJob(createDlpJobRequest);

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Build a request to get the completed job
    GetDlpJobRequest getDlpJobRequest =
        GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();

    // Retrieve completed job status
    DlpJob completedJob = dlpServiceClient.getDlpJob(getDlpJobRequest);
    System.out.println("Job status: " + completedJob.getState());

    // Get the result and parse through and process the information
    NumericalStatsResult result = completedJob.getRiskDetails().getNumericalStatsResult();

    System.out.printf(
        "Value range : [%.3f, %.3f]\n",
        result.getMinValue().getFloatValue(), result.getMaxValue().getFloatValue());

    int percent = 1;
    Double lastValue = null;
    for (Value quantileValue : result.getQuantileValuesList()) {
      Double currentValue = quantileValue.getFloatValue();
      if (lastValue == null || !lastValue.equals(currentValue)) {
        System.out.printf("Value at %s %% quantile : %.3f\n", percent, currentValue);
      }
      lastValue = currentValue;
      // Advance to the next quantile so each value is labeled with its own percentile.
      percent++;
    }
  }
}
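The handleMessage helper used in this and the next two DLP samples is not shown. Its role is to complete the done future once Pub/Sub delivers the notification for the job created above; here is a sketch under the assumption that the notification carries the job name in a DlpJobName attribute, which is how these samples match notifications to jobs:

private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  // Assumed attribute set by the DLP Pub/Sub action; it carries the finished job's name.
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    // Not the notification for this job; nack so it can be redelivered elsewhere.
    ackReplyConsumer.nack();
  }
}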
 
Example #29
Source File: InspectGcsFileWithSampling.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
public static void inspectGcsFileWithSampling(
    String projectId, String gcsUri, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {
    // Specify the GCS file to be inspected and sampling configuration
    CloudStorageOptions cloudStorageOptions =
        CloudStorageOptions.newBuilder()
            .setFileSet(FileSet.newBuilder().setUrl(gcsUri))
            .setBytesLimitPerFile(200)
            .addFileTypes(FileType.TEXT_FILE)
            .setFilesLimitPercent(90)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .build();

    StorageConfig storageConfig =
        StorageConfig.newBuilder().setCloudStorageOptions(cloudStorageOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setExcludeInfoTypes(true)
            .setIncludeQuote(true)
            .setMinLikelihood(Likelihood.POSSIBLE)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
 
Example #30
Source File: InspectBigQueryTableWithSampling.java    From java-docs-samples with Apache License 2.0
public static void inspectBigQueryTableWithSampling(
    String projectId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {
    // Specify the BigQuery table to be inspected.
    BigQueryTable tableReference =
        BigQueryTable.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("usa_names")
            .setTableId("usa_1910_current")
            .build();

    BigQueryOptions bigQueryOptions =
        BigQueryOptions.newBuilder()
            .setTableReference(tableReference)
            .setRowsLimit(1000)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .addIdentifyingFields(FieldId.newBuilder().setName("name"))
            .build();

    StorageConfig storageConfig =
        StorageConfig.newBuilder().setBigQueryOptions(bigQueryOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setIncludeQuote(true)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();

    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);

    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}