Java Code Examples for com.amazonaws.services.kinesis.AmazonKinesis

The following examples show how to use com.amazonaws.services.kinesis.AmazonKinesis. The examples are extracted from open source projects; the originating project and source file are listed above each example where known.
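
Before the individual project examples, here is a minimal, self-contained sketch of the interface in use: building an AmazonKinesis client with the SDK's standard builder and writing a single record. The region, stream name, and payload are placeholders chosen for illustration, not values taken from any of the projects below.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
import com.amazonaws.services.kinesis.model.PutRecordRequest;
import com.amazonaws.services.kinesis.model.PutRecordResult;

public class KinesisQuickStart {
    public static void main(String[] args) {
        // Build a client with the default credentials provider chain and an explicit region.
        AmazonKinesis kinesis = AmazonKinesisClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .build();

        // Write one record; the partition key determines which shard receives it.
        PutRecordRequest request = new PutRecordRequest()
                .withStreamName("example-stream")
                .withPartitionKey("partition-key-1")
                .withData(ByteBuffer.wrap("hello kinesis".getBytes(StandardCharsets.UTF_8)));

        PutRecordResult result = kinesis.putRecord(request);
        System.out.println("Wrote to shard " + result.getShardId()
                + ", sequence number " + result.getSequenceNumber());
    }
}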
Example 1
Source Project: camel-kafka-connector   Source File: AWSServiceFactory.java    License: Apache License 2.0
public static AWSService<AmazonKinesis> createKinesisService() {
    String awsInstanceType = System.getProperty("aws-service.kinesis.instance.type");
    LOG.info("Creating a {} AWS kinesis instance", getInstanceTypeName(awsInstanceType));

    if (awsInstanceType == null || awsInstanceType.equals("local-aws-container")) {
        return new AWSKinesisLocalContainerService();
    }

    if (awsInstanceType.equals("remote")) {
        return new AWSRemoteService<>(AWSClientUtils::newKinesisClient);
    }

    LOG.error("Invalid AWS instance type: {}. Must be either 'remote' or 'local-aws-container'",
            awsInstanceType);
    throw new UnsupportedOperationException("Invalid AWS instance type");
}
 
Example 2
Source Project: Flink-CEPplus   Source File: DynamoDBStreamsProxy.java    License: Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * Uses it as the internal client interacting with the DynamoDB streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example 3
Source Project: Flink-CEPplus   Source File: AWSUtil.java    License: Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// utilize automatic refreshment of credentials by directly passing the AWSCredentialsProvider
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion as null, to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example 4
Source Project: Flink-CEPplus   Source File: KinesisProxyTest.java    License: Apache License 2.0
@Test
public void testGetShardWithNoNewShards() throws Exception {
	// given
	String fakeStreamName = "fake-stream";

	AmazonKinesis mockClient = mock(AmazonKinesis.class);
	KinesisProxy kinesisProxy = getProxy(mockClient);

	Mockito.when(mockClient.listShards(
		new ListShardsRequest()
		.withStreamName(fakeStreamName)
		.withExclusiveStartShardId(KinesisShardIdGenerator.generateFromShardOrder(1))
	)).thenReturn(new ListShardsResult().withShards(Collections.emptyList()));

	HashMap<String, String> streamHashMap = new HashMap<>();
	streamHashMap.put(fakeStreamName, KinesisShardIdGenerator.generateFromShardOrder(1));

	// when
	GetShardListResult shardListResult = kinesisProxy.getShardList(streamHashMap);

	// then
	Assert.assertFalse(shardListResult.hasRetrievedShards());
}
 
Example 5
Source Project: flink   Source File: DynamoDBStreamsProxy.java    License: Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * Uses it as the internal client interacting with the DynamoDB streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example 6
Source Project: flink   Source File: AWSUtil.java    License: Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// utilize automatic refreshment of credentials by directly passing the AWSCredentialsProvider
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion as null, to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example 7
Source Project: flink   Source File: KinesisProxyTest.java    License: Apache License 2.0
@Test
public void testGetShardWithNoNewShards() throws Exception {
	// given
	String fakeStreamName = "fake-stream";

	AmazonKinesis mockClient = mock(AmazonKinesis.class);
	KinesisProxy kinesisProxy = getProxy(mockClient);

	Mockito.when(mockClient.listShards(
		new ListShardsRequest()
		.withStreamName(fakeStreamName)
		.withExclusiveStartShardId(KinesisShardIdGenerator.generateFromShardOrder(1))
	)).thenReturn(new ListShardsResult().withShards(Collections.emptyList()));

	HashMap<String, String> streamHashMap = new HashMap<>();
	streamHashMap.put(fakeStreamName, KinesisShardIdGenerator.generateFromShardOrder(1));

	// when
	GetShardListResult shardListResult = kinesisProxy.getShardList(streamHashMap);

	// then
	Assert.assertFalse(shardListResult.hasRetrievedShards());
}
 
Example 8
@Test
public void testKinesis() throws Exception {
    AmazonKinesis kinesis = amazonDockerClientsHolder.amazonKinesis();

    ListStreamsResult streamsResult = kinesis.listStreams();
    assertThat(streamsResult.getStreamNames().size(), is(0));

    CreateStreamRequest createStreamRequest = new CreateStreamRequest()
        .withStreamName("test-stream")
        .withShardCount(2);

    kinesis.createStream(createStreamRequest);

    streamsResult = kinesis.listStreams();
    assertThat(streamsResult.getStreamNames(), hasItem("test-stream"));
}
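
Note that createStream is asynchronous: the new stream starts in CREATING status and only accepts reads and writes once it reports ACTIVE. Tests like the one above often poll describeStream before using the stream; the helper below is a hedged sketch of that pattern (the one-second interval and 30-attempt limit are arbitrary), using only com.amazonaws.services.kinesis.model classes.

private static void waitForStreamActive(AmazonKinesis kinesis, String streamName) throws InterruptedException {
    // Poll DescribeStream until the stream reports ACTIVE, giving up after roughly 30 seconds.
    for (int attempt = 0; attempt < 30; attempt++) {
        DescribeStreamResult result = kinesis.describeStream(
                new DescribeStreamRequest().withStreamName(streamName));
        if ("ACTIVE".equals(result.getStreamDescription().getStreamStatus())) {
            return;
        }
        Thread.sleep(1000);
    }
    throw new IllegalStateException("Stream " + streamName + " did not become ACTIVE in time");
}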
 
Example 9
@Test
void testProvisionProducerSuccessfulWithExistingStream() {
	AmazonKinesis amazonKinesisMock = mock(AmazonKinesis.class);
	KinesisBinderConfigurationProperties binderProperties = new KinesisBinderConfigurationProperties();
	KinesisStreamProvisioner provisioner = new KinesisStreamProvisioner(
			amazonKinesisMock, binderProperties);
	ExtendedProducerProperties<KinesisProducerProperties> extendedProducerProperties =
			new ExtendedProducerProperties<>(new KinesisProducerProperties());
	String name = "test-stream";

	DescribeStreamResult describeStreamResult = describeStreamResultWithShards(
			Collections.singletonList(new Shard()));

	when(amazonKinesisMock.describeStream(any(DescribeStreamRequest.class)))
			.thenReturn(describeStreamResult);

	ProducerDestination destination = provisioner.provisionProducerDestination(name,
			extendedProducerProperties);

	verify(amazonKinesisMock).describeStream(any(DescribeStreamRequest.class));

	assertThat(destination.getName()).isEqualTo(name);
}
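
The describeStreamResultWithShards helper invoked here (and in the provisioner tests further down) is not included in the snippet. A plausible reconstruction from the SDK's model classes is sketched below; treat it as an illustration rather than the project's actual implementation.

private static DescribeStreamResult describeStreamResultWithShards(List<Shard> shards) {
	// Uses com.amazonaws.services.kinesis.model.StreamDescription and StreamStatus from the AWS SDK for Java v1.
	StreamDescription streamDescription = new StreamDescription()
			.withShards(shards)
			.withStreamStatus(StreamStatus.ACTIVE)
			.withHasMoreShards(false);

	return new DescribeStreamResult().withStreamDescription(streamDescription);
}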
 
Example 10
/**
 * Use the callback mechanism and a lambda function to send aggregated
 * records to Kinesis.
 */
private static void sendViaCallback(AmazonKinesis producer, String streamName, RecordAggregator aggregator) {
	// add a lambda callback to be called when a full record is ready to
	// transmit
	aggregator.onRecordComplete((aggRecord) -> {
		sendRecord(producer, streamName, aggRecord);
	});

	System.out.println("Creating " + ProducerConfig.RECORDS_TO_TRANSMIT + " records...");
	for (int i = 1; i <= ProducerConfig.RECORDS_TO_TRANSMIT; i++) {
		String pk = ProducerUtils.randomPartitionKey();
		String ehk = ProducerUtils.randomExplicitHashKey();
		byte[] data = ProducerUtils.randomData(i, ProducerConfig.RECORD_SIZE_BYTES);
		try {
			aggregator.addUserRecord(pk, ehk, data);
		} catch (Exception e) {
			e.printStackTrace();
			System.err.println("Failed to add user record: " + e.getMessage());
		}
	}

	flushAndFinish(producer, streamName, aggregator);
}
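
The sendRecord and flushAndFinish helpers referenced above are defined elsewhere in that producer sample. As an illustration only, sendRecord typically wraps AmazonKinesis.putRecord; the sketch below assumes the aggregated record type (AggRecord from the kinesis-aggregation library) exposes getPartitionKey(), getExplicitHashKey(), and toRecordBytes(), which is an assumption rather than something confirmed by the snippet.

private static void sendRecord(AmazonKinesis producer, String streamName, AggRecord aggRecord) {
	// Assumed accessors on the aggregated record; adjust to the actual aggregation API in use.
	PutRecordRequest request = new PutRecordRequest()
			.withStreamName(streamName)
			.withPartitionKey(aggRecord.getPartitionKey())
			.withExplicitHashKey(aggRecord.getExplicitHashKey())
			.withData(ByteBuffer.wrap(aggRecord.toRecordBytes()));

	PutRecordResult result = producer.putRecord(request);
	System.out.println("Sent aggregated record to shard " + result.getShardId());
}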
 
Example 11
Source Project: datacollector   Source File: KinesisUtil.java    License: Apache License 2.0
public static List<com.amazonaws.services.kinesis.model.Record> getPreviewRecords(
    ClientConfiguration awsClientConfig,
    KinesisConfigBean conf,
    int maxBatchSize,
    GetShardIteratorRequest getShardIteratorRequest
) throws StageException {
  AmazonKinesis kinesisClient = getKinesisClient(awsClientConfig, conf);

  GetShardIteratorResult getShardIteratorResult = kinesisClient.getShardIterator(getShardIteratorRequest);
  String shardIterator = getShardIteratorResult.getShardIterator();

  GetRecordsRequest getRecordsRequest = new GetRecordsRequest();
  getRecordsRequest.setShardIterator(shardIterator);
  getRecordsRequest.setLimit(maxBatchSize);

  GetRecordsResult getRecordsResult = kinesisClient.getRecords(getRecordsRequest);
  return getRecordsResult.getRecords();
}
 
Example 12
Source Project: flink   Source File: DynamoDBStreamsProxy.java    License: Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * Uses it as the internal client interacting with the DynamoDB streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example 13
Source Project: flink   Source File: AWSUtil.java    License: Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// utilize automatic refreshment of credentials by directly passing the AWSCredentialsProvider
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// If an endpoint is specified, we give preference to using an endpoint and use the region property to
		// sign the request.
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
			configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
			configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example 14
Source Project: flink   Source File: KinesisProxyTest.java    License: Apache License 2.0
@Test
public void testGetShardWithNoNewShards() throws Exception {
	// given
	String fakeStreamName = "fake-stream";

	AmazonKinesis mockClient = mock(AmazonKinesis.class);
	KinesisProxy kinesisProxy = getProxy(mockClient);

	Mockito.when(mockClient.listShards(
		new ListShardsRequest()
		.withStreamName(fakeStreamName)
		.withExclusiveStartShardId(KinesisShardIdGenerator.generateFromShardOrder(1))
	)).thenReturn(new ListShardsResult().withShards(Collections.emptyList()));

	HashMap<String, String> streamHashMap = new HashMap<>();
	streamHashMap.put(fakeStreamName, KinesisShardIdGenerator.generateFromShardOrder(1));

	// when
	GetShardListResult shardListResult = kinesisProxy.getShardList(streamHashMap);

	// then
	Assert.assertFalse(shardListResult.hasRetrievedShards());
}
 
Example 15
Source Project: flink   Source File: KinesisPubsubClient.java    License: Apache License 2.0
private static AmazonKinesis createClientWithCredentials(Properties props) throws AmazonClientException {
	AWSCredentialsProvider credentialsProvider = new EnvironmentVariableCredentialsProvider();
	return AmazonKinesisClientBuilder.standard()
		.withCredentials(credentialsProvider)
		.withEndpointConfiguration(
			new AwsClientBuilder.EndpointConfiguration(
				props.getProperty(ConsumerConfigConstants.AWS_ENDPOINT), "us-east-1"))
		.build();
}
 
Example 16
@Override
public AmazonKinesis getAmazonKinesisClient() {
    if (amazonKinesis == null) {
        amazonKinesis = buildClient();
    }

    return amazonKinesis;
}
 
Example 17
@Override
public AmazonKinesis getClient() {
    ClientConfiguration clientConfiguration = new ClientConfiguration();
    clientConfiguration.setProtocol(Protocol.HTTP);

    return AmazonKinesisClientBuilder
            .standard()
            .withEndpointConfiguration(getContainer().getEndpointConfiguration(LocalStackContainer.Service.KINESIS))
            .withCredentials(getContainer().getDefaultCredentialsProvider())
            .withClientConfiguration(clientConfiguration)
            .build();
}
 
Example 18
Source Project: camel-kafka-connector   Source File: AWSClientUtils.java    License: Apache License 2.0
public static AmazonKinesis newKinesisClient() {
    LOG.debug("Creating a new AWS Kinesis client");
    AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard();

    String awsInstanceType = System.getProperty("aws-service.kinesis.instance.type");
    String region = getRegion();

    if (awsInstanceType == null || awsInstanceType.equals("local-aws-container")) {
        String amazonHost = System.getProperty(AWSConfigs.AMAZON_AWS_HOST);

        LOG.debug("Creating a new AWS Kinesis client to access {}", amazonHost);

        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setProtocol(Protocol.HTTP);

        clientBuilder
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(amazonHost, region))
                .withClientConfiguration(clientConfiguration)
                .withCredentials(new TestAWSCredentialsProvider("accesskey", "secretkey"));
    } else {
        clientBuilder
            .withRegion(region)
            .withCredentials(new TestAWSCredentialsProvider());
    }

    return clientBuilder.build();
}
 
Example 19
Source Project: Flink-CEPplus   Source File: KinesisProxy.java    License: Apache License 2.0
/**
 * Create the Kinesis client, using the provided configuration properties and default {@link ClientConfiguration}.
 * Derived classes can override this method to customize the client configuration.
 * @param configProps the configuration properties used to construct the client
 * @return a new AmazonKinesis client
 */
protected AmazonKinesis createKinesisClient(Properties configProps) {

	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	AWSUtil.setAwsClientConfigProperties(awsClientConfig, configProps);
	return AWSUtil.createKinesisClient(configProps, awsClientConfig);
}
 
Example 20
Source Project: Flink-CEPplus   Source File: KinesisProxyTest.java    License: Apache License 2.0
@Test
public void testClientConfigOverride() {

	Properties configProps = new Properties();
	configProps.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1");
	configProps.setProperty(AWSUtil.AWS_CLIENT_CONFIG_PREFIX + "socketTimeout", "9999");

	KinesisProxyInterface proxy = KinesisProxy.create(configProps);

	AmazonKinesis kinesisClient = Whitebox.getInternalState(proxy, "kinesisClient");
	ClientConfiguration clientConfiguration = Whitebox.getInternalState(kinesisClient,
		"clientConfiguration");
	assertEquals(9999, clientConfiguration.getSocketTimeout());
}
 
Example 21
Source Project: Flink-CEPplus   Source File: KinesisProxyTest.java    License: Apache License 2.0
private KinesisProxy getProxy(AmazonKinesis awsKinesis) {
	Properties kinesisConsumerConfig = new Properties();
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "fake_accesskey");
	kinesisConsumerConfig.setProperty(
		ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "fake_secretkey");
	KinesisProxy kinesisProxy = new KinesisProxy(kinesisConsumerConfig);
	Whitebox.setInternalState(kinesisProxy, "kinesisClient", awsKinesis);

	return kinesisProxy;
}
 
Example 22
Source Project: flink   Source File: KinesisProxy.java    License: Apache License 2.0
/**
 * Create the Kinesis client, using the provided configuration properties and default {@link ClientConfiguration}.
 * Derived classes can override this method to customize the client configuration.
 * @param configProps the configuration properties used to construct the client
 * @return a new AmazonKinesis client
 */
protected AmazonKinesis createKinesisClient(Properties configProps) {

	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	AWSUtil.setAwsClientConfigProperties(awsClientConfig, configProps);
	return AWSUtil.createKinesisClient(configProps, awsClientConfig);
}
 
Example 23
Source Project: flink   Source File: KinesisProxyTest.java    License: Apache License 2.0
@Test
public void testClientConfigOverride() {

	Properties configProps = new Properties();
	configProps.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1");
	configProps.setProperty(AWSUtil.AWS_CLIENT_CONFIG_PREFIX + "socketTimeout", "9999");

	KinesisProxyInterface proxy = KinesisProxy.create(configProps);

	AmazonKinesis kinesisClient = Whitebox.getInternalState(proxy, "kinesisClient");
	ClientConfiguration clientConfiguration = Whitebox.getInternalState(kinesisClient,
		"clientConfiguration");
	assertEquals(9999, clientConfiguration.getSocketTimeout());
}
 
Example 24
Source Project: flink   Source File: KinesisProxyTest.java    License: Apache License 2.0
private KinesisProxy getProxy(AmazonKinesis awsKinesis) {
	Properties kinesisConsumerConfig = new Properties();
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "fake_accesskey");
	kinesisConsumerConfig.setProperty(
		ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "fake_secretkey");
	KinesisProxy kinesisProxy = new KinesisProxy(kinesisConsumerConfig);
	Whitebox.setInternalState(kinesisProxy, "kinesisClient", awsKinesis);

	return kinesisProxy;
}
 
Example 25
Source Project: flink   Source File: KinesisProxyTest.java    License: Apache License 2.0
private KinesisProxy getProxy(AmazonKinesis awsKinesis) {
	Properties kinesisConsumerConfig = new Properties();
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
	kinesisConsumerConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "fake_accesskey");
	kinesisConsumerConfig.setProperty(
		ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "fake_secretkey");
	KinesisProxy kinesisProxy = new KinesisProxy(kinesisConsumerConfig);
	Whitebox.setInternalState(kinesisProxy, "kinesisClient", awsKinesis);

	return kinesisProxy;
}
 
Example 26
@Override
public AmazonKinesis amazonKinesis() {
    return decorateWithConfigsAndBuild(
        AmazonKinesisClientBuilder.standard(),
        LocalstackDocker::getEndpointKinesis
    );
}
 
Example 27
Source Project: pitchfork   Source File: HaystackKinesisForwarderTest.java    License: Apache License 2.0
private static AmazonKinesis setupKinesisClient() {
    var endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(KINESIS_SERVICE_ENDPOINT, "us-west-1");

    return AmazonKinesisClientBuilder.standard()
            .withCredentials(kinesisContainer.getDefaultCredentialsProvider())
            .withEndpointConfiguration(endpointConfiguration)
            .build();
}
 
Example 28
public KinesisStreamProvisioner(AmazonKinesis amazonKinesis,
		KinesisBinderConfigurationProperties kinesisBinderConfigurationProperties) {

	Assert.notNull(amazonKinesis, "'amazonKinesis' must not be null");
	Assert.notNull(kinesisBinderConfigurationProperties,
			"'kinesisBinderConfigurationProperties' must not be null");
	this.amazonKinesis = amazonKinesis;
	this.configurationProperties = kinesisBinderConfigurationProperties;
}
 
Example 29
@Test
void testProvisionConsumerSuccessfulWithExistingStream() {
	AmazonKinesis amazonKinesisMock = mock(AmazonKinesis.class);
	KinesisBinderConfigurationProperties binderProperties = new KinesisBinderConfigurationProperties();
	KinesisStreamProvisioner provisioner = new KinesisStreamProvisioner(
			amazonKinesisMock, binderProperties);

	ExtendedConsumerProperties<KinesisConsumerProperties> extendedConsumerProperties =
			new ExtendedConsumerProperties<>(
			new KinesisConsumerProperties());

	String name = "test-stream";
	String group = "test-group";

	DescribeStreamResult describeStreamResult = describeStreamResultWithShards(
			Collections.singletonList(new Shard()));

	when(amazonKinesisMock.describeStream(any(DescribeStreamRequest.class)))
			.thenReturn(describeStreamResult);

	ConsumerDestination destination = provisioner.provisionConsumerDestination(name,
			group, extendedConsumerProperties);

	verify(amazonKinesisMock).describeStream(any(DescribeStreamRequest.class));

	assertThat(destination.getName()).isEqualTo(name);
}
 
Example 30
@Test
void testProvisionConsumerExistingStreamUpdateShards() {
	AmazonKinesis amazonKinesisMock = mock(AmazonKinesis.class);
	ArgumentCaptor<UpdateShardCountRequest> updateShardCaptor = ArgumentCaptor
			.forClass(UpdateShardCountRequest.class);
	String name = "test-stream";
	String group = "test-group";
	int targetShardCount = 2;
	KinesisBinderConfigurationProperties binderProperties = new KinesisBinderConfigurationProperties();
	binderProperties.setMinShardCount(targetShardCount);
	binderProperties.setAutoAddShards(true);
	KinesisStreamProvisioner provisioner = new KinesisStreamProvisioner(
			amazonKinesisMock, binderProperties);

	ExtendedConsumerProperties<KinesisConsumerProperties> extendedConsumerProperties =
			new ExtendedConsumerProperties<>(
			new KinesisConsumerProperties());

	DescribeStreamResult describeOriginalStream = describeStreamResultWithShards(
			Collections.singletonList(new Shard()));

	DescribeStreamResult describeUpdatedStream = describeStreamResultWithShards(
			Arrays.asList(new Shard(), new Shard()));

	when(amazonKinesisMock.describeStream(any(DescribeStreamRequest.class)))
			.thenReturn(describeOriginalStream).thenReturn(describeUpdatedStream);

	provisioner.provisionConsumerDestination(name, group, extendedConsumerProperties);

	verify(amazonKinesisMock, times(1)).updateShardCount(updateShardCaptor.capture());

	assertThat(updateShardCaptor.getValue().getStreamName()).isEqualTo(name);
	assertThat(updateShardCaptor.getValue().getScalingType())
			.isEqualTo(ScalingType.UNIFORM_SCALING.name());
	assertThat(updateShardCaptor.getValue().getTargetShardCount())
			.isEqualTo(targetShardCount);
}