com.amazonaws.client.builder.AwsClientBuilder Java Examples

The following examples show how to use com.amazonaws.client.builder.AwsClientBuilder. Each example is drawn from an open-source project; the source file, project, and license are noted above the snippet.
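
All of the examples follow the same basic pattern: obtain a service-specific builder via standard(), supply credentials and (optionally) a ClientConfiguration, then set either a region or an AwsClientBuilder.EndpointConfiguration (the builder accepts one or the other, not both). As a minimal sketch of that pattern, assuming the AWS SDK for Java v1 classes (com.amazonaws.auth.*, com.amazonaws.services.s3.*, com.amazonaws.client.builder.AwsClientBuilder) and placeholder endpoint and credential values, an S3 client for a custom endpoint might be built like this:

BasicAWSCredentials credentials = new BasicAWSCredentials("access-key", "secret-key");

AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withCredentials(new AWSStaticCredentialsProvider(credentials))
        // Use either withRegion(...) or withEndpointConfiguration(...), never both
        .withEndpointConfiguration(
                new AwsClientBuilder.EndpointConfiguration("https://s3.example.com", "us-east-1"))
        .build();

Custom endpoints are the most common reason to use EndpointConfiguration: the examples below target local mocks (DynamoDB Local, Kinesalite, S3Mock, fakes3), S3-compatible stores (MinIO, Tencent COS, the Google Cloud Storage XML API), and VPC endpoints.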
Example #1
Source File: MinIOServiceImpl.java    From zfile with MIT License
@Override
public void init(Integer driveId) {
    this.driveId = driveId;
    Map<String, StorageConfig> stringStorageConfigMap =
            storageConfigService.selectStorageConfigMapByDriveId(driveId);
    String accessKey = stringStorageConfigMap.get(StorageConfigConstant.ACCESS_KEY).getValue();
    String secretKey = stringStorageConfigMap.get(StorageConfigConstant.SECRET_KEY).getValue();
    String endPoint = stringStorageConfigMap.get(StorageConfigConstant.ENDPOINT_KEY).getValue();
    bucketName = stringStorageConfigMap.get(StorageConfigConstant.BUCKET_NAME_KEY).getValue();
    basePath = stringStorageConfigMap.get(StorageConfigConstant.BASE_PATH).getValue();
    isPrivate = Convert.toBool(stringStorageConfigMap.get(StorageConfigConstant.IS_PRIVATE).getValue(), true);

    if (Objects.isNull(accessKey) || Objects.isNull(secretKey) || Objects.isNull(endPoint) || Objects.isNull(bucketName)) {
        log.debug("Failed to initialize storage strategy [{}]: incomplete parameters", getStorageTypeEnum().getDescription());
        isInitialized = false;
    } else {
        BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
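        // MinIO exposes an S3-compatible API; path-style access is enabled because buckets are
        // addressed as <endpoint>/<bucket> rather than via virtual-host-style subdomains.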
        s3Client = AmazonS3ClientBuilder.standard()
                .withPathStyleAccessEnabled(true)
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, "minio")).build();

        testConnection();
        isInitialized = true;
    }
}
 
Example #2
Source File: AwsClientTracing.java    From zipkin-aws with Apache License 2.0
public <Builder extends AwsClientBuilder, Client> Client build(
    AwsClientBuilder<Builder, Client> builder
) {
  if (builder == null) throw new NullPointerException("builder == null");
  if (builder instanceof AwsAsyncClientBuilder) {
    ExecutorFactory executorFactory = ((AwsAsyncClientBuilder) builder).getExecutorFactory();
    if (executorFactory == null) {
      ClientConfiguration clientConfiguration = builder.getClientConfiguration();
      if (clientConfiguration == null) {
        clientConfiguration = defaultClientConfigurationFactory.getConfig();
      }
      ((AwsAsyncClientBuilder) builder).setExecutorFactory(
          new TracingExecutorFactory(currentTraceContext, clientConfiguration)
      );
    } else {
      ((AwsAsyncClientBuilder) builder).setExecutorFactory(
          new TracingExecutorFactoryWrapper(currentTraceContext, executorFactory)
      );
    }
  }
  builder.withRequestHandlers(new TracingRequestHandler(httpTracing));
  return builder.build();
}
 
Example #3
Source File: AWSUtil.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refresh by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion to null to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example #4
Source File: DynamoDBLocalFixture.java    From aws-dynamodb-examples with Apache License 2.0
/**
 * You can use mvn to run DynamoDBLocalFixture, e.g.
 * <p>
 * $ mvn clean package
 * <p>
 * $ mvn exec:java -Dexec.mainClass="com.amazonaws.services.dynamodbv2.DynamoDBLocalFixture" \
 * -Dexec.classpathScope="test" \
 * -Dsqlite4java.library.path=target/dependencies
 * <p>
 * It's recommended to run "aws configure" one time before you run DynamoDBLocalFixture
 *
 * @param args - no args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    AmazonDynamoDB dynamodb = null;
    try {
        // Create an in-memory and in-process instance of DynamoDB Local that skips HTTP
        dynamodb = DynamoDBEmbedded.create().amazonDynamoDB();
        // use the DynamoDB API with DynamoDBEmbedded
        listTables(dynamodb.listTables(), "DynamoDB Embedded");
    } finally {
        // Shutdown the thread pools in DynamoDB Local / Embedded
        if(dynamodb != null) {
            dynamodb.shutdown();
        }
    }
    
    // Create an in-memory and in-process instance of DynamoDB Local that runs over HTTP
    final String[] localArgs = { "-inMemory" };
    DynamoDBProxyServer server = null;
    try {
        server = ServerRunner.createServerFromCommandLineArgs(localArgs);
        server.start();

        dynamodb = AmazonDynamoDBClientBuilder.standard().withEndpointConfiguration(
            // we can use any region here
            new AwsClientBuilder.EndpointConfiguration("http://localhost:8000", "us-west-2"))
            .build();

        // use the DynamoDB API over HTTP
        listTables(dynamodb.listTables(), "DynamoDB Local over HTTP");
    } finally {
        // Stop the DynamoDB Local endpoint
        if(server != null) {
            server.stop();
        }
    }
}
 
Example #5
Source File: TencentServiceImpl.java    From zfile with MIT License
@Override
public void init(Integer driveId) {
    this.driveId = driveId;
    Map<String, StorageConfig> stringStorageConfigMap =
            storageConfigService.selectStorageConfigMapByDriveId(driveId);
    String secretId = stringStorageConfigMap.get(StorageConfigConstant.SECRET_ID_KEY).getValue();
    String secretKey = stringStorageConfigMap.get(StorageConfigConstant.SECRET_KEY).getValue();
    String endPoint = stringStorageConfigMap.get(StorageConfigConstant.ENDPOINT_KEY).getValue();
    bucketName = stringStorageConfigMap.get(StorageConfigConstant.BUCKET_NAME_KEY).getValue();
    domain = stringStorageConfigMap.get(StorageConfigConstant.DOMAIN_KEY).getValue();
    basePath = stringStorageConfigMap.get(StorageConfigConstant.BASE_PATH).getValue();
    isPrivate = Convert.toBool(stringStorageConfigMap.get(StorageConfigConstant.IS_PRIVATE).getValue(), true);

    if (Objects.isNull(secretId) || Objects.isNull(secretKey) || Objects.isNull(endPoint) || Objects.isNull(bucketName)) {
        log.debug("Failed to initialize storage strategy [{}]: incomplete parameters", getStorageTypeEnum().getDescription());
        isInitialized = false;
    } else {
        BasicAWSCredentials credentials = new BasicAWSCredentials(secretId, secretKey);
        s3Client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, "cos")).build();

        testConnection();
        isInitialized = true;
    }
}
 
Example #6
Source File: AWSUtil.java    From flink with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refresh by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion to null to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example #7
Source File: DefaultS3ClientBuilderFactory.java    From beam with Apache License 2.0
@Override
public AmazonS3ClientBuilder createBuilder(S3Options s3Options) {
  AmazonS3ClientBuilder builder =
      AmazonS3ClientBuilder.standard().withCredentials(s3Options.getAwsCredentialsProvider());

  if (s3Options.getClientConfiguration() != null) {
    builder = builder.withClientConfiguration(s3Options.getClientConfiguration());
  }

  if (!Strings.isNullOrEmpty(s3Options.getAwsServiceEndpoint())) {
    builder =
        builder.withEndpointConfiguration(
            new AwsClientBuilder.EndpointConfiguration(
                s3Options.getAwsServiceEndpoint(), s3Options.getAwsRegion()));
  } else if (!Strings.isNullOrEmpty(s3Options.getAwsRegion())) {
    builder = builder.withRegion(s3Options.getAwsRegion());
  } else {
    LOG.info(
        "The AWS S3 Beam extension was included in this build, but the awsRegion flag "
            + "was not specified. If you don't plan to use S3, then ignore this message.");
  }
  return builder;
}
 
Example #8
Source File: TestAmazonS3Target.java    From datacollector with Apache License 2.0
@BeforeClass
public static void setUpClass() throws IOException, InterruptedException {
  File dir = new File(new File("target", UUID.randomUUID().toString()), "fakes3_root").getAbsoluteFile();
  Assert.assertTrue(dir.mkdirs());
  fakeS3Root = dir.getAbsolutePath();
  port = TestUtil.getFreePort();
  fakeS3 = new FakeS3(fakeS3Root, port);
  Assume.assumeTrue("Please install fakes3 in your system", fakeS3.fakes3Installed());
  //Start the fakes3 server
  executorService = Executors.newSingleThreadExecutor();
  executorService.submit(fakeS3);

  BasicAWSCredentials credentials = new BasicAWSCredentials("foo", "bar");
  s3client = AmazonS3ClientBuilder
      .standard()
      .withCredentials(new AWSStaticCredentialsProvider(credentials))
      .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:" + port, null))
      .withPathStyleAccessEnabled(true)
      .withChunkedEncodingDisabled(true) // FakeS3 does not correctly calculate checksums with chunked encoding enabled.
      .build();

  TestUtil.createBucket(s3client, BUCKET_NAME);
  TestUtil.createBucket(s3client, SECOND_BUCKET_NAME);
}
 
Example #9
Source File: S3Service.java    From crate with Apache License 2.0
private AmazonS3 buildClient(final S3ClientSettings clientSettings) {
    final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
    builder.withCredentials(buildCredentials(LOGGER, clientSettings));
    builder.withClientConfiguration(buildConfiguration(clientSettings));

    final String endpoint = Strings.hasLength(clientSettings.endpoint)
        ? clientSettings.endpoint
        : Constants.S3_HOSTNAME;
    LOGGER.debug("using endpoint [{}]", endpoint);

    // If the endpoint configuration isn't set on the builder then the default behaviour is to try
    // and work out what region we are in and use an appropriate endpoint - see AwsClientBuilder#setRegion.
    // In contrast, directly-constructed clients use s3.amazonaws.com unless otherwise instructed. We currently
    // use a directly-constructed client, and need to keep the existing behaviour to avoid a breaking change,
    // so to move to using the builder we must set it explicitly to keep the existing behaviour.
    //
    // We do this because directly constructing the client is deprecated (was already deprecated in 1.1.223 too)
    // so this change removes that usage of a deprecated API.
    builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, null));

    return builder.build();
}
 
Example #10
Source File: MoviesDeleteTable.java    From aws-doc-sdk-examples with Apache License 2.0
public static void main(String[] args) throws Exception {

        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.standard()
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:8000", "us-west-2"))
            .build();

        DynamoDB dynamoDB = new DynamoDB(client);

        Table table = dynamoDB.getTable("Movies");

        try {
            System.out.println("Attempting to delete table; please wait...");
            table.delete();
            table.waitForDelete();
            System.out.print("Success.");

        }
        catch (Exception e) {
            System.err.println("Unable to delete table: ");
            System.err.println(e.getMessage());
        }
    }
 
Example #11
Source File: AmazonS3Provider.java    From emodb with Apache License 2.0
private static AmazonS3 getAmazonS3(final S3BucketConfiguration s3BucketConfiguration, Clock clock) {
    AmazonS3ClientBuilder amazonS3ClientBuilder = AmazonS3ClientBuilder.standard()
            .withCredentials(getAwsCredentialsProvider(s3BucketConfiguration))
            .withAccelerateModeEnabled(s3BucketConfiguration.getAccelerateModeEnabled());
    S3ClientConfiguration.RateLimitConfiguration rateLimitConfiguration = new S3ClientConfiguration.RateLimitConfiguration();
    if (null != s3BucketConfiguration.getRegion()) {
        amazonS3ClientBuilder
                .withRegion(Regions.fromName(s3BucketConfiguration.getRegion()));
    } else if (null != s3BucketConfiguration.getS3ClientConfiguration()) {
        S3ClientConfiguration.EndpointConfiguration endpointConfiguration = s3BucketConfiguration.getS3ClientConfiguration().getEndpointConfiguration();
        amazonS3ClientBuilder
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpointConfiguration.getServiceEndpoint(), endpointConfiguration.getSigningRegion()));
        rateLimitConfiguration = s3BucketConfiguration.getS3ClientConfiguration().getRateLimitConfiguration();
    }
    AmazonS3 amazonS3 = amazonS3ClientBuilder
            .build();

    return new S3RateLimiter(clock, rateLimitConfiguration)
            .rateLimit(amazonS3);
}
 
Example #12
Source File: CloudStreamKinesisToWebfluxApplicationTests.java    From spring-cloud-stream-samples with Apache License 2.0
@Bean
public AmazonKinesisAsync amazonKinesis() {
	// See https://github.com/mhart/kinesalite#cbor-protocol-issues-with-the-java-sdk
	System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true");

	return AmazonKinesisAsyncClientBuilder.standard()
			.withClientConfiguration(
					new ClientConfiguration()
							.withMaxErrorRetry(0)
							.withConnectionTimeout(1000))
			.withEndpointConfiguration(
					new AwsClientBuilder.EndpointConfiguration("http://localhost:" + DEFAULT_KINESALITE_PORT,
							Regions.DEFAULT_REGION.getName()))
			.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("", "")))
			.build();
}
 
Example #13
Source File: AWSUtil.java    From flink with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refresh by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// If an endpoint is specified, we give preference to using an endpoint and use the region property to
		// sign the request.
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
			configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
			configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example #14
Source File: AwsS3Test.java    From ecs-sync with Apache License 2.0
@Before
public void setup() throws Exception {
    Properties syncProperties = TestConfig.getProperties();
    endpoint = syncProperties.getProperty(TestConfig.PROP_S3_ENDPOINT);
    accessKey = syncProperties.getProperty(TestConfig.PROP_S3_ACCESS_KEY_ID);
    secretKey = syncProperties.getProperty(TestConfig.PROP_S3_SECRET_KEY);
    region = syncProperties.getProperty(TestConfig.PROP_S3_REGION);
    String proxyUri = syncProperties.getProperty(TestConfig.PROP_HTTP_PROXY_URI);
    Assume.assumeNotNull(endpoint, accessKey, secretKey);
    endpointUri = new URI(endpoint);

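    // "S3SignerType" forces the legacy S3 signer (Signature Version 2), which some S3-compatible endpoints still require.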
    ClientConfiguration config = new ClientConfiguration().withSignerOverride("S3SignerType");
    if (proxyUri != null) {
        URI uri = new URI(proxyUri);
        config.setProxyHost(uri.getHost());
        config.setProxyPort(uri.getPort());
    }

    AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)))
            .withClientConfiguration(config)
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region));

    s3 = builder.build();
}
 
Example #15
Source File: GetMediaWorker.java    From amazon-kinesis-video-streams-parser-library with Apache License 2.0
private GetMediaWorker(Regions region,
        AWSCredentialsProvider credentialsProvider,
        String streamName,
        StartSelector startSelector,
        String endPoint,
        MkvElementVisitor elementVisitor) {
    super(region, credentialsProvider, streamName);

    AmazonKinesisVideoMediaClientBuilder builder = AmazonKinesisVideoMediaClientBuilder.standard()
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, region.getName()))
            .withCredentials(getCredentialsProvider());

    this.videoMedia = builder.build();
    this.elementVisitor = elementVisitor;
    this.startSelector = startSelector;
}
 
Example #16
Source File: ContinuousGetMediaWorker.java    From amazon-kinesis-video-streams-parser-library with Apache License 2.0
private ContinuousGetMediaWorker(Regions region,
        AWSCredentialsProvider credentialsProvider,
        String streamName,
        StartSelector startSelector,
        String endPoint,
        GetMediaResponseStreamConsumerFactory consumerFactory) {
    super(region, credentialsProvider, streamName);

    AmazonKinesisVideoMediaClientBuilder builder = AmazonKinesisVideoMediaClientBuilder.standard()
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, region.getName()))
            .withCredentials(getCredentialsProvider());

    this.videoMedia = builder.build();
    this.consumerFactory = consumerFactory;
    this.startSelector = startSelector;
}
 
Example #17
Source File: AwsClientTracingTest.java    From zipkin-aws with Apache License 2.0
@Before
public void setup() {
  String endpoint = "http://localhost:" + mockServer.getPort();
  HttpTracing httpTracing = HttpTracing.create(tracing);
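  // AwsClientTracing (see Example #2) registers a tracing request handler on the builder before the client is built.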
  AmazonDynamoDBClientBuilder clientBuilder = AmazonDynamoDBClientBuilder.standard()
      .withCredentials(
          new AWSStaticCredentialsProvider(new BasicAWSCredentials("access", "secret")))
      .withEndpointConfiguration(
          new AwsClientBuilder.EndpointConfiguration(endpoint, "us-east-1"));

  dbClient = AwsClientTracing.create(httpTracing).build(clientBuilder);

  s3Client = AwsClientTracing.create(httpTracing).build(AmazonS3ClientBuilder.standard()
      .withCredentials(
          new AWSStaticCredentialsProvider(new BasicAWSCredentials("access", "secret")))
      .withEndpointConfiguration(
          new AwsClientBuilder.EndpointConfiguration(endpoint, "us-east-1"))
      .enableForceGlobalBucketAccess());
}
 
Example #18
Source File: AWSClientUtils.java    From camel-kafka-connector with Apache License 2.0
public static AmazonSNS newSNSClient() {
    LOG.debug("Creating a custom SNS client for running a AWS SNS test");
    AmazonSNSClientBuilder clientBuilder = AmazonSNSClientBuilder
            .standard();

    String awsInstanceType = System.getProperty("aws-service.instance.type");
    String region = getRegion();

    if (awsInstanceType == null || awsInstanceType.equals("local-aws-container")) {
        String amazonHost = System.getProperty(AWSConfigs.AMAZON_AWS_HOST);

        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setProtocol(Protocol.HTTP);

        clientBuilder
                .withClientConfiguration(clientConfiguration)
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(amazonHost, region))
                .withCredentials(new TestAWSCredentialsProvider("accesskey", "secretkey"));
    } else {
        clientBuilder
                .withRegion(region)
                .withCredentials(new TestAWSCredentialsProvider());
    }

    return clientBuilder.build();
}
 
Example #19
Source File: AWSSecretsManagerDriver.java    From aws-secretsmanager-jdbc with Apache License 2.0
/**
 * Constructs the driver setting the properties from the properties file using system properties as defaults.
 * Sets the secret cache to the cache that was passed in.
 *
 * @param cache                                             Secret cache to use to retrieve secrets
 */
protected AWSSecretsManagerDriver(SecretCache cache) {

    final Config config = Config.loadMainConfig();

    String vpcEndpointUrl = config.getStringPropertyWithDefault(PROPERTY_PREFIX+"."+PROPERTY_VPC_ENDPOINT_URL, null);
    String vpcEndpointRegion = config.getStringPropertyWithDefault(PROPERTY_PREFIX+"."+PROPERTY_VPC_ENDPOINT_REGION, null);

    if (vpcEndpointUrl == null || vpcEndpointUrl.isEmpty() || vpcEndpointRegion == null || vpcEndpointRegion.isEmpty()) {
        this.secretCache = cache;
    } else {
        AWSSecretsManagerClientBuilder builder = AWSSecretsManagerClientBuilder.standard();
        builder.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(vpcEndpointUrl, vpcEndpointRegion));

        this.secretCache = new SecretCache(builder);
    }

    setProperties();
    AWSSecretsManagerDriver.register(this);
}
 
Example #20
Source File: SqsConsumer.java    From datacollector with Apache License 2.0
private AmazonSQSAsync buildAsyncClient() {
  final AmazonSQSAsyncClientBuilder builder = AmazonSQSAsyncClientBuilder.standard();
  if(conf.region == AwsRegion.OTHER) {
    builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(conf.endpoint, null));
  } else {
    builder.withRegion(conf.region.getId());
  }
  builder.setCredentials(credentials);
  builder.setClientConfiguration(clientConfiguration);

  return builder.build();
}
 
Example #21
Source File: ListGcsObjects.java    From java-docs-samples with Apache License 2.0
public static void listGcsObjects(
    String googleAccessKeyId, String googleAccessKeySecret, String bucketName) {

  // String googleAccessKeyId = "your-google-access-key-id";
  // String googleAccessKeySecret = "your-google-access-key-secret";
  // String bucketName = "bucket-name";

  // Create a BasicAWSCredentials using Cloud Storage HMAC credentials.
  BasicAWSCredentials googleCreds =
      new BasicAWSCredentials(googleAccessKeyId, googleAccessKeySecret);

  // Create a new client and do the following:
  // 1. Change the endpoint URL to use the Google Cloud Storage XML API endpoint.
  // 2. Use Cloud Storage HMAC Credentials.
  AmazonS3 interopClient =
      AmazonS3ClientBuilder.standard()
          .withEndpointConfiguration(
              new AwsClientBuilder.EndpointConfiguration(
                  "https://storage.googleapis.com", "auto"))
          .withCredentials(new AWSStaticCredentialsProvider(googleCreds))
          .build();

  // Call GCS to list current objects
  ObjectListing objects = interopClient.listObjects(bucketName);

  // Print objects names
  System.out.println("Objects:");
  for (S3ObjectSummary object : objects.getObjectSummaries()) {
    System.out.println(object.getKey());
  }

  // Explicitly clean up client resources.
  interopClient.shutdown();
}
 
Example #22
Source File: MockedS3Client.java    From lightning with MIT License
public static MockedS3Client createInstance(String region) {
    String s3MockUrl = getMockedUrl();
    AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(s3MockUrl, region);
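    // Local S3 mocks generally do not support virtual-host-style bucket addressing, so path-style access is enabled.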
    AmazonS3 amazonS3 = AmazonS3ClientBuilder
            .standard()
            .withPathStyleAccessEnabled(true)
            .withEndpointConfiguration(endpointConfiguration)
            .build();
    return new MockedS3Client(amazonS3);
}
 
Example #23
Source File: S3NotebookRepoTest.java    From zeppelin with Apache License 2.0
@Before
public void setUp() throws IOException {
  String bucket = "test-bucket";
  notebookRepo = new S3NotebookRepo();
  ZeppelinConfiguration conf = ZeppelinConfiguration.create();
  System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(),
          s3Proxy.getUri().toString());
  System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_BUCKET.getVarName(),
          bucket);
  System.setProperty("aws.accessKeyId", s3Proxy.getAccessKey());
  System.setProperty("aws.secretKey", s3Proxy.getSecretKey());

  notebookRepo.init(conf);

  // create bucket for notebook
  AmazonS3 s3Client = AmazonS3ClientBuilder
          .standard()
          .withCredentials(
                  new AWSStaticCredentialsProvider(
                          new BasicAWSCredentials(s3Proxy.getAccessKey(),
                                  s3Proxy.getSecretKey())))
          .withEndpointConfiguration(
                  new AwsClientBuilder.EndpointConfiguration(s3Proxy.getUri().toString(),
                          Regions.US_EAST_1.getName()))
          .build();
  s3Client.createBucket(bucket);
}
 
Example #24
Source File: MockedS3ClientTest.java    From lightning with MIT License
@BeforeClass
public void setupEnv() {
    int port = 8001;

    AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(S3_MOCK_URL, REGION);
    amazonS3Client = AmazonS3ClientBuilder
            .standard()
            .withPathStyleAccessEnabled(true)
            .withEndpointConfiguration(endpointConfiguration)
            .build();

    s3Mock = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
    s3Mock.start();
}
 
Example #25
Source File: DynamoDBSourceConfig.java    From pulsar with Apache License 2.0
public AmazonDynamoDB buildDynamoDBClient(AwsCredentialProviderPlugin credPlugin) {
    AmazonDynamoDBClientBuilder builder = AmazonDynamoDBClientBuilder.standard();

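    // Only one of region or endpoint configuration may be set on an AwsClientBuilder, so these two branches should be mutually exclusive in practice.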
    if (!this.getAwsEndpoint().isEmpty()) {
        builder.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(this.getDynamoEndpoint(), this.getAwsRegion()));
    }
    if (!this.getAwsRegion().isEmpty()) {
        builder.setRegion(this.getAwsRegion());
    }
    builder.setCredentials(credPlugin.getCredentialProvider());
    return builder.build();
}
 
Example #26
Source File: MoviesItemOps02.java    From aws-doc-sdk-examples with Apache License 2.0
public static void main(String[] args) throws Exception {

        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.standard()
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:8000", "us-west-2"))
            .build();

        DynamoDB dynamoDB = new DynamoDB(client);

        Table table = dynamoDB.getTable("Movies");

        int year = 2015;
        String title = "The Big New Movie";

        GetItemSpec spec = new GetItemSpec().withPrimaryKey("year", year, "title", title);

        try {
            System.out.println("Attempting to read the item...");
            Item outcome = table.getItem(spec);
            System.out.println("GetItem succeeded: " + outcome);

        }
        catch (Exception e) {
            System.err.println("Unable to read item: " + year + " " + title);
            System.err.println(e.getMessage());
        }

    }
 
Example #27
Source File: DynamoDBSourceConfig.java    From pulsar with Apache License 2.0
public AmazonCloudWatch buildCloudwatchClient(AwsCredentialProviderPlugin credPlugin) {
    AmazonCloudWatchClientBuilder builder = AmazonCloudWatchClientBuilder.standard();

    if (!this.getAwsEndpoint().isEmpty()) {
        builder.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(this.getCloudwatchEndpoint(), this.getAwsRegion()));
    }
    if (!this.getAwsRegion().isEmpty()) {
        builder.setRegion(this.getAwsRegion());
    }
    builder.setCredentials(credPlugin.getCredentialProvider());
    return builder.build();
}
 
Example #28
Source File: FirehoseTarget.java    From datacollector with Apache License 2.0
@Override
protected List<ConfigIssue> init() {
  List<ConfigIssue> issues = super.init();
  errorRecordHandler = new DefaultErrorRecordHandler(getContext());
  if (!issues.isEmpty()) {
    return issues;
  }

  conf.init(getContext(), issues);
  if (!issues.isEmpty()) {
    return issues;
  }

  generatorFactory = conf.dataFormatConfig.getDataGeneratorFactory();
  try {
    AmazonKinesisFirehoseClientBuilder builder = AmazonKinesisFirehoseClientBuilder
      .standard()
      .withCredentials(AWSUtil.getCredentialsProvider(conf.awsConfig));

    if (conf.region == AwsRegion.OTHER) {
      builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(conf.endpoint, null));
    } else {
      builder.withRegion(conf.region.getId());
    }

    firehoseClient = builder.build();
  } catch (StageException ex) {
    LOG.error(Utils.format(Errors.KINESIS_12.getMessage(), ex.toString()), ex);
    issues.add(getContext().createConfigIssue(
        Groups.KINESIS.name(),
        "kinesisConfig.awsConfig.awsAccessKeyId",
        Errors.KINESIS_12,
        ex.toString()
    ));
  }

  return issues;
}
 
Example #29
Source File: ListGcsBuckets.java    From java-docs-samples with Apache License 2.0
public static void listGcsBuckets(String googleAccessKeyId, String googleAccessKeySecret) {

    // String googleAccessKeyId = "your-google-access-key-id";
    // String googleAccessKeySecret = "your-google-access-key-secret";

    // Create a BasicAWSCredentials using Cloud Storage HMAC credentials.
    BasicAWSCredentials googleCreds =
        new BasicAWSCredentials(googleAccessKeyId, googleAccessKeySecret);

    // Create a new client and do the following:
    // 1. Change the endpoint URL to use the Google Cloud Storage XML API endpoint.
    // 2. Use Cloud Storage HMAC Credentials.
    AmazonS3 interopClient =
        AmazonS3ClientBuilder.standard()
            .withEndpointConfiguration(
                new AwsClientBuilder.EndpointConfiguration(
                    "https://storage.googleapis.com", "auto"))
            .withCredentials(new AWSStaticCredentialsProvider(googleCreds))
            .build();

    // Call GCS to list current buckets
    List<Bucket> buckets = interopClient.listBuckets();

    // Print bucket names
    System.out.println("Buckets:");
    for (Bucket bucket : buckets) {
      System.out.println(bucket.getName());
    }

    // Explicitly clean up client resources.
    interopClient.shutdown();
  }
 
Example #30
Source File: KinesisPubsubClient.java    From flink with Apache License 2.0
private static AmazonKinesis createClientWithCredentials(Properties props) throws AmazonClientException {
	AWSCredentialsProvider credentialsProvider = new EnvironmentVariableCredentialsProvider();
	return AmazonKinesisClientBuilder.standard()
		.withCredentials(credentialsProvider)
		.withEndpointConfiguration(
			new AwsClientBuilder.EndpointConfiguration(
				props.getProperty(ConsumerConfigConstants.AWS_ENDPOINT), "us-east-1"))
		.build();
}