com.amazonaws.services.s3.AmazonS3Client Java Examples

The following examples show how to use com.amazonaws.services.s3.AmazonS3Client. All of them are taken from real open-source projects; the header above each example names the originating project, source file, and license.
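Most of the examples construct the client directly with new AmazonS3Client(...). Those constructors still work but are deprecated in later 1.x releases of the SDK in favor of AmazonS3ClientBuilder; here is a minimal sketch of the builder form (the region and credentials provider are illustrative, not taken from the examples):

// Sketch: builder-based construction; region choice is a placeholder.
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withRegion(Regions.US_EAST_1)
        .withCredentials(new DefaultAWSCredentialsProviderChain())
        .build();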
Example #1
Source File: S3Source.java    From sequenceiq-samples with Apache License 2.0
@Override
protected void doStart() {
    AWSCredentials myCredentials = new BasicAWSCredentials(accessKey, secretKey);
    AmazonS3 s3Client = new AmazonS3Client(myCredentials);
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucket);
    ObjectListing objectListing = s3Client.listObjects(listObjectsRequest);
    ChannelProcessor channelProcessor = getChannelProcessor();
    for (S3ObjectSummary s3ObjectSummary : objectListing.getObjectSummaries()) {
        String file = s3ObjectSummary.getKey();
        LOGGER.info("Read the content of {}", file);
        GetObjectRequest objectRequest = new GetObjectRequest(bucket, file);
        // try-with-resources closes the object's content stream and releases the HTTP connection
        try (S3Object objectPortion = s3Client.getObject(objectRequest)) {
            long startTime = System.currentTimeMillis();
            processLines(channelProcessor, objectPortion.getObjectContent());
            LOGGER.info("Processing of {} took {} ms", file, System.currentTimeMillis() - startTime);
        } catch (IOException e) {
            LOGGER.warn("Cannot process {}, skipping", file, e);
        }
    }
}
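Note that a single listObjects call returns at most one page of results (up to 1,000 keys), so the loop above only sees the first page. A sketch of draining every page with listNextBatchOfObjects (added for illustration, not part of the original source):

// Sketch: iterate all listing pages instead of only the first one.
ObjectListing listing = s3Client.listObjects(listObjectsRequest);
while (true) {
    for (S3ObjectSummary summary : listing.getObjectSummaries()) {
        // process summary.getKey() as in the loop above
    }
    if (!listing.isTruncated()) {
        break;
    }
    listing = s3Client.listNextBatchOfObjects(listing);
}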
 
Example #2
Source File: PublishExecutorTest.java    From gocd-s3-artifacts with Apache License 2.0
@Test
public void shouldFailIfNoFilesToUploadBasedOnSource() {
    AmazonS3Client mockClient = mockClient();

    Config config = new Config(Maps.builder()
            .with(Constants.SOURCEDESTINATIONS, Maps.builder().with("value", "[{\"source\": \"target/*\", \"destination\": \"\"}]").build())
            .with(Constants.DESTINATION_PREFIX, Maps.builder().with("value", "").build())
            .with(Constants.ARTIFACTS_BUCKET, Maps.builder().with("value", "").build())
            .build());

    TaskExecutionResult result = executeMockPublish(
            mockClient,
            config,
            new String[]{}
    );

    assertFalse(result.isSuccessful());
    assertThat(result.message(), containsString("Source target/* didn't yield any files to upload"));
}
 
Example #3
Source File: S3StorageIT.java    From digdag with Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(TEST_S3_ENDPOINT, not(isEmptyOrNullString()));

    projectDir = folder.getRoot().toPath().resolve("foobar");
    config = folder.newFile().toPath();

    client = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();

    AWSCredentials credentials = new BasicAWSCredentials(TEST_S3_ACCESS_KEY_ID, TEST_S3_SECRET_ACCESS_KEY);
    s3 = new AmazonS3Client(credentials);
    s3.setEndpoint(TEST_S3_ENDPOINT);

    s3.createBucket(archiveBucket);
    s3.createBucket(logStorageBucket);
}
 
Example #4
Source File: PublishExecutorTest.java    From gocd-s3-artifacts with Apache License 2.0
@Test
public void shouldNotThrowIfAWSUseIAMRoleIsTrueAndAWS_SECRET_ACCESS_KEYNotPresent() {
    Maps.MapBuilder<String, String> mockVariables = mockEnvironmentVariables
            .with(AWS_USE_IAM_ROLE, "True")
            .with(AWS_ACCESS_KEY_ID, "")
            .with(AWS_SECRET_ACCESS_KEY, "");
    AmazonS3Client mockClient = mockClient();

    Config config = new Config(Maps.builder()
            .with(Constants.SOURCEDESTINATIONS, Maps.builder().with("value", "[{\"source\": \"target/*\", \"destination\": \"\"}]").build())
            .with(Constants.DESTINATION_PREFIX, Maps.builder().with("value", "").build())
            .with(Constants.ARTIFACTS_BUCKET, Maps.builder().with("value", "").build())
            .build());

    TaskExecutionResult result = executeMockPublish(
            mockClient,
            config,
            new String[]{"README.md"},
            mockVariables
    );

    assertTrue(result.isSuccessful());
}
 
Example #5
Source File: S3DataTransfererFactory.java    From oodt with Apache License 2.0
@Override
public S3DataTransferer createDataTransfer() {
    String bucketName = System.getProperty(BUCKET_NAME_PROPERTY);
    String region = System.getProperty(REGION_PROPERTY);
    String accessKey = System.getProperty(ACCESS_KEY_PROPERTY);
    String secretKey = System.getProperty(SECRET_KEY_PROPERTY);
    boolean encrypt = Boolean.getBoolean(ENCRYPT_PROPERTY);

    AmazonS3Client s3 = (AmazonS3Client) AmazonS3ClientBuilder.standard()
            .withRegion(region)
            .withCredentials(
                    new AWSStaticCredentialsProvider(
                            new BasicAWSCredentials(accessKey, secretKey)))
            .build();

    return new S3DataTransferer(s3, bucketName, encrypt);
}
 
Example #6
Source File: KinesisClientManager.java    From presto with Apache License 2.0
@Inject
public KinesisClientManager(KinesisConfig config)
{
    if (!isNullOrEmpty(config.getAccessKey()) && !isNullOrEmpty(config.getSecretKey())) {
        BasicAWSCredentials awsCredentials = new BasicAWSCredentials(config.getAccessKey(), config.getSecretKey());
        this.client = new AmazonKinesisClient(awsCredentials);
        this.amazonS3Client = new AmazonS3Client(awsCredentials);
        this.dynamoDbClient = new AmazonDynamoDBClient(awsCredentials);
    }
    else {
        DefaultAWSCredentialsProviderChain defaultChain = new DefaultAWSCredentialsProviderChain();
        this.client = new AmazonKinesisClient(defaultChain);
        this.amazonS3Client = new AmazonS3Client(defaultChain);
        this.dynamoDbClient = new AmazonDynamoDBClient(defaultChain);
    }

    this.client.setEndpoint("kinesis." + config.getAwsRegion() + ".amazonaws.com");
    this.dynamoDbClient.setEndpoint("dynamodb." + config.getAwsRegion() + ".amazonaws.com");
}
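setEndpoint and the hand-assembled endpoint strings are the older client-configuration style; later 1.x releases prefer the builders, which derive the endpoint from the region. A hedged sketch of the equivalent S3 wiring (an alternative, not the project's code):

// Sketch: builder form; assumes config.getAwsRegion() returns a valid region name.
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
        .withCredentials(new DefaultAWSCredentialsProviderChain())
        .withRegion(config.getAwsRegion())
        .build();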
 
Example #7
Source File: S3RunnerTest.java    From micro-server with Apache License 2.0
@Test
public void runAppAndBasicTest() {

    AmazonS3Client s3client = server.getSpringContext()
                                    .getBean(AmazonS3Client.class);
    assertThat(s3client != null, is(true));

    S3Configuration s3Configuration = server.getSpringContext()
                                            .getBean(S3Configuration.class);
    assertThat(s3Configuration.getAccessKey(), is(""));
    assertThat(s3Configuration.getSecretKey(), is(""));
    assertThat(s3Configuration.getSessionToken() == null, is(true));
    assertThat(s3Configuration.getRegion() == null, is(true));
    assertThat(s3Configuration.getUploadThreads(), is(5));
    assertThat(s3Configuration.getUploadThreadNamePrefix(), is("s3-transfer-manager-worker-"));

    S3Utils s3Utils = server.getSpringContext()
                            .getBean(S3Utils.class);
    assertThat(s3Utils != null, is(true));

    TransferManager tm = server.getSpringContext()
                               .getBean(TransferManager.class);
    assertThat(tm != null, is(true));

}
 
Example #8
Source File: S3UtilsTest.java    From micro-server with Apache License 2.0
@Test
public void getAllSummaries() {
    answer = true;
    AmazonS3Client client = mock(AmazonS3Client.class);
    ObjectListing objectListing = mock(ObjectListing.class);
    when(client.listObjects(any(ListObjectsRequest.class))).thenReturn(objectListing);
    when(objectListing.isTruncated()).thenAnswer(__ -> {
        try {
            return answer;
        } finally {
            answer = false;
        }
    });
    S3Utils utils = new S3Utils(
                                client, null, null, false, null);
    utils.getAllSummaries(new ListObjectsRequest());
    verify(objectListing, times(2)).getObjectSummaries();
}
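The mocked isTruncated() answers true exactly once, so a marker-based pagination loop inside getAllSummaries would read two pages and therefore call getObjectSummaries() twice. A sketch of the loop the test implies (assumed; the project's actual implementation may differ):

// Sketch of the pagination loop under test (assumption, not the project's code).
ObjectListing listing = client.listObjects(request);
List<S3ObjectSummary> summaries = new ArrayList<>(listing.getObjectSummaries());
while (listing.isTruncated()) {
    request.setMarker(listing.getNextMarker());   // continue from where the last page ended
    listing = client.listObjects(request);
    summaries.addAll(listing.getObjectSummaries());
}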
 
Example #9
Source File: PackageFetchExecutorTest.java    From gocd-s3-artifacts with Apache License 2.0
@Test
public void shouldBeFailureIfFetchConfigNotValid() {
    Map<String, String> mockVariables = mockEnvironmentVariables.build();
    config = new Config(Maps.builder()
            .with(Constants.REPO, Maps.builder().with("value", "Wrong").build())
            .with(Constants.PACKAGE, Maps.builder().with("value", "TESTPUBLISHS3ARTIFACTS").build())
            .with(Constants.DESTINATION, Maps.builder().with("value", "artifacts").build())
            .build());
    AmazonS3Client mockClient = mockClient();
    S3ArtifactStore store = new S3ArtifactStore(mockClient, bucket);
    doReturn(store).when(fetchExecutor).getS3ArtifactStore(any(GoEnvironment.class), eq(bucket));

    TaskExecutionResult result = fetchExecutor.execute(config, mockContext(mockVariables));

    assertFalse(result.isSuccessful());
    assertThat(result.message(), is("Failure while downloading artifacts - Please check Repository name or Package name configuration. Also, ensure that the appropriate S3 material is configured for the pipeline."));
}
 
Example #10
Source File: S3StorageDriver.java    From dcos-cassandra-service with Apache License 2.0
String getEndpoint(BackupRestoreContext ctx) throws URISyntaxException {
    URI uri = new URI(ctx.getExternalLocation());
    String scheme = uri.getScheme();
    if (scheme.equals(AmazonS3Client.S3_SERVICE_NAME)) {
        return Constants.S3_HOSTNAME;
    } else {
        String endpoint = scheme + "://" + uri.getHost();

        int port = uri.getPort();
        if (port != -1) {
            endpoint += ":" + Integer.toString(port);
        }

        return endpoint;
    }
}
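For illustration, a few inputs and the endpoints this method would derive (example values, not from the source):

// "s3://my-bucket/backups"            -> Constants.S3_HOSTNAME (the default AWS endpoint)
// "https://storage.example.com/bkt"   -> "https://storage.example.com"
// "http://localhost:9000/bkt"         -> "http://localhost:9000"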
 
Example #11
Source File: GeoIpOperationFactory.java    From bender with Apache License 2.0
@Override
public void setConf(AbstractConfig config) {
  this.config = (GeoIpOperationConfig) config;
  AmazonS3Client client = this.s3Factory.newInstance();

  AmazonS3URI uri = new AmazonS3URI(this.config.getGeoLiteDb());
  GetObjectRequest req = new GetObjectRequest(uri.getBucket(), uri.getKey());
  S3Object obj = client.getObject(req);

  try {
    this.databaseReader =
        new DatabaseReader.Builder(obj.getObjectContent()).withCache(new CHMCache()).build();
  } catch (IOException e) {
    throw new ConfigurationException("Unable to read " + this.config.getGeoLiteDb(), e);
  }
}
 
Example #12
Source File: AbstractS3CacheBolt.java    From storm-crawler with Apache License 2.0
/** Returns an S3 client given the configuration **/
public static AmazonS3Client getS3Client(Map conf) {
    AWSCredentialsProvider provider = new DefaultAWSCredentialsProviderChain();
    AWSCredentials credentials = provider.getCredentials();
    ClientConfiguration config = new ClientConfiguration();

    AmazonS3Client client = new AmazonS3Client(credentials, config);

    String regionName = ConfUtils.getString(conf, REGION);
    if (StringUtils.isNotBlank(regionName)) {
        client.setRegion(RegionUtils.getRegion(regionName));
    }

    String endpoint = ConfUtils.getString(conf, ENDPOINT);
    if (StringUtils.isNotBlank(endpoint)) {
        client.setEndpoint(endpoint);
    }
    return client;
}
 
Example #13
Source File: WarehouseExport.java    From usergrid with Apache License 2.0
private void copyToS3(String fileName) {

    String bucketName = (String) properties.get(BUCKET_PROPNAME);
    String accessId = (String) properties.get(ACCESS_ID_PROPNAME);
    String secretKey = (String) properties.get(SECRET_KEY_PROPNAME);

    // 'overrides' and MODULES are built but never used in this method (leftover jclouds wiring).
    Properties overrides = new Properties();
    overrides.setProperty("s3.identity", accessId);
    overrides.setProperty("s3.credential", secretKey);

    final Iterable<? extends Module> MODULES = ImmutableSet
            .of(new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(),
                    new NettyPayloadModule());

    AWSCredentials credentials = new BasicAWSCredentials(accessId, secretKey);
    ClientConfiguration clientConfig = new ClientConfiguration();
    clientConfig.setProtocol(Protocol.HTTP);

    AmazonS3Client s3Client = new AmazonS3Client(credentials, clientConfig);

    s3Client.createBucket(bucketName);
    File uploadFile = new File(fileName);
    PutObjectResult putObjectResult = s3Client.putObject(bucketName, uploadFile.getName(), uploadFile);
    logger.info("Uploaded file etag={}", putObjectResult.getETag());
}
 
Example #14
Source File: S3StorageTest.java    From digdag with Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(TEST_S3_ENDPOINT, not(isEmptyOrNullString()));

    AWSCredentials credentials = new BasicAWSCredentials(TEST_S3_ACCESS_KEY_ID, TEST_S3_SECRET_ACCESS_KEY);
    AmazonS3Client s3 = new AmazonS3Client(credentials);
    s3.setEndpoint(TEST_S3_ENDPOINT);

    String bucket = UUID.randomUUID().toString();
    s3.createBucket(bucket);

    ConfigFactory cf = new ConfigFactory(objectMapper());
    Config config = cf.create()
        .set("endpoint", TEST_S3_ENDPOINT)
        .set("bucket", bucket)  // use unique bucket name
        .set("credentials.access-key-id", TEST_S3_ACCESS_KEY_ID)
        .set("credentials.secret-access-key", TEST_S3_SECRET_ACCESS_KEY)
        ;
    storage = new S3StorageFactory().newStorage(config);
}
 
Example #15
Source File: S3PacbotUtils.java    From pacbot with Apache License 2.0
/**
 * Checks the bucket ACL for grants that open the requested access type to everyone.
 *
 * @param awsS3Client the client used to fetch the bucket ACL
 * @param s3BucketName the bucket whose ACL is inspected
 * @param accessType the access type to look for (read or write)
 * @return the permissions found to be openly granted, empty if none
 */
public static Set<Permission> checkACLPermissions(AmazonS3Client awsS3Client, String s3BucketName, String accessType) {
	AccessControlList bucketAcl;
	Set<Permission> permissionList = new HashSet<>();
	try {
		bucketAcl = awsS3Client.getBucketAcl(s3BucketName);
		List<Grant> grants = bucketAcl.getGrantsAsList();
		if (!CollectionUtils.isNullOrEmpty(grants)) {
			permissionList = checkAnyGrantHasOpenToReadOrWriteAccess(grants, accessType);
		}
	} catch (AmazonS3Exception s3Exception) {
		logger.error("error : ", s3Exception);
		throw new RuleExecutionFailedExeption(s3Exception.getMessage());
	}
	return permissionList;
}
 
Example #16
Source File: KinesisClientManager.java    From presto-kinesis with Apache License 2.0
@Inject
KinesisClientManager(KinesisConnectorConfig kinesisConnectorConfig)
{
    log.info("Creating new client for Consumer");
    if (nonEmpty(kinesisConnectorConfig.getAccessKey()) && nonEmpty(kinesisConnectorConfig.getSecretKey())) {
        this.kinesisAwsCredentials = new KinesisAwsCredentials(kinesisConnectorConfig.getAccessKey(), kinesisConnectorConfig.getSecretKey());
        this.client = new AmazonKinesisClient(this.kinesisAwsCredentials);
        this.amazonS3Client = new AmazonS3Client(this.kinesisAwsCredentials);
        this.dynamoDBClient = new AmazonDynamoDBClient(this.kinesisAwsCredentials);
    }
    else {
        this.kinesisAwsCredentials = null;
        DefaultAWSCredentialsProviderChain defaultChain = new DefaultAWSCredentialsProviderChain();
        this.client = new AmazonKinesisClient(defaultChain);
        this.amazonS3Client = new AmazonS3Client(defaultChain);
        this.dynamoDBClient = new AmazonDynamoDBClient(defaultChain);
    }

    this.client.setEndpoint("kinesis." + kinesisConnectorConfig.getAwsRegion() + ".amazonaws.com");
    this.dynamoDBClient.setEndpoint("dynamodb." + kinesisConnectorConfig.getAwsRegion() + ".amazonaws.com");
}
 
Example #17
Source File: HadoopFileUtils.java    From mrgeo with Apache License 2.0
/**
 * Return an AmazonS3Client set up with the proper endpoint
 * defined in core-site.xml using a property like fs.s3a.endpoint.
 * This mimics code found in S3AFileSystem.
 *
 * @param conf the Hadoop configuration to read the endpoint property from
 * @param scheme the filesystem scheme (e.g. "s3a") used to build the property key
 * @return an AmazonS3Client, with the endpoint applied when one is configured
 */
private static AmazonS3Client getS3Client(Configuration conf, String scheme)
{
  AmazonS3Client s3Client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());
  String endpointKey = "fs." + scheme.toLowerCase() + ".endpoint";
  String endPoint = conf.getTrimmed(endpointKey,"");
  log.debug("Using endpoint setting " + endpointKey);
  if (!endPoint.isEmpty()) {
    try {
      log.debug("Setting S3 client endpoint to " + endPoint);
      s3Client.setEndpoint(endPoint);
    } catch (IllegalArgumentException e) {
      String msg = "Incorrect endpoint: "  + e.getMessage();
      log.error(msg);
      throw new IllegalArgumentException(msg, e);
    }
  }
  return s3Client;
}
 
Example #18
Source File: PutS3Object.java    From localization_nifi with Apache License 2.0
protected synchronized MultipartState getLocalStateIfInS3(final AmazonS3Client s3, final String bucket,
                                                    final String s3ObjectKey) throws IOException {
    MultipartState currState = getLocalState(s3ObjectKey);
    if (currState == null) {
        return null;
    }
    if (localUploadExistsInS3(s3, bucket, currState)) {
        getLogger().info("Local state for {} loaded with uploadId {} and {} partETags",
                new Object[]{s3ObjectKey, currState.getUploadId(), currState.getPartETags().size()});
        return currState;
    } else {
        getLogger().info("Local state for {} with uploadId {} does not exist in S3, deleting local state",
                new Object[]{s3ObjectKey, currState.getUploadId()});
        persistLocalState(s3ObjectKey, null);
        return null;
    }
}
 
Example #19
Source File: S3DataOutputStream.java    From stratosphere with Apache License 2.0
S3DataOutputStream(final AmazonS3Client s3Client, final String bucket, final String object, final byte[] buf,
		final boolean useRRS) {

	this.s3Client = s3Client;
	this.bucket = bucket;
	this.object = object;
	this.buf = buf;
	this.useRRS = useRRS;
}
 
Example #20
Source File: S3PersistWriter.java    From streams with Apache License 2.0
@Override
public void prepare(Object configurationObject) {

  lineWriterUtil = LineReadWriteUtil.getInstance(s3WriterConfiguration);

  // Connect to S3
  synchronized (this) {

    try {
      // if the user has chosen to not set the object mapper, then set a default object mapper for them.
      if (this.objectMapper == null) {
        this.objectMapper = StreamsJacksonMapper.getInstance();
      }

      // Create the credentials Object
      if (this.amazonS3Client == null) {
        AWSCredentials credentials = new BasicAWSCredentials(s3WriterConfiguration.getKey(), s3WriterConfiguration.getSecretKey());

        ClientConfiguration clientConfig = new ClientConfiguration();
        clientConfig.setProtocol(Protocol.valueOf(s3WriterConfiguration.getProtocol().toString()));

        // We do not want path style access
        S3ClientOptions clientOptions = new S3ClientOptions();
        clientOptions.setPathStyleAccess(false);

        this.amazonS3Client = new AmazonS3Client(credentials, clientConfig);
        if (StringUtils.isNotEmpty(s3WriterConfiguration.getRegion())) {
          this.amazonS3Client.setRegion(Region.getRegion(Regions.fromName(s3WriterConfiguration.getRegion())));
        }
        this.amazonS3Client.setS3ClientOptions(clientOptions);
      }
    } catch (Exception ex) {
      LOGGER.error("Exception while preparing the S3 client: {}", ex);
    }

    Preconditions.checkArgument(this.amazonS3Client != null);
  }
}
 
Example #21
Source File: AmazonS3Template.java    From spring-boot-starter-amazon-s3 with Apache License 2.0
/**
 * Gets an Amazon S3 client from basic session credentials
 *
 * @return an authenticated Amazon S3 client
 */
public AmazonS3 getAmazonS3Client() {
    BasicSessionCredentials basicSessionCredentials = getBasicSessionCredentials();

    // Create a new S3 client using the basic session credentials of the service instance
    return new AmazonS3Client(basicSessionCredentials);
}
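getBasicSessionCredentials is not shown; a sketch of what it plausibly returns, assuming the template holds an access key, secret key, and session token (the field names are assumptions):

// Sketch: temporary STS-style credentials for the bound service instance (assumed fields).
private BasicSessionCredentials getBasicSessionCredentials() {
    return new BasicSessionCredentials(accessKeyId, secretAccessKey, sessionToken);
}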
 
Example #22
Source File: AWSSdkClient.java    From incubator-gobblin with Apache License 2.0
/***
 * Initialize the AWS SDK Client
 *
 * @param awsClusterSecurityManager The {@link AWSClusterSecurityManager} to fetch AWS credentials
 * @param region The Amazon AWS {@link Region}
 */
public AWSSdkClient(final AWSClusterSecurityManager awsClusterSecurityManager, final Region region) {
  this.amazonEC2Supplier = Suppliers.memoize(new Supplier<AmazonEC2>() {
    @Override
    public AmazonEC2 get() {
      AmazonEC2Client amazonEC2 = new AmazonEC2Client(awsClusterSecurityManager.getCredentialsProvider());
      amazonEC2.setRegion(region);
      return amazonEC2;
    }
  });
  this.amazonS3Supplier = Suppliers.memoize(new Supplier<AmazonS3>() {
    @Override
    public AmazonS3 get() {
      AmazonS3Client amazonS3 = new AmazonS3Client(awsClusterSecurityManager.getCredentialsProvider());
      amazonS3.setRegion(region);
      return amazonS3;
    }
  });
  this.amazonAutoScalingSupplier = Suppliers.memoize(new Supplier<AmazonAutoScaling>() {
    @Override
    public AmazonAutoScaling get() {
      AmazonAutoScalingClient amazonAutoScaling =
              new AmazonAutoScalingClient(awsClusterSecurityManager.getCredentialsProvider());
      amazonAutoScaling.setRegion(region);
      return amazonAutoScaling;
    }
  });
}
 
Example #23
Source File: RepositoryS3Backup.java    From document-management-system with GNU General Public License v2.0
/**
 * Performs a recursive repository content export with metadata
 */
public static ImpExpStats backup(String token, String fldPath, String bucket, boolean metadata, Writer out,
                                 InfoDecorator deco) throws PathNotFoundException, AccessDeniedException, RepositoryException,
		FileNotFoundException, ParseException, NoSuchGroupException, IOException, DatabaseException,
		GeneralException {
	log.debug("backup({}, {}, {}, {}, {}, {})", new Object[]{token, fldPath, bucket, metadata, out, deco});
	ImpExpStats stats = null;

	if (running) {
		throw new GeneralException("Backup in progress");
	} else {
		running = true;

		try {
			if (!Config.AMAZON_ACCESS_KEY.equals("") && !Config.AMAZON_SECRET_KEY.equals("")) {
				AmazonS3 s3 = new AmazonS3Client(new BasicAWSCredentials(Config.AMAZON_ACCESS_KEY,
						Config.AMAZON_SECRET_KEY));

				if (!s3.doesBucketExist(bucket)) {
					s3.createBucket(bucket, Region.EU_Ireland);
				}

				stats = backupHelper(token, fldPath, s3, bucket, metadata, out, deco);
				log.info("Backup finished!");
			} else {
				throw new GeneralException("Missing Amazon Web Service keys");
			}
		} finally {
			running = false;
		}
	}

	log.debug("exportDocuments: {}", stats);
	return stats;
}
 
Example #24
Source File: AmazonS3ProxyFactoryTest.java    From spring-cloud-aws with Apache License 2.0
@Test
void verifyPolymorphicHandling() {

	AmazonS3 amazonS3 = mock(AmazonS3.class);
	AmazonS3 proxy1 = AmazonS3ProxyFactory.createProxy(amazonS3);

	assertThat(AmazonS3.class.isAssignableFrom(proxy1.getClass())).isTrue();
	assertThat(AmazonS3Client.class.isAssignableFrom(proxy1.getClass())).isFalse();

	AmazonS3 amazonS3Client = AmazonS3ClientBuilder.standard()
			.withRegion(Regions.DEFAULT_REGION).build();
	AmazonS3 proxy2 = AmazonS3ProxyFactory.createProxy(amazonS3Client);

	assertThat(AmazonS3.class.isAssignableFrom(proxy2.getClass())).isTrue();
}
 
Example #25
Source File: SimpleStorageResource.java    From spring-cloud-aws with Apache License 2.0
@Override
public URL getURL() throws IOException {
	Region region = this.amazonS3.getRegion().toAWSRegion();
	String encodedObjectName = URLEncoder.encode(this.objectName,
			StandardCharsets.UTF_8.toString());
	return new URL("https", region.getServiceEndpoint(AmazonS3Client.S3_SERVICE_NAME),
			"/" + this.bucketName + "/" + encodedObjectName);
}
 
Example #26
Source File: TowtruckApp.java    From data-highway with Apache License 2.0
@Bean
AmazonS3 s3(
    @Value("${s3.endpoint.url}") String s3EndpointUrl,
    @Value("${s3.endpoint.signingRegion}") String signingRegion) {
  return AmazonS3Client
      .builder()
      .withCredentials(new DefaultAWSCredentialsProviderChain())
      .withEndpointConfiguration(new EndpointConfiguration(s3EndpointUrl, signingRegion))
      .build();
}
 
Example #27
Source File: S3Emitter.java    From amazon-kinesis-connectors with Apache License 2.0
public S3Emitter(KinesisConnectorConfiguration configuration) {
    s3Bucket = configuration.S3_BUCKET;
    s3Endpoint = configuration.S3_ENDPOINT;
    s3client = new AmazonS3Client(configuration.AWS_CREDENTIALS_PROVIDER);
    if (s3Endpoint != null) {
        s3client.setEndpoint(s3Endpoint);
    }
}
 
Example #28
Source File: S3ClientImpl.java    From exhibitor with Apache License 2.0
@Override
public void completeMultipartUpload(CompleteMultipartUploadRequest request) throws Exception
{
    RefCountedClient holder = client.get();
    AmazonS3Client amazonS3Client = holder.useClient();
    try
    {
        amazonS3Client.completeMultipartUpload(request);
    }
    finally
    {
        holder.release();
    }
}
 
Example #29
Source File: MCAWS.java    From aws-big-data-blog with Apache License 2.0
public static void listBucketItems(String bucketName) {
    System.out.println("Connecting to AWS");
    System.out.println("Listing files in bucket " + bucketName);
    AmazonS3 s3 = new AmazonS3Client();
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);
    System.out.println("Listing buckets");
    ObjectListing objectListing = s3.listObjects(new ListObjectsRequest()
            .withBucketName(bucketName));
    for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
        System.out.println(" - " + objectSummary.getKey() + "  " +
                "(size = " + objectSummary.getSize() + ")");
    }
    System.out.println();
}
 
Example #30
Source File: S3SwapStore.java    From writelatex-git-bridge with MIT License
S3SwapStore(
        String accessKey,
        String secret,
        String bucketName
) {
    s3 = new AmazonS3Client(new BasicAWSCredentials(accessKey, secret));
    this.bucketName = bucketName;
}