Java Code Examples for com.amazonaws.services.s3.AmazonS3Client

The following are top voted examples showing how to use com.amazonaws.services.s3.AmazonS3Client. These examples are extracted from open source projects.
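Most of the examples below obtain a client either through one of the AmazonS3Client constructors or through AmazonS3ClientBuilder. As a minimal, self-contained sketch for orientation (the bucket name, key, and region are placeholders, and credentials are assumed to come from the default provider chain), a basic upload looks roughly like this:

import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class AmazonS3ClientSketch {
    public static void main(String[] args) {
        // Constructor style used by many examples on this page; credentials are
        // resolved from the default provider chain (env vars, system properties,
        // ~/.aws/credentials, instance profile).
        AmazonS3 legacyClient = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());

        // Builder style recommended by newer 1.x SDK versions; the region is a placeholder.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .build();

        // "my-bucket" and "hello.txt" are placeholders.
        client.putObject("my-bucket", "hello.txt", "Hello from AmazonS3Client");
    }
}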
Example 1
Project: alexa-morser-coder-skill   File: AudioUtils.java
public static void uploadToS3() {
    // upload to s3 bucket
    AWSCredentials awsCredentials = SkillConfig.getAWSCredentials();
    AmazonS3Client s3Client = awsCredentials != null ? new AmazonS3Client(awsCredentials) : new AmazonS3Client();

    File folder = new File("c:/temp/morse/" + DOT + "/mp3/");
    File[] listOfFiles = folder.listFiles();

    for (File file : listOfFiles) {
        if (file.isFile()) {
            if (!s3Client.doesObjectExist("morseskill", DOT + "/" + file.getName())) {
                PutObjectRequest s3Put = new PutObjectRequest("morseskill", DOT + "/" + file.getName(), file).withCannedAcl(CannedAccessControlList.PublicRead);
                s3Client.putObject(s3Put);
                System.out.println("Upload complete: " + file.getName());
            }
            else {
                System.out.println("Skip as " + file.getName() + " already exists.");
            }
        }
    }
}
 
Example 2
Project: rxjava2-aws   File: SqsMessageTest.java
@Test
public void shutdownOfSqsAndS3FactoryCreatedClientsOccursWhenS3DeleteObjectFails() {
    AmazonSQSClient sqs = Mockito.mock(AmazonSQSClient.class);
    AmazonS3Client s3 = Mockito.mock(AmazonS3Client.class);
    String s3Id = "123";
    SqsMessage m = new SqsMessage(RECEIPT_HANDLE, new byte[] {}, 1000, Optional.of(s3Id),
            new SqsMessage.Service(Optional.of(() -> s3), () -> sqs, Optional.of(s3), sqs,
                    QUEUE, Optional.of(BUCKET)));
    Mockito.when(sqs.deleteMessage(QUEUE, RECEIPT_HANDLE))
            .thenReturn(new DeleteMessageResult());
    Mockito.doThrow(new RuntimeException()).when(s3).deleteObject(BUCKET, s3Id);
    try {
        m.deleteMessage(Client.FROM_FACTORY);
        Assert.fail();
    } catch (RuntimeException e) {
        // do nothing
    }
    InOrder inorder = Mockito.inOrder(sqs, s3);
    inorder.verify(s3, Mockito.times(1)).deleteObject(BUCKET, s3Id);
    inorder.verify(s3, Mockito.times(1)).shutdown();
    inorder.verify(sqs, Mockito.times(1)).shutdown();
    Mockito.verifyNoMoreInteractions(sqs, s3);
}
 
Example 3
Project: dataset-lib   File: S3Provider.java
public void remove(Object[] params) {
    AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, params[0].toString()));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which " +
            "means your request made it " +
            "to Amazon S3, but was rejected with an error response" +
            " for some reason.");
        System.out.println("Error Message:" + ase.getMessage());
        System.out.println("HTTP Status Code:" + ase.getStatusCode());
        System.out.println("AWS Error Code:" + ase.getErrorCode());
        System.out.println("Error Type:" + ase.getErrorType());
        System.out.println("Request ID:" + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which " +
            "means the client encountered " +
            "an internal error while trying to " +
            "communicate with S3, " +
            "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }

}
 
Example 4
Project: s3-proxy-chunk-upload   File: S3Module.java
@Override
public Seq<Binding<?>> bindings(Environment environment, Configuration configuration) {
    GlobalParams.AWS_ACCESS_KEY = Scala.orNull(configuration.getString("AWS_ACCESS_KEY", scala.Option.empty()));
    GlobalParams.AWS_SECRET_KEY = Scala.orNull(configuration.getString("AWS_SECRET_KEY", scala.Option.empty()));
    GlobalParams.AWS_S3_BUCKET = Scala.orNull(configuration.getString("AWS_S3_BUCKET", scala.Option.empty()));

    String accessKey = GlobalParams.AWS_ACCESS_KEY;
    String secretKey = GlobalParams.AWS_SECRET_KEY;
    s3Bucket = GlobalParams.AWS_S3_BUCKET;

    if ((accessKey != null) && (secretKey != null)) {
        awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
        amazonS3 = new AmazonS3Client(awsCredentials);

        Logger.info("Using S3 Bucket: " + s3Bucket);
    }
    return seq(
            bind(S3Plugin.class).to(S3PluginImpl.class)
    );
}
 
Example 5
Project: hadoop   File: TestS3AConfiguration.java
/**
 * Test if custom endpoint is picked up.
 * <p/>
 * The test expects TEST_ENDPOINT to be defined in the Configuration
 * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
 * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
 * Evidently, the bucket has to be hosted in the region denoted by the
 * endpoint for the test to succeed.
 * <p/>
 * More info and the list of endpoint identifiers:
 * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
 *
 * @throws Exception
 */
@Test
public void TestEndpoint() throws Exception {
  conf = new Configuration();
  String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
  if (endpoint.isEmpty()) {
    LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
        "setting was not detected");
  } else {
    conf.set(Constants.ENDPOINT, endpoint);
    fs = S3ATestUtils.createTestFileSystem(conf);
    AmazonS3Client s3 = fs.getAmazonS3Client();
    String endPointRegion = "";
    // Differentiate handling of "s3-" and "s3." based endpoint identifiers
    String[] endpointParts = StringUtils.split(endpoint, '.');
    if (endpointParts.length == 3) {
      endPointRegion = endpointParts[0].substring(3);
    } else if (endpointParts.length == 4) {
      endPointRegion = endpointParts[1];
    } else {
      fail("Unexpected endpoint");
    }
    assertEquals("Endpoint config setting and bucket location differ: ",
        endPointRegion, s3.getBucketLocation(fs.getUri().getHost()));
  }
}
 
Example 6
Project: full-javaee-app   File: S3Object.java
public static String webHookDump(InputStream stream, String school, String extension) {
    if (stream != null) {
        extension = extension == null || extension.isEmpty() ? ".xml" : extension.contains(".") ? extension : "." + extension;
        String fileName = "webhooks/" + school + "/" + school + "_" + Clock.getCurrentDateDashes() + "_" + Clock.getCurrentTime() + extension;
        AmazonS3 s3 = new AmazonS3Client();
        Region region = Region.getRegion(Regions.US_WEST_2);
        s3.setRegion(region);
        try {
            File file = CustomUtilities.inputStreamToFile(stream);
            s3.putObject(new PutObjectRequest(name, fileName, file));
            return CustomUtilities.fileToString(file);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return "";
}
 
Example 7
Project: dremio-oss   File: S3FileSystem.java
@Override
public AmazonS3Client load(S3ClientKey clientKey) throws Exception {
  logger.debug("Opening S3 client connection for {}", clientKey);
  ClientConfiguration clientConf = new ClientConfiguration();
  clientConf.setProtocol(clientKey.isSecure ? Protocol.HTTPS : Protocol.HTTP);
  // Proxy settings (if configured)
  clientConf.setProxyHost(clientKey.s3Config.get(Constants.PROXY_HOST));
  if (clientKey.s3Config.get(Constants.PROXY_PORT) != null) {
    clientConf.setProxyPort(Integer.valueOf(clientKey.s3Config.get(Constants.PROXY_PORT)));
  }
  clientConf.setProxyDomain(clientKey.s3Config.get(Constants.PROXY_DOMAIN));
  clientConf.setProxyUsername(clientKey.s3Config.get(Constants.PROXY_USERNAME));
  clientConf.setProxyPassword(clientKey.s3Config.get(Constants.PROXY_PASSWORD));
  clientConf.setProxyWorkstation(clientKey.s3Config.get(Constants.PROXY_WORKSTATION));

  if (clientKey.accessKey == null){
    return new AmazonS3Client(new AnonymousAWSCredentialsProvider(), clientConf);
  } else {
    return new AmazonS3Client(new BasicAWSCredentials(clientKey.accessKey, clientKey.secretKey), clientConf);
  }
}
 
Example 8
Project: generator-jhipster-storage   File: _StorageConfiguration.java
/**
 * S3 storage client.
 *
 * @return the S3 client
 */
@Bean
@ConditionalOnProperty(value = "bigbug.storage.s3.enable", havingValue = "true")
AmazonS3Client amazonS3Client() {
    ClientConfiguration clientConfig = new ClientConfiguration();
    clientConfig.setProtocol(Protocol.HTTP);

    BasicAWSCredentials basicAWSCredentials =
            new BasicAWSCredentials(
                    storageProperties.getStorage().getS3().getAccessKey(),
                    storageProperties.getStorage().getS3().getSecretKey());

    return (AmazonS3Client) AmazonS3ClientBuilder.standard()
            .withClientConfiguration(clientConfig)
            .withEndpointConfiguration(
                    new AwsClientBuilder.EndpointConfiguration(
                            storageProperties.getStorage().getS3().getEndpoint(), Regions.DEFAULT_REGION.getName()))
            .withCredentials(new AWSStaticCredentialsProvider(basicAWSCredentials))
            .build();
}
 
Example 9
Project: nifi-minifi   File: S3ConfigurationCache.java
/**
 * Creates a new S3 configuration cache.
 * @param bucket The S3 bucket.
 * @param prefix The S3 object prefix.
 * @param pathPattern The path pattern.
 * @param accessKey The (optional) S3 access key.
 * @param secretKey The (optional) S3 secret key.
 * @param region The AWS region (e.g. us-east-1).
 * @throws IOException Thrown if the configuration cannot be read.
 */
public S3ConfigurationCache(String bucket, String prefix, String pathPattern,
    String accessKey, String secretKey, String region) throws IOException {

  this.bucket = bucket;
  this.prefix = prefix;
  this.pathPattern = pathPattern;

  if (!StringUtils.isEmpty(accessKey)) {

    s3 = AmazonS3Client.builder()
        .withCredentials(new AWSStaticCredentialsProvider(
            new BasicAWSCredentials(accessKey, secretKey)))
        .withRegion(Regions.fromName(region))
        .build();

  } else {

    s3 = AmazonS3Client.builder()
        .withRegion(Regions.fromName(region))
        .build();
  }

}
 
Example 10
Project: stallion-core   File: S3StorageService.java
public S3StorageService() {
    CloudStorageSettings settings = Settings.instance().getCloudStorage();
    if (settings == null) {
        throw new ConfigException("You are missing the section [cloudStorage]\naccessToken=...\nsecret=... in your stallion.toml");
    }
    if (empty(settings.getAccessToken())) {
        throw new ConfigException("You are missing the setting accessKey in the stallion.toml section [cloudStorage]");
    }
    if (empty(settings.getSecret())) {
        throw new ConfigException("You are missing the setting secret in the stallion.toml section [cloudStorage]");
    }
    accessToken = settings.getAccessToken();
    secret = settings.getSecret();
    AWSCredentials credentials = new BasicAWSCredentials(accessToken, secret);
    client =  new AmazonS3Client(credentials);
}
 
Example 11
Project: exhibitor   File: S3ClientImpl.java
@Override
public PutObjectResult putObject(PutObjectRequest request) throws Exception
{
    RefCountedClient holder = client.get();
    AmazonS3Client amazonS3Client = holder.useClient();
    try
    {
        return amazonS3Client.putObject(request);
    }
    finally
    {
        holder.release();
    }
}
 
Example 12
Project: aliyun-oss-hadoop-fs   File: TestS3AConfiguration.java
/**
 * Test if custom endpoint is picked up.
 * <p/>
 * The test expects TEST_ENDPOINT to be defined in the Configuration
 * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
 * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
 * Evidently, the bucket has to be hosted in the region denoted by the
 * endpoint for the test to succeed.
 * <p/>
 * More info and the list of endpoint identifiers:
 * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
 *
 * @throws Exception
 */
@Test
public void TestEndpoint() throws Exception {
  conf = new Configuration();
  String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
  if (endpoint.isEmpty()) {
    LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
        "setting was not detected");
  } else {
    conf.set(Constants.ENDPOINT, endpoint);
    fs = S3ATestUtils.createTestFileSystem(conf);
    AmazonS3Client s3 = fs.getAmazonS3Client();
    String endPointRegion = "";
    // Differentiate handling of "s3-" and "s3." based endpoint identifiers
    String[] endpointParts = StringUtils.split(endpoint, '.');
    if (endpointParts.length == 3) {
      endPointRegion = endpointParts[0].substring(3);
    } else if (endpointParts.length == 4) {
      endPointRegion = endpointParts[1];
    } else {
      fail("Unexpected endpoint");
    }
    assertEquals("Endpoint config setting and bucket location differ: ",
        endPointRegion, s3.getBucketLocation(fs.getUri().getHost()));
  }
}
 
Example 13
Project: MCSFS   File: S3Store.java
@Override
public void remove(String accessKey) throws Exception {
	LogUtils.debug(LOG_TAG, "Deleting file with access key: " + accessKey);
	AmazonS3 s3Client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());
	DeleteObjectsRequest multiObjectDeleteRequest = new DeleteObjectsRequest(bucketName);
	
	List<KeyVersion> keys = new ArrayList<KeyVersion>();
	keys.add(new KeyVersion(accessKey));
	keys.add(new KeyVersion(accessKey + "_key"));
	        
	multiObjectDeleteRequest.setKeys(keys);

	s3Client.deleteObjects(multiObjectDeleteRequest);

	LogUtils.debug(LOG_TAG, "Deleted file with access key: " + accessKey);
}
 
Example 14
Project: galaxy-fds-migration-tool   File: S3Source.java
@Override
public void init(Configuration conf, String keyPrefix) {
  bucketName = conf.get(keyPrefix + S3_BUCKET_NAME);
  String endpoint = conf.get(keyPrefix + S3_ENDPOINT_NAME);
  String key = conf.get(keyPrefix + S3_ACCESS_KEY);
  String secret = conf.get(keyPrefix + S3_ACCESS_SECRET);

  System.setProperty(SDKGlobalConfiguration.ACCESS_KEY_SYSTEM_PROPERTY, key);
  System.setProperty(SDKGlobalConfiguration.SECRET_KEY_SYSTEM_PROPERTY, secret);
  AWSCredentialsProvider provider = new SystemPropertiesCredentialsProvider();
  client = new AmazonS3Client(provider);
  client.setEndpoint(endpoint);
  override = conf.getBoolean(keyPrefix + "override", true);
  acls = new AccessControlList();
  acls.grantPermission(GroupGrantee.AllUsers, Permission.FullControl);
  acls.grantPermission(GroupGrantee.AllUsers, Permission.Read);
  acls.grantPermission(GroupGrantee.AllUsers, Permission.Write);
}
 
Example 15
Project: ismartonline   File: AwsFileManager.java
@Override
@SuppressWarnings("unused")
public List<AwsFileMiniModel> list(String prefix) {

	AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
	List<AwsFileMiniModel> files = new ArrayList<>();

	ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix);
	ObjectListing objectListing;

	do {
		objectListing = s3client.listObjects(listObjectsRequest);

		for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
			System.out.println(" - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")"
					+ " (date = " + objectSummary.getLastModified() + ")");
			files.add(new AwsFileMiniModel(objectSummary.getKey(), objectSummary.getLastModified()));
		}
		listObjectsRequest.setMarker(objectListing.getNextMarker());
	} while (objectListing.isTruncated());

	return files;

}
 
Example 16
Project: kafka-connect-redshift   File: RedshiftSinkTask.java
@Override
public void start(Map<String, String> properties) {
  configProperties = properties;
  config = new RedshiftSinkTaskConfig(configProperties);

  tempDir = new File(config.getTempOutputDir());
  tempDir.mkdirs();
  tempFiles = new HashMap<>();
  writers = new HashMap<>();

  List<String> fields = config.getFields();
  if (fields.size() == 1 && fields.get(0).equals("*")) {
    fields.clear();
  }
  serializer = new DefaultCopySerializer(fields);
  s3 = new AmazonS3Client(config.getAwsCredentials());
}
 
Example 17
Project: alexa-skills-kit-tellask-java   File: S3UtteranceReaderTest.java
private AmazonS3Client givenS3Mock() {
    return Mockito.mock(AmazonS3Client.class, new Answer() {
        @Override
        public Object answer(final InvocationOnMock invocation) throws Throwable {
            if (invocation.getMethod().getName().equals("getObject")) {
                // second parameter is the path to the file
                final String resourcePath = invocation.getArguments()[1].toString();
                // extract leading path
                final String leadingPath = resourcePath.substring(0, resourcePath.indexOf(LOCALE));
                // extract trailing path
                final String trailingPath = resourcePath.substring(resourcePath.indexOf(LOCALE) + LOCALE.length() + 1);
                // we reroute this request to the resourceutterancereader
                final ResourceUtteranceReader resourceReader = new ResourceUtteranceReader(leadingPath);
                resourceReader.setResourceLocation(trailingPath);
                final S3Object s3Object = new S3Object();
                s3Object.setObjectContent(resourceReader.read(LOCALE));
                return s3Object;
            }
            return invocation.callRealMethod();
        }
    });
}
 
Example 18
Project: alexa-skills-kit-tellask-java   File: S3UtteranceReaderTest.java
@Test
public void testConstructors() throws Exception {
    final S3UtteranceReader reader = new S3UtteranceReader("bucketName");
    Assert.assertEquals("bucketName", reader.getBucketName());

    final S3UtteranceReader reader2 = new S3UtteranceReader("bucketName", "/leading/path/");
    Assert.assertEquals("bucketName", reader2.getBucketName());
    Assert.assertEquals("leading/path/", reader2.getLeadingPath());

    final S3UtteranceReader reader3 = new S3UtteranceReader("bucketName", "leading/path", "trailing/test.yml");
    Assert.assertEquals("bucketName", reader3.getBucketName());
    Assert.assertEquals("leading/path/", reader3.getLeadingPath());
    Assert.assertEquals("/trailing/test.yml", reader3.getResourceLocation());

    exception.expect(IllegalArgumentException.class);
    new S3UtteranceReader(null, "bucketName", "leadingPath");

    exception.expect(IllegalArgumentException.class);
    new S3UtteranceReader("");

    exception.expect(IllegalArgumentException.class);
    new S3UtteranceReader("", "leadingPath");

    exception.expect(IllegalArgumentException.class);
    new S3UtteranceReader(new AmazonS3Client(), "", "leadingPath", S3UtteranceReader.DEFAULT_RESOURCE_LOCATION);
}
 
Example 19
Project: slim-map-reduce   File: AWSManager.java
public AmazonS3 configureS3(){
	/*
	 * Create your credentials file at ~/.aws/credentials (C:\Users\USER_NAME\.aws\credentials for Windows users)
	 * and save the following lines after replacing the underlined values with your own.
	 *
	 * [default]
	 * aws_access_key_id = YOUR_ACCESS_KEY_ID
	 * aws_secret_access_key = YOUR_SECRET_ACCESS_KEY
	 */
	AWSCredentials credentials = null;
	try {
		credentials = new ProfileCredentialsProvider().getCredentials();
	} catch (Exception e) {
		throw new AmazonClientException(
				"Cannot load the credentials from the credential profiles file. " +
						"Please make sure that your credentials file is at the correct " +
						"location (~/.aws/credentials), and is in valid format.",
						e);
	}
	AmazonS3 s3 = new AmazonS3Client(credentials);
	Region usEast1 = Region.getRegion(Regions.US_EAST_1);
	s3.setRegion(usEast1);
	return s3;
}
 
Example 20
Project: bender   File: GeoIpOperationFactory.java
@Override
public void setConf(AbstractConfig config) {
  this.config = (GeoIpOperationConfig) config;
  AmazonS3Client client = this.s3Factory.newInstance();

  AmazonS3URI uri = new AmazonS3URI(this.config.getGeoLiteDb());
  GetObjectRequest req = new GetObjectRequest(uri.getBucket(), uri.getKey());
  S3Object obj = client.getObject(req);

  try {
    this.databaseReader =
        new DatabaseReader.Builder(obj.getObjectContent()).withCache(new CHMCache()).build();
  } catch (IOException e) {
    throw new ConfigurationException("Unable to read " + this.config.getGeoLiteDb(), e);
  }
}
 
Example 21
Project: kafka-connect-s3   File: S3SinkTask.java
@Override
public void start(Map<String, String> props) throws ConnectException {
  readConfig(props);

  // Use default credentials provider that looks in Env + Java properties + profile + instance role
  AmazonS3 s3Client = new AmazonS3Client();

  // If worker config sets explicit endpoint override (e.g. for testing) use that
  if (overrideS3Endpoint != null && !overrideS3Endpoint.isEmpty()) {
    s3Client.setEndpoint(overrideS3Endpoint);
  }

  if (s3PathStyle) {
    s3Client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
  }

  s3 = new S3Writer(bucket, prefix, s3Client);

  // Recover initial assignments
  Set<TopicPartition> assignment = context.assignment();
  recoverAssignment(assignment);
}
 
Example 22
Project: bender   File: BaseHandlerS3Test.java
@Before
public void setup() throws UnsupportedEncodingException, IOException {
  /*
   * Patch the handler to use this test's factory which produces a mock client.
   */
  S3MockClientFactory f;
  try {
    f = new S3MockClientFactory(tmpFolder);
  } catch (Exception e) {
    throw new RuntimeException("unable to start s3proxy", e);
  }

  AmazonS3Client client = f.newInstance();
  client.createBucket(S3_BUCKET);
  this.clientFactory = f;

  /*
   * Upload config file
   */
  String payload = IOUtils.toString(new InputStreamReader(
      this.getClass().getResourceAsStream("/config/handler_config.json"), "UTF-8"));
  client.putObject(S3_BUCKET, "bender/config.json", payload);

  /*
   * Export config file as env var
   */
  envVars.set("BENDER_CONFIG", "s3://" + S3_BUCKET + "/bender/config.json");
}
 
Example 23
Project: PlatePicks-Android   File: S3TransferHelper.java
private S3TransferHelper(final Context context,
                        final AmazonS3Client s3Client,
                        final String bucket,
                        final String s3DirPrefix,
                        final String localTransferPath,
                        final LocalContentCache cache) {
    this.bucket = bucket;
    this.s3DirPrefix = s3DirPrefix == null ? "" : s3DirPrefix;
    this.localContentCache = cache;
    if (localTransferPath.endsWith(DIR_DELIMITER)) {
        this.localTransferPath = localTransferPath;
    } else {
        this.localTransferPath = localTransferPath + DIR_DELIMITER;
    }
    transferUtility = new TransferUtility(s3Client, context);
    transfersInProgress = new HashMap<>();
    managedFilesToTransfers = new HashMap<>();
    progressListeners = new HashMap<>();
}
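The constructor above only wires the AmazonS3Client into a TransferUtility; the actual transfer happens elsewhere. As a rough usage sketch against the AWS Mobile SDK for Android (the file name and listener bodies are hypothetical, and bucket, s3DirPrefix, localTransferPath, and transferUtility are the fields initialized above), an upload could be started like this:

// Hypothetical local file; the key is built from the prefix set up in the constructor.
File file = new File(localTransferPath + "photo.jpg");
TransferObserver observer = transferUtility.upload(bucket, s3DirPrefix + "photo.jpg", file);
observer.setTransferListener(new TransferListener() {
    @Override
    public void onStateChanged(int id, TransferState state) {
        // e.g. remove the transfer from transfersInProgress once COMPLETED
    }

    @Override
    public void onProgressChanged(int id, long bytesCurrent, long bytesTotal) {
        // forward progress to any registered progressListeners
    }

    @Override
    public void onError(int id, Exception ex) {
        // surface the failure to the caller
    }
});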
 
Example 24
Project: rxjava-aws   File: SqsTest.java
@SuppressWarnings("unchecked")
@Test
public void ensureIfSendToSqsFailsThatS3ObjectIsDeleted() {
	AmazonSQSClient sqs = Mockito.mock(AmazonSQSClient.class);
	AmazonS3Client s3 = Mockito.mock(AmazonS3Client.class);
	Mockito.when(sqs.sendMessage(Mockito.anyString(), Mockito.anyString())).thenThrow(RuntimeException.class);
	try {
		Sqs.sendToQueueUsingS3(sqs, "queueUrl", s3, "bucket", new byte[] { 1, 2 });
	} catch (RuntimeException e) {
		assertTrue(e instanceof CompositeException);
		InOrder inorder = Mockito.inOrder(sqs, s3);
		inorder.verify(s3, Mockito.times(1)).putObject(Mockito.anyString(), Mockito.anyString(), Mockito.any(),
				Mockito.any());
		inorder.verify(sqs, Mockito.times(1)).sendMessage(Mockito.anyString(), Mockito.anyString());
		inorder.verify(s3, Mockito.times(1)).deleteObject(Mockito.anyString(), Mockito.anyString());
		inorder.verifyNoMoreInteractions();
	}
}
 
Example 25
Project: big-c   File: TestS3AConfiguration.java
/**
 * Test if custom endpoint is picked up.
 * <p/>
 * The test expects TEST_ENDPOINT to be defined in the Configuration
 * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
 * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
 * Evidently, the bucket has to be hosted in the region denoted by the
 * endpoint for the test to succeed.
 * <p/>
 * More info and the list of endpoint identifiers:
 * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
 *
 * @throws Exception
 */
@Test
public void TestEndpoint() throws Exception {
  conf = new Configuration();
  String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
  if (endpoint.isEmpty()) {
    LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
        "setting was not detected");
  } else {
    conf.set(Constants.ENDPOINT, endpoint);
    fs = S3ATestUtils.createTestFileSystem(conf);
    AmazonS3Client s3 = fs.getAmazonS3Client();
    String endPointRegion = "";
    // Differentiate handling of "s3-" and "s3." based endpoint identifiers
    String[] endpointParts = StringUtils.split(endpoint, '.');
    if (endpointParts.length == 3) {
      endPointRegion = endpointParts[0].substring(3);
    } else if (endpointParts.length == 4) {
      endPointRegion = endpointParts[1];
    } else {
      fail("Unexpected endpoint");
    }
    assertEquals("Endpoint config setting and bucket location differ: ",
        endPointRegion, s3.getBucketLocation(fs.getUri().getHost()));
  }
}
 
Example 26
Project: aws-iot-mqtt-load-generator   File: LoadConfigsRegistry.java
public static void deleteConfig(String id) throws LoadConfigException {
    if (configs.remove(id) == null) {
        throw new LoadConfigException("Could not find config " + id);
    }

    String root = SystemConfig.getLoadConfigRoot();

    if (isLocalFilesystem(root)) {
        File f = new File(root, id + ".json");
        if (!f.exists()) {
            LOG.log(Level.INFO, "Config {0} does not exist in storage {1} so doing nothing which is idempotent", new Object[]{id, root});
        }
        f.delete();
    } else if (isS3(root)) {
        AmazonS3 api = new AmazonS3Client(SystemConfig.getCredentials());

        SystemConfig.S3Info info = SystemConfig.getS3Info(SystemConfig.getConfigRoot());

        byte[] data;
        String key = info.prefixPath + "/" + id + ".json";
        api.deleteObject(info.bucket, key);
        LOG.log(Level.INFO, "successfully deleted MetricsConfiguration in S3 {0}/{1}", new Object[]{info.bucket, key});

    }
}
 
Example 27
Project: CloudCryptoSearch   File: CloudTester.java
public static void main (String[] args) throws Exception {
	String credentials = new String("secretKey=MD/FGPCuKkq9EBCpV0qXbfTbaQByFdQYPiM9/BzA" +
			"\n" + "accessKey=AKIAICG6X3TDB3GIBQIQ");
	AmazonS3Client s3 = new AmazonS3Client(new PropertiesCredentials(new ByteArrayInputStream(credentials.getBytes())));
	File f = new File("C:\rfcs\rfc4912.txt");
	String s = readFileAsString(f);
	
	for (int i = 0; i < 1000; i++) {
		ObjectMetadata metadata = new ObjectMetadata();
		metadata.setContentLength(s.getBytes().length);
		s3.putObject("cryptosearch", "test/"+i, new ByteArrayInputStream(s.getBytes()), metadata);
	}
}
 
Example 28
Project: photon-model   File: AWSUtils.java
public static AmazonS3Client getS3Client(AuthCredentialsServiceState credentials,
        String regionId) {

    ClientConfiguration configuration = new ClientConfiguration();
    configuration.withRetryPolicy(new RetryPolicy(new CustomRetryCondition(),
            DEFAULT_BACKOFF_STRATEGY,
            DEFAULT_MAX_ERROR_RETRY,
            false));

    AWSStaticCredentialsProvider awsStaticCredentialsProvider = new AWSStaticCredentialsProvider(
            new BasicAWSCredentials(credentials.privateKeyId,
                    EncryptionUtils.decrypt(credentials.privateKey)));

    AmazonS3ClientBuilder amazonS3ClientBuilder = AmazonS3ClientBuilder
            .standard()
            .withClientConfiguration(configuration)
            .withCredentials(awsStaticCredentialsProvider)
            .withRegion(regionId);

    if (isAwsClientMock()) {
        throw new IllegalArgumentException("AWS Mock does not support S3 client");
    }

    return (AmazonS3Client) amazonS3ClientBuilder.build();
}
 
Example 29
Project: cerberus-lifecycle-cli   File: CerberusModule.java
private Optional<String> findBucket(final String environmentName) {
    AmazonS3Client s3Client = new AmazonS3Client();
    List<Bucket> buckets = s3Client.listBuckets();

    String envBucket = null;
    for (final Bucket bucket : buckets) {
        if (StringUtils.contains(bucket.getName(), ConfigConstants.CONFIG_BUCKET_KEY)) {
            String[] parts = bucket.getName().split("-");
            if (StringUtils.equalsIgnoreCase(environmentName, parts[0])) {
                envBucket = bucket.getName();
                break;
            }
        }
    }

    return Optional.ofNullable(envBucket);
}
 
Example 30
Project: rxjava-aws   File: Sqs.java
private static Observable<SqsMessage> get(AmazonSQSClient sqs, String queueName,
        Optional<String> bucketName, Optional<AmazonS3Client> s3, Service service,
        int waitTimeSeconds) {
    return Observable.defer(() -> {
        String queueUrl = sqs.getQueueUrl(queueName).getQueueUrl();
        return Observable
                .just(sqs.receiveMessage(request(queueName, waitTimeSeconds)) //
                        .getMessages() //
                        .stream() //
                        .map(m -> Sqs.getNextMessage(m, queueUrl, bucketName, s3, sqs, service)) //
                        .collect(Collectors.toList())) //
                .concatWith(Observable
                        .defer(() -> Observable.just(sqs.receiveMessage(request(queueName, 0)) //
                                .getMessages() //
                                .stream() //
                                .map(m -> Sqs.getNextMessage(m, queueUrl, bucketName, s3, sqs,
                                        service)) //
                        .collect(Collectors.toList()))) //
                        .repeat())
                .takeWhile(list -> !list.isEmpty()) //
                .flatMapIterable(Functions.identity()) //
                .filter(opt -> opt.isPresent()).map(opt -> opt.get());
    });//
}
 
Example 31
Project: cloudkeeper   File: ITS3StagingArea.java
public void setup() {
    AWSCredentialsProvider awsCredentialsProvider = new DefaultAWSCredentialsProviderChain();
    try {
        awsCredentialsProvider.getCredentials();

        s3Bucket = System.getenv("CLOUDKEEPER_S3_TEST_BUCKET");
        if (s3Bucket == null) {
            s3Bucket = System.getProperty("xyz.cloudkeeper.s3.testbucket");
        }

        if (s3Bucket != null) {
            executorService = Executors.newScheduledThreadPool(4);

            AmazonS3 s3Client = new AmazonS3Client(awsCredentialsProvider);
            s3Connection = new S3ConnectionBuilder(s3Client, executorService).build();
            skipTest = false;

            cleanS3(s3Connection, s3Bucket);
        }
    } catch (AmazonClientException exception) {
        credentialsException = exception;
    }
}
 
Example 32
Project: rxjava-aws   File: SqsMessageTest.java
@Test
public void deleteMessageFromFactoryWhenS3FactoryExists() {
    AmazonSQSClient sqs = Mockito.mock(AmazonSQSClient.class);
    AmazonS3Client s3 = Mockito.mock(AmazonS3Client.class);
    String s3Id = "123";
    SqsMessage m = new SqsMessage(RECEIPT_HANDLE, new byte[] {}, 1000, Optional.of(s3Id),
            new SqsMessage.Service(Optional.of(() -> s3), () -> sqs, Optional.of(s3), sqs,
                    QUEUE, Optional.of(BUCKET)));
    Mockito.when(sqs.deleteMessage(QUEUE, RECEIPT_HANDLE))
            .thenReturn(new DeleteMessageResult());
    m.deleteMessage(Client.FROM_FACTORY);
    InOrder inorder = Mockito.inOrder(sqs, s3);
    inorder.verify(s3, Mockito.times(1)).deleteObject(BUCKET, s3Id);
    inorder.verify(sqs, Mockito.times(1)).deleteMessage(QUEUE, RECEIPT_HANDLE);
    inorder.verify(s3, Mockito.times(1)).shutdown();
    inorder.verify(sqs, Mockito.times(1)).shutdown();
    Mockito.verifyNoMoreInteractions(sqs, s3);
}
 
Example 33
Project: alexa-morser-coder-skill   File: ImageUtils.java
protected static String uploadFileToS3(BufferedImage image, String word, Boolean codeOnly) throws IOException {
    ByteArrayInputStream bis = null;
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try {
        String bucket = SkillConfig.getS3BucketName();
        String fileKey = getFileKey(word, codeOnly);
        ImageIO.write(image, "png", bos);
        byte[] bImageData = bos.toByteArray();
        bis = new ByteArrayInputStream(bImageData);
        // upload to s3 bucket
        AWSCredentials awsCredentials = SkillConfig.getAWSCredentials();
        AmazonS3Client s3Client = awsCredentials != null ? new AmazonS3Client(awsCredentials) : new AmazonS3Client();
        PutObjectRequest s3Put = new PutObjectRequest(bucket, fileKey, bis, null).withCannedAcl(CannedAccessControlList.PublicRead);
        s3Client.putObject(s3Put);
        return getS3Url(word, codeOnly);
    } finally {
        bos.close();
        if (bis != null) bis.close();
    }
}
 
Example 34
Project: rxjava-aws   File: SqsMessageTest.java
@Test
public void shutdownOfSqsAndS3FactoryCreatedClientsOccursWhenS3DeleteObjectFails() {
    AmazonSQSClient sqs = Mockito.mock(AmazonSQSClient.class);
    AmazonS3Client s3 = Mockito.mock(AmazonS3Client.class);
    String s3Id = "123";
    SqsMessage m = new SqsMessage(RECEIPT_HANDLE, new byte[] {}, 1000, Optional.of(s3Id),
            new SqsMessage.Service(Optional.of(() -> s3), () -> sqs, Optional.of(s3), sqs,
                    QUEUE, Optional.of(BUCKET)));
    Mockito.when(sqs.deleteMessage(QUEUE, RECEIPT_HANDLE))
            .thenReturn(new DeleteMessageResult());
    Mockito.doThrow(new RuntimeException()).when(s3).deleteObject(BUCKET, s3Id);
    try {
        m.deleteMessage(Client.FROM_FACTORY);
        Assert.fail();
    } catch (RuntimeException e) {
        // do nothing
    }
    InOrder inorder = Mockito.inOrder(sqs, s3);
    inorder.verify(s3, Mockito.times(1)).deleteObject(BUCKET, s3Id);
    inorder.verify(s3, Mockito.times(1)).shutdown();
    inorder.verify(sqs, Mockito.times(1)).shutdown();
    Mockito.verifyNoMoreInteractions(sqs, s3);
}
 
Example 35
Project: dcos-cassandra-service   File: S3StorageDriver.java
String getPrefixKey(BackupRestoreContext ctx) throws URISyntaxException {
    URI uri = new URI(ctx.getExternalLocation());
    String[] segments = uri.getPath().split("/");

    int startIndex = uri.getScheme().equals(AmazonS3Client.S3_SERVICE_NAME) ? 1 : 2;
    String prefixKey = "";
    for (int i=startIndex; i<segments.length; i++) {
        prefixKey += segments[i];
        if (i < segments.length - 1) {
            prefixKey += "/";
        }
    }

    prefixKey = (prefixKey.length() > 0 && !prefixKey.endsWith("/")) ? prefixKey + "/" : prefixKey;
    prefixKey += ctx.getName(); // append backup name

    return prefixKey;
}
 
Example 36
Project: dcos-cassandra-service   File: S3StorageDriver.java
String getEndpoint(BackupRestoreContext ctx) throws URISyntaxException {
    URI uri = new URI(ctx.getExternalLocation());
    String scheme = uri.getScheme();
    if (scheme.equals(AmazonS3Client.S3_SERVICE_NAME)) {
        return Constants.S3_HOSTNAME;
    } else {
        String endpoint = scheme + "://" + uri.getHost();

        int port = uri.getPort();
        if (port != -1) {
            endpoint += ":" + Integer.toString(port);
        }

        return endpoint;
    }
}
 
Example 37
Project: dcos-cassandra-service   File: S3StorageDriver.java
private static Map<String, Long> listSnapshotFiles(AmazonS3Client amazonS3Client,
                                                   String bucketName,
                                                   String backupName) {
    Map<String, Long> snapshotFiles = new HashMap<>();
    final ListObjectsV2Request req = new ListObjectsV2Request()
            .withBucketName(bucketName)
            .withPrefix(backupName);
    ListObjectsV2Result result;
    do {
        result = amazonS3Client.listObjectsV2(req);
        for (S3ObjectSummary objectSummary :
                result.getObjectSummaries()) {
            snapshotFiles.put(objectSummary.getKey(), objectSummary.getSize());
        }
        req.setContinuationToken(result.getNextContinuationToken());
    } while(result.isTruncated());

    return snapshotFiles;
}
 
Example 38
Project: Reer   File: S3Client.java
/**
 * Constructor without provided credentials, delegating to the default provider chain.
 * @since 3.1
 */
@Incubating
public S3Client(S3ConnectionProperties s3ConnectionProperties) {
    this.s3ConnectionProperties = s3ConnectionProperties;
    amazonS3Client = new AmazonS3Client(createConnectionProperties());
    setAmazonS3ConnectionEndpoint();
}
 
Example 39
Project: Android-Client   File: NetworkStore.java
NetworkStore() {
    kwalaCookieStore = new KwalaCookieStore(KwalaApplication.getInstance());
    CookieManager cookieManager = new CookieManager(kwalaCookieStore, CookiePolicy.ACCEPT_ORIGINAL_SERVER);
    CookieHandler.setDefault(cookieManager);

    okHttpClient = new OkHttpClient.Builder()
            .cookieJar(new JavaNetCookieJar(cookieManager))
            .authenticator(authenticator)
            .readTimeout(30, TimeUnit.SECONDS)
            .writeTimeout(30, TimeUnit.SECONDS)
            .build();

    KwalaApplication application = KwalaApplication.getInstance();

    // Initialize the Amazon Cognito credentials provider
    CognitoCachingCredentialsProvider credentialsProvider = new CognitoCachingCredentialsProvider(application,
            KwalaConstants.Network.AWS_IDENTITY_POOL_ID,
            Regions.US_EAST_1
    );

    // Initialize Amazon S3 and transfer utility
    s3Client = new AmazonS3Client(credentialsProvider);
    transferUtility = new TransferUtility(s3Client, application);
}