Java Code Examples for com.amazonaws.services.s3.AmazonS3Client#createBucket()

The following examples show how to use com.amazonaws.services.s3.AmazonS3Client#createBucket(). Each example notes the source file it was taken from, the project it belongs to, and that project's license.
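Before the project examples, here is a minimal, self-contained sketch of the basic call pattern: build a client from credentials, then create a bucket by name. The credential strings and the bucket name below are placeholders, not values taken from any of the projects that follow.

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.Bucket;

public class CreateBucketSketch {
    public static void main(String[] args) {
        // Placeholder credentials; real code would load these from configuration.
        AWSCredentials credentials = new BasicAWSCredentials("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");
        AmazonS3Client s3 = new AmazonS3Client(credentials);

        // createBucket(String) returns the created Bucket.
        Bucket bucket = s3.createBucket("example-bucket-name");
        System.out.println("Created bucket: " + bucket.getName());
    }
}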
Example 1
Source File: S3StorageIT.java    From digdag with Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(TEST_S3_ENDPOINT, not(isEmptyOrNullString()));

    projectDir = folder.getRoot().toPath().resolve("foobar");
    config = folder.newFile().toPath();

    client = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();

    AWSCredentials credentials = new BasicAWSCredentials(TEST_S3_ACCESS_KEY_ID, TEST_S3_SECRET_ACCESS_KEY);
    s3 = new AmazonS3Client(credentials);
    s3.setEndpoint(TEST_S3_ENDPOINT);

    s3.createBucket(archiveBucket);
    s3.createBucket(logStorageBucket);
}
 
Example 2
Source File: S3StorageTest.java    From digdag with Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(TEST_S3_ENDPOINT, not(isEmptyOrNullString()));

    AWSCredentials credentials = new BasicAWSCredentials(TEST_S3_ACCESS_KEY_ID, TEST_S3_SECRET_ACCESS_KEY);
    AmazonS3Client s3 = new AmazonS3Client(credentials);
    s3.setEndpoint(TEST_S3_ENDPOINT);

    String bucket = UUID.randomUUID().toString();
    s3.createBucket(bucket);

    ConfigFactory cf = new ConfigFactory(objectMapper());
    Config config = cf.create()
        .set("endpoint", TEST_S3_ENDPOINT)
        .set("bucket", bucket)  // use unique bucket name
        .set("credentials.access-key-id", TEST_S3_ACCESS_KEY_ID)
        .set("credentials.secret-access-key", TEST_S3_SECRET_ACCESS_KEY)
        ;
    storage = new S3StorageFactory().newStorage(config);
}
 
Example 3
Source File: S3RecordReaderModuleAppTest.java    From attic-apex-malhar with Apache License 2.0
@Before
public void setup() throws Exception
{
  client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey));
  client.createBucket(testMeta.bucketKey);
  inputDir = testMeta.baseDirectory + File.separator + "input";

  File file1 = new File(inputDir + File.separator + FILE_1);
  File file2 = new File(inputDir + File.separator + FILE_2);

  FileUtils.writeStringToFile(file1, FILE_1_DATA);
  FileUtils.writeStringToFile(file2, FILE_2_DATA);

  client.putObject(new PutObjectRequest(testMeta.bucketKey, "input/" + FILE_1, file1));
  client.putObject(new PutObjectRequest(testMeta.bucketKey, "input/" + FILE_2, file2));
  files = SCHEME + "://" + accessKey + ":" + secretKey + "@" + testMeta.bucketKey + "/input";
}
 
Example 4
Source File: S3InputModuleAppTest.java    From attic-apex-malhar with Apache License 2.0
@Before
public void setup() throws Exception
{
  client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey));
  client.createBucket(testMeta.bucketKey);

  inputDir = testMeta.baseDirectory + File.separator + "input";
  outputDir = testMeta.baseDirectory + File.separator + "output";

  File file1 = new File(inputDir + File.separator + FILE_1);
  File file2 = new File(inputDir + File.separator + FILE_2);

  FileUtils.writeStringToFile(file1, FILE_1_DATA);
  FileUtils.writeStringToFile(file2, FILE_2_DATA);
  client.putObject(new PutObjectRequest(testMeta.bucketKey, "input/" + FILE_1, file1));
  client.putObject(new PutObjectRequest(testMeta.bucketKey, "input/" + FILE_2, file2));
  files = SCHEME + "://" + accessKey + ":" + secretKey + "@" + testMeta.bucketKey + "/input";
}
 
Example 5
Source File: CreateS3BucketTask.java    From aws-ant-tasks with Apache License 2.0
public void execute() {
    AmazonS3Client client = getOrCreateClient(AmazonS3Client.class);
    try {
        System.out.println("Creating bucket with name " + bucketName
                + "...");
        client.createBucket(bucketName);
        System.out
                .println("Bucket " + bucketName + " successfuly created.");
    } catch (AmazonServiceException ase) {
        throw new BuildException(
                "AmazonServiceException: Errors in S3 while processing request."
                        + ase.getMessage());
    } catch (AmazonClientException ace) {
        throw new BuildException(
                "AmazonClientException: Errors encountered in the client while"
                        + " making the request or handling the response. "
                        + ace.getMessage());
    } catch (Exception e) {
        throw new BuildException(e.getMessage());
    }
}
 
Example 6
Source File: WarehouseExport.java    From usergrid with Apache License 2.0
private void copyToS3( String fileName ) {

    String bucketName = ( String ) properties.get( BUCKET_PROPNAME );
    String accessId = ( String ) properties.get( ACCESS_ID_PROPNAME );
    String secretKey = ( String ) properties.get( SECRET_KEY_PROPNAME );

    Properties overrides = new Properties();
    overrides.setProperty( "s3" + ".identity", accessId );
    overrides.setProperty( "s3" + ".credential", secretKey );

    final Iterable<? extends Module> MODULES = ImmutableSet
            .of( new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(),
                    new NettyPayloadModule() );

    AWSCredentials credentials = new BasicAWSCredentials(accessId, secretKey);
    ClientConfiguration clientConfig = new ClientConfiguration();
    clientConfig.setProtocol( Protocol.HTTP );

    AmazonS3Client s3Client = new AmazonS3Client(credentials, clientConfig);

    s3Client.createBucket( bucketName );
    File uploadFile = new File( fileName );
    PutObjectResult putObjectResult = s3Client.putObject( bucketName, uploadFile.getName(), uploadFile );
    logger.info("Uploaded file etag={}", putObjectResult.getETag());
}
 
Example 7
Source File: S3Utils.java    From amazon-kinesis-connectors with Apache License 2.0
/**
 * Create an Amazon S3 bucket if it does not exist.
 * 
 * @param client
 *        The {@link AmazonS3Client} with read and write permissions
 * @param bucketName
 *        The bucket to create
 * @throws IllegalStateException
 *         The bucket is not created before timeout occurs
 */
public static void createBucket(AmazonS3Client client, String bucketName) {
    if (!bucketExists(client, bucketName)) {
        CreateBucketRequest createBucketRequest = new CreateBucketRequest(bucketName);
        createBucketRequest.setRegion(Region.US_Standard.toString());
        client.createBucket(createBucketRequest);
    }
    long startTime = System.currentTimeMillis();
    long endTime = startTime + 60 * 1000;
    while (!bucketExists(client, bucketName) && endTime > System.currentTimeMillis()) {
        try {
            LOG.info("Waiting for Amazon S3 to create bucket " + bucketName);
            Thread.sleep(1000 * 10);
        } catch (InterruptedException e) {
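            // Interrupted while sleeping; ignore and let the loop re-check until the timeout elapses.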
        }
    }
    if (!bucketExists(client, bucketName)) {
        throw new IllegalStateException("Could not create bucket " + bucketName);
    }
    LOG.info("Created Amazon S3 bucket " + bucketName);
}
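A caller would typically invoke this helper once during setup. A hypothetical invocation might look like the following; the credential strings and bucket name are placeholders, not values from the amazon-kinesis-connectors project:

// Hypothetical usage of the helper above; credentials and bucket name are placeholders.
AmazonS3Client client = new AmazonS3Client(new BasicAWSCredentials("ACCESS_KEY_ID", "SECRET_ACCESS_KEY"));
S3Utils.createBucket(client, "example-connector-bucket");
// Returns once the bucket is visible, or throws IllegalStateException after roughly a minute.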
 
Example 8
Source File: BaseHandlerS3Test.java    From bender with Apache License 2.0
@Before
public void setup() throws UnsupportedEncodingException, IOException {
  /*
   * Patch the handler to use this test's factory which produces a mock client.
   */
  S3MockClientFactory f;
  try {
    f = new S3MockClientFactory();
  } catch (Exception e) {
    throw new RuntimeException("unable to start s3proxy", e);
  }

  AmazonS3Client client = f.newInstance();
  client.createBucket(S3_BUCKET);
  this.clientFactory = f;

  /*
   * Upload config file
   */
  String payload = IOUtils.toString(new InputStreamReader(
      this.getClass().getResourceAsStream("/config/handler_config.json"), "UTF-8"));
  client.putObject(S3_BUCKET, "bender/config.json", payload);

  /*
   * Export config file as env var
   */
  envVars.set("BENDER_CONFIG", "s3://" + S3_BUCKET + "/bender/config.json");
}
 
Example 9
Source File: S3WaitIT.java    From digdag with Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(TEST_S3_ENDPOINT, not(isEmptyOrNullString()));

    proxyServer = TestUtils.startRequestFailingProxy(10);

    server = TemporaryDigdagServer.builder()
            .environment(ImmutableMap.of(
                    "http_proxy", "http://" + proxyServer.getListenAddress().getHostString() + ":" + proxyServer.getListenAddress().getPort())
            )
            .configuration(
                    "digdag.secret-encryption-key = " + Base64.getEncoder().encodeToString(RandomUtils.nextBytes(16)))
            .build();

    server.start();

    projectDir = folder.getRoot().toPath().resolve("foobar");

    client = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();

    bucket = UUID.randomUUID().toString();

    AWSCredentials credentials = new BasicAWSCredentials(TEST_S3_ACCESS_KEY_ID, TEST_S3_SECRET_ACCESS_KEY);
    s3 = new AmazonS3Client(credentials);
    s3.setEndpoint(TEST_S3_ENDPOINT);
    s3.createBucket(bucket);
}
 
Example 10
Source File: S3FileStoreIntegrationTest.java    From Cheddar with Apache License 2.0
@Before
public void before() {
    amazonS3Client = new AmazonS3Client();

    // create bucket if not present
    final String fullBucketName = BUCKET_SCHEMA + "-" + BUCKET_NAME;
    if (!amazonS3Client.doesBucketExist(fullBucketName)) {
        amazonS3Client.createBucket(fullBucketName);
    }
}
 
Example 11
Source File: DownloadFromS3TaskTests.java    From aws-ant-tasks with Apache License 2.0
@BeforeClass
public static void setUp() throws IOException {
    testFile1 = AWSTestUtils.createRandomFile(TEST_FILE_1, TESTFILE_SUFFIX);
    testFile2 = AWSTestUtils.createRandomFile(TEST_FILE_2, TESTFILE_SUFFIX);
    testFile3 = AWSTestUtils.createRandomFile(TEST_FILE_3, TESTFILE_SUFFIX);
    client = new AmazonS3Client();
    client.createBucket(BUCKET_NAME);
    client.putObject(BUCKET_NAME, KEY_PREFIX + testFile1.getName(),
            testFile1);
    client.putObject(BUCKET_NAME, KEY_PREFIX + testFile2.getName(),
            testFile2);
    client.putObject(BUCKET_NAME, KEY_PREFIX + testFile3.getName(),
            testFile3);
}
 
Example 12
Source File: UploadFileSetToS3TaskTests.java    From aws-ant-tasks with Apache License 2.0
@BeforeClass
public static void setUp() throws IOException {
    testFile1 = AWSTestUtils.createRandomFile(TEST_FILE_1, TESTFILE_SUFFIX);
    testFile2 = AWSTestUtils.createRandomFile(TEST_FILE_2, TESTFILE_SUFFIX);
    testFile3 = AWSTestUtils.createRandomFile(TEST_FILE_3, TESTFILE_SUFFIX);
    fileName1 = testFile1.getName();
    fileName2 = testFile2.getName();
    fileName3 = testFile3.getName();

    client = new AmazonS3Client();
    client.createBucket(BUCKET_NAME);
}
 
Example 13
Source File: S3Utils.java    From wildfly-camel with Apache License 2.0
@SuppressWarnings("unchecked")
public static void createBucket(AmazonS3Client client, String bucketName) throws Exception {

    client.createBucket(bucketName);

    HeadBucketRequest request = new HeadBucketRequest(bucketName);
    Waiter<HeadBucketRequest> waiter = client.waiters().bucketExists();
    Future<Void> future = waiter.runAsync(new WaiterParameters<HeadBucketRequest>(request), new NoOpWaiterHandler());
    future.get(1, TimeUnit.MINUTES);
}