Java Code Examples for com.amazonaws.services.s3.AmazonS3ClientBuilder#defaultClient()

The following examples show how to use com.amazonaws.services.s3.AmazonS3ClientBuilder#defaultClient(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source File: pinpoint_export_endpoints.java    From aws-doc-sdk-examples with Apache License 2.0 6 votes vote down vote up
/**
 * Downloads the given objects from the S3 bucket into the specified local directory.
 *
 * @param s3BucketName      bucket holding the exported endpoint files
 * @param objectKeys        keys of the objects to download
 * @param downloadDirectory local directory the files are written into
 */
public static void downloadFromS3(String s3BucketName, List<String> objectKeys,
                                  String downloadDirectory) {

    // Initializes the Amazon S3 client (region/credentials from the SDK default chain).
    AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();

    try {
        // Downloads each object to the specified file path.
        for (String key : objectKeys) {
            S3Object object = s3Client.getObject(s3BucketName, key);

            // Use only the final path segment as the local file name.
            // NOTE: the previous substring(key.lastIndexOf("/")) threw
            // StringIndexOutOfBoundsException for keys with no "/"; the
            // +1 form handles both cases and drops the leading slash.
            String endpointsFileName = key.substring(key.lastIndexOf("/") + 1);
            Path filePath = Paths.get(downloadDirectory, endpointsFileName);

            System.out.format("Downloading %s to %s . . .\n",
                    filePath.getFileName(), filePath.getParent());

            writeObjectToFile(filePath, object);
        }
        System.out.println("Download finished.");
    } catch (AmazonServiceException | NullPointerException e) {
        // Example-style handling: print the problem and abort the process.
        System.err.println(e.getMessage());
        System.exit(1);
    }

}
 
Example 2
Source File: S3UtilProgram.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Counts lines across all S3 objects matching the given path prefix (and optional
 * file-name prefix) in parallel, and prints the total to stdout.
 */
private static void numberOfLinesInFilesWithFullAndNamePrefix(ParameterTool params) {
	// Gather the CLI arguments that drive the counting job.
	final String bucketName = params.getRequired("bucket");
	final String pathPrefix = params.getRequired("s3prefix");
	final String namePrefix = params.get("s3filePrefix", "");
	final int poolSize = params.getInt("parallelism", 10);

	// Enumerate the candidate objects up front.
	final List<String> candidateFiles = listByFullPathPrefix(bucketName, pathPrefix);

	// Fan the per-file line counts out over a fixed-size thread pool.
	final ExecutorService pool = Executors.newFixedThreadPool(poolSize);
	final AmazonS3 s3client = AmazonS3ClientBuilder.defaultClient();
	final List<CompletableFuture<Integer>> pendingCounts =
		submitLineCountingRequestsForFilesAsync(pool, s3client, bucketName, candidateFiles, namePrefix);
	final int totalLines = waitAndComputeTotalLineCountResult(pendingCounts);

	// Release resources, then report the aggregate.
	pool.shutdownNow();
	s3client.shutdown();
	System.out.print(totalLines);
}
 
Example 3
Source File: pinpoint_list_endpoint_ids.java    From aws-doc-sdk-examples with Apache License 2.0 6 votes vote down vote up
/**
 * Reads every exported endpoints file from the bucket and collects the endpoint
 * IDs they contain.
 *
 * @param s3bucketName     bucket the export files live in
 * @param endpointFileKeys keys of the exported endpoints files
 * @return all endpoint IDs parsed out of the files
 */
private static List<String> getEndpointIds(String s3bucketName, List<String> endpointFileKeys) {

    List<String> endpointIds = new ArrayList<>();

    // Initializes the Amazon S3 client.
    AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();

    try {
        // Harvest the IDs from each exported endpoints file in turn.
        for (String fileKey : endpointFileKeys) {
            S3Object exportedFile = s3Client.getObject(s3bucketName, fileKey);
            endpointIds.addAll(getEndpointIdsFromFile(exportedFile));
        }
    } catch (AmazonServiceException e) {
        // Example-style handling: report the service error and abort.
        System.err.println(e.getMessage());
        System.exit(1);
    }

    return endpointIds;
}
 
Example 4
Source File: HbaseRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 5 votes vote down vote up
// No-arg constructor used when the handler is instantiated by the Lambda
// runtime: delegates to the designated constructor with SDK-default S3,
// Secrets Manager, and Athena clients plus a fresh HbaseConnectionFactory.
public HbaseRecordHandler()
{
    this(AmazonS3ClientBuilder.defaultClient(),
            AWSSecretsManagerClientBuilder.defaultClient(),
            AmazonAthenaClientBuilder.defaultClient(),
            new HbaseConnectionFactory());
}
 
Example 5
Source File: AwsPrivateKeyStore.java    From athenz with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Amazon S3 client: honors an explicitly configured region when the
 * system property is set, otherwise falls back to the SDK default client.
 */
private static AmazonS3 initAmazonS3() {
    final String s3Region = System.getProperty(ATHENZ_PROP_AWS_S3_REGION);
    ///CLOVER:OFF
    if (s3Region == null || s3Region.isEmpty()) {
        return AmazonS3ClientBuilder.defaultClient();
    }
    return AmazonS3ClientBuilder.standard().withRegion(s3Region).build();
    ///CLOVER:ON
}
 
Example 6
Source File: S3ArchiveImageHandler.java    From smart-security-camera with GNU General Public License v3.0 5 votes vote down vote up
@Override
public Parameters handleRequest(Parameters parameters, Context context) {

    context.getLogger().log("Input Function [" + context.getFunctionName() + "], Parameters [" + parameters + "]");

    // The alert flag decides which archive folder receives the snapshot.
    final String archivePrefix = parameters.getSendAlert()
            ? "archive/alerts/"
            : "archive/falsepositives/";
    String newFilename = parameters.getS3Key().replace("upload/", archivePrefix);

    // Turn the first two hyphens into "/" so the key forms sub-directories.
    newFilename = newFilename.replaceFirst("-", "/").replaceFirst("-", "/");

    // Copy the object into the archive location, then delete the original.
    final AmazonS3 client = AmazonS3ClientBuilder.defaultClient();
    client.copyObject(new CopyObjectRequest(parameters.getS3Bucket(), parameters.getS3Key(), parameters.getS3Bucket(), newFilename));
    client.deleteObject(new DeleteObjectRequest(parameters.getS3Bucket(), parameters.getS3Key()));

    // Record where the snapshot now lives so downstream steps can find it.
    parameters.setS3ArchivedKey(newFilename);

    context.getLogger().log("Output Function [" + context.getFunctionName() + "], Parameters [" + parameters + "]");

    return parameters;
}
 
Example 7
Source File: ThumbnailHandler.java    From blog-tutorials with MIT License 5 votes vote down vote up
@Override
public Void handleRequest(S3Event s3Event, Context context) {
  // Resolve the bucket/key of the object that triggered this invocation.
  String bucket = s3Event.getRecords().get(0).getS3().getBucket().getName();
  String key = s3Event.getRecords().get(0).getS3().getObject().getKey();
  System.out.println("Going to create a thumbnail for: " + bucket + "/" + key);

  AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
  System.out.println("Connection to S3 established");

  File tempFile = null;
  File resizedTempFile = null;
  try {
    // S3 keys routinely contain "/" (e.g. "uploads/x.png"), which is illegal
    // in a temp-file prefix and made the original createTempFile call fail.
    String tempPrefix = key.replace('/', '-');
    tempFile = File.createTempFile(tempPrefix, ".tmp");
    s3Client.getObject(new GetObjectRequest(bucket, key), tempFile);
    System.out.println("Successfully read S3 object to local temp file");

    // Scale to THUMBNAIL_SIZE x THUMBNAIL_SIZE. The original scaled to a
    // hard-coded 100x100, silently disagreeing with the canvas dimensions.
    BufferedImage img = new BufferedImage(THUMBNAIL_SIZE, THUMBNAIL_SIZE, BufferedImage.TYPE_INT_RGB);
    java.awt.Graphics2D graphics = img.createGraphics();
    try {
      graphics.drawImage(
          ImageIO.read(tempFile).getScaledInstance(THUMBNAIL_SIZE, THUMBNAIL_SIZE, Image.SCALE_SMOOTH),
          0, 0, null);
    } finally {
      graphics.dispose(); // release native graphics resources
    }
    resizedTempFile = File.createTempFile(tempPrefix, ".resized.tmp");
    ImageIO.write(img, "png", resizedTempFile);
    System.out.println("Successfully created resized image");

    String targetKey = THUMBNAIL_PREFIX + key.replace("uploads/", "");
    s3Client.putObject(bucket, targetKey, resizedTempFile);
    System.out.println("Successfully uploaded resized image with key " + targetKey);
  } catch (IOException e) {
    e.printStackTrace();
  } finally {
    // Warm Lambda containers reuse /tmp; delete the scratch files so repeated
    // invocations don't exhaust the limited ephemeral storage.
    if (tempFile != null) tempFile.delete();
    if (resizedTempFile != null) resizedTempFile.delete();
  }

  return null;
}
 
Example 8
Source File: Lambda.java    From alexa-skills-kit-tester-java with Apache License 2.0 5 votes vote down vote up
// Lambda entry point: resolves an S3 bucket/prefix/region from the request
// payload (falling back to same-named environment variables), lists all *.yml
// conversation-script files under that prefix, runs each one through
// AlexaClient, and finally writes an OK payload to the output stream.
@Override
public void handleRequest(final InputStream input, final OutputStream output, final Context context) throws IOException {
    // Treat a missing input stream as an empty JSON payload.
    final String inputS = IOUtils.toString(Optional.ofNullable(input).orElse(new ByteArrayInputStream("{}".getBytes())));
    // NOTE(review): "om" is presumably a shared Jackson ObjectMapper field — confirm in the class.
    final JsonNode root = om.readTree(inputS);

    // Bucket is mandatory: payload value wins, environment variable is the fallback.
    final String bucket = Optional.ofNullable(root.get(S3_BUCKET_PROPERTY)).map(JsonNode::textValue).filter(StringUtils::isNotBlank).orElse(System.getenv(S3_BUCKET_PROPERTY));
    Validate.notBlank(bucket, S3_BUCKET_PROPERTY + " hasn't been set in the request payload nor as an environment variable.");

    // Key (folder prefix) and region are optional, resolved the same way.
    final String key = Optional.ofNullable(root.get(S3_KEY_PROPERTY)).map(JsonNode::textValue).filter(StringUtils::isNotBlank)
            .orElse(System.getenv(S3_KEY_PROPERTY));
    final String region = Optional.ofNullable(root.get(S3_REGION_PROPERTY)).map(JsonNode::textValue).filter(StringUtils::isNotBlank)
            .orElse(System.getenv(S3_REGION_PROPERTY));

    // Pin the client to an explicit region when one was supplied; otherwise use SDK defaults.
    final AmazonS3 s3client = StringUtils.isNotBlank(region) ? AmazonS3ClientBuilder.standard().withRegion(region).build() : AmazonS3ClientBuilder.defaultClient();

    // Normalize the prefix to end with "/" (empty prefix lists the whole bucket).
    final ListObjectsRequest listRequest = new ListObjectsRequest().withBucketName(bucket).withPrefix(Optional.ofNullable(key).map(k -> k + (k.endsWith("/") ? "" : "/")).orElse(""));

    log.info("[INFO] Reading out *.yml conversation script files in folder '" + listRequest.getPrefix() + "' in bucket '" + listRequest.getBucketName() + "'");

    // Only *.yml objects count as conversation scripts.
    final List<S3ObjectSummary> conversationScripts = s3client.listObjects(listRequest).getObjectSummaries().stream()
            .filter(os -> os.getKey().toLowerCase().endsWith(".yml")).collect(Collectors.toList());

    log.info("[INFO] Found " + conversationScripts.size() + " conversation script files in bucket '" + bucket + "'");

    // Execute each script sequentially; any script failure propagates out of the handler.
    for (final S3ObjectSummary conversationScript : conversationScripts) {
        log.info("[INFO] Load conversation script file " + conversationScript.getKey() + " from S3 bucket " + bucket);

        AlexaClient.create(s3client.getObject(bucket, conversationScript.getKey()).getObjectContent())
                .build()
                .startScript();
    }
    output.write("{ \"OK\" }".getBytes());
}
 
Example 9
Source File: S3UtilProgram.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Prints the number of lines in a single S3 object, computed via the shared
 * S3 Select count query.
 */
private static void numberOfLinesInFile(ParameterTool params) {
	final String bucketName = params.getRequired("bucket");
	final String objectKey = params.getRequired("s3file");
	final AmazonS3 client = AmazonS3ClientBuilder.defaultClient();
	System.out.print(S3QueryUtil.queryFile(client, bucketName, objectKey, countQuery));
	client.shutdown();
}
 
Example 10
Source File: S3UtilProgram.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the target object from the CLI parameters, runs the line-count
 * query against it, and writes the total to stdout.
 */
private static void numberOfLinesInFile(ParameterTool params) {
	final String targetBucket = params.getRequired("bucket");
	final String targetFile = params.getRequired("s3file");
	final AmazonS3 selectClient = AmazonS3ClientBuilder.defaultClient();
	System.out.print(S3QueryUtil.queryFile(selectClient, targetBucket, targetFile, countQuery));
	selectClient.shutdown();
}
 
Example 11
Source File: DocDBRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 5 votes vote down vote up
// No-arg constructor used when the handler is instantiated by the Lambda
// runtime: delegates to the designated constructor with SDK-default S3,
// Secrets Manager, and Athena clients plus a fresh DocDBConnectionFactory.
public DocDBRecordHandler()
{
    this(AmazonS3ClientBuilder.defaultClient(),
            AWSSecretsManagerClientBuilder.defaultClient(),
            AmazonAthenaClientBuilder.defaultClient(),
            new DocDBConnectionFactory());
}
 
Example 12
Source File: CloudwatchRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 5 votes vote down vote up
// No-arg constructor used when the handler is instantiated by the Lambda
// runtime: delegates to the designated constructor with SDK-default S3,
// Secrets Manager, Athena, and CloudWatch Logs clients.
public CloudwatchRecordHandler()
{
    this(AmazonS3ClientBuilder.defaultClient(),
            AWSSecretsManagerClientBuilder.defaultClient(),
            AmazonAthenaClientBuilder.defaultClient(),
            AWSLogsClientBuilder.defaultClient());
}
 
Example 13
Source File: pinpoint_export_endpoints.java    From aws-doc-sdk-examples with Apache License 2.0 4 votes vote down vote up
/**
 * Runs an Amazon Pinpoint export job that writes the application's endpoints to
 * the given S3 bucket, waits for it to finish, and returns the keys of the
 * objects the job created.
 *
 * @param s3BucketName     destination bucket for the export
 * @param iamExportRoleArn IAM role Pinpoint assumes to write to the bucket
 * @param applicationId    Pinpoint application whose endpoints are exported
 * @return keys of the exported objects (the process exits on service errors)
 */
public static List<String> exportEndpointsToS3(String s3BucketName, String iamExportRoleArn,
                                               String applicationId) {

    // The S3 path that Amazon Pinpoint exports the endpoints to.
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH_mm:ss.SSS_z");
    String endpointsKeyPrefix = "exports/" + applicationId + "_" + dateFormat.format(new Date());
    String s3UrlPrefix = "s3://" + s3BucketName + "/" + endpointsKeyPrefix + "/";

    // Defines the export job that Amazon Pinpoint runs.
    ExportJobRequest exportJobRequest = new ExportJobRequest()
            .withS3UrlPrefix(s3UrlPrefix)
            .withRoleArn(iamExportRoleArn);
    CreateExportJobRequest createExportJobRequest = new CreateExportJobRequest()
            .withApplicationId(applicationId)
            .withExportJobRequest(exportJobRequest);

    // Initializes the Amazon Pinpoint client.
    AmazonPinpoint pinpointClient = AmazonPinpointClientBuilder.standard()
            .withRegion(Regions.US_EAST_1).build();

    System.out.format("Exporting endpoints from Amazon Pinpoint application %s to Amazon S3 " +
            "bucket %s . . .\n", applicationId, s3BucketName);

    List<String> objectKeys = null;

    try {
        // Runs the export job with Amazon Pinpoint.
        CreateExportJobResult exportResult =
                pinpointClient.createExportJob(createExportJobRequest);

        // Prints the export job status to the console while the job runs.
        String jobId = exportResult.getExportJobResponse().getId();
        printExportJobStatus(pinpointClient, applicationId, jobId);

        // Initializes the Amazon S3 client.
        AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();

        // Lists the objects created by Amazon Pinpoint, following continuation
        // tokens so exports larger than one listing page (1,000 keys) are not
        // silently truncated — the single-call form only returned the first page.
        objectKeys = new java.util.ArrayList<>();
        com.amazonaws.services.s3.model.ListObjectsV2Request listRequest =
                new com.amazonaws.services.s3.model.ListObjectsV2Request()
                        .withBucketName(s3BucketName)
                        .withPrefix(endpointsKeyPrefix);
        com.amazonaws.services.s3.model.ListObjectsV2Result listResult;
        do {
            listResult = s3Client.listObjectsV2(listRequest);
            for (S3ObjectSummary summary : listResult.getObjectSummaries()) {
                objectKeys.add(summary.getKey());
            }
            listRequest.setContinuationToken(listResult.getNextContinuationToken());
        } while (listResult.isTruncated());

    } catch (AmazonServiceException e) {
        // Example-style handling: report the service error and abort.
        System.err.println(e.getMessage());
        System.exit(1);
    }

    return objectKeys;
}
 
Example 14
Source File: MySqlRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 4 votes vote down vote up
// Convenience constructor: delegates to the designated constructor with
// SDK-default S3, Secrets Manager, and Athena clients, a JDBC connection
// factory built from the given config, and the MySQL query builder.
public MySqlRecordHandler(final DatabaseConnectionConfig databaseConnectionConfig)
{
    this(databaseConnectionConfig, AmazonS3ClientBuilder.defaultClient(), AWSSecretsManagerClientBuilder.defaultClient(), AmazonAthenaClientBuilder.defaultClient(),
            new GenericJdbcConnectionFactory(databaseConnectionConfig, MySqlMetadataHandler.JDBC_PROPERTIES), new MySqlQueryStringBuilder(MYSQL_QUOTE_CHARACTER));
}
 
Example 15
Source File: PostGreSqlRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 4 votes vote down vote up
// Convenience constructor: delegates to the designated constructor with
// SDK-default S3, Secrets Manager, and Athena clients, a JDBC connection
// factory built from the given config, and the PostgreSQL query builder.
public PostGreSqlRecordHandler(final DatabaseConnectionConfig databaseConnectionConfig)
{
    this(databaseConnectionConfig, AmazonS3ClientBuilder.defaultClient(), AWSSecretsManagerClientBuilder.defaultClient(), AmazonAthenaClientBuilder.defaultClient(),
            new GenericJdbcConnectionFactory(databaseConnectionConfig, PostGreSqlMetadataHandler.JDBC_PROPERTIES), new PostGreSqlQueryStringBuilder(POSTGRES_QUOTE_CHARACTER));
}
 
Example 16
Source File: CommandRecordPersistence.java    From aws-service-catalog-terraform-reference-architecture with Apache License 2.0 4 votes vote down vote up
// Creates a persistence layer backed by the given bucket, using an
// SDK-default S3 client (region/credentials from the default chain).
public CommandRecordPersistence(String bucketName) {
    this.s3 = AmazonS3ClientBuilder.defaultClient();
    this.bucketName = bucketName;
}
 
Example 17
Source File: rekognition-image-java-display-bounding-boxes.java    From aws-doc-sdk-examples with Apache License 2.0 4 votes vote down vote up
/**
 * Fetches an image from S3, runs Rekognition DetectFaces on it, prints the
 * pixel bounding box of every detected face, and displays the annotated image
 * in a Swing frame.
 */
public static void main(String arg[]) throws Exception {
    //Change the value of bucket to the S3 bucket that contains your image file.
    //Change the value of photo to your image file name.
    String photo = "input.png";
    String bucket = "bucket";

    // Get the image from an S3 bucket.
    AmazonS3 s3client = AmazonS3ClientBuilder.defaultClient();
    com.amazonaws.services.s3.model.S3Object s3object = s3client.getObject(bucket, photo);

    BufferedImage image;
    // Close the content stream once decoded — the original never closed it,
    // leaking the underlying HTTP connection.
    try (S3ObjectInputStream inputStream = s3object.getObjectContent()) {
        image = ImageIO.read(inputStream);
    }
    if (image == null) {
        // ImageIO.read returns null when no registered reader can decode the data;
        // without this check the getWidth() call below would throw a bare NPE.
        throw new IllegalStateException("Unable to decode image s3://" + bucket + "/" + photo);
    }

    int width = image.getWidth();
    int height = image.getHeight();

    DetectFacesRequest request = new DetectFacesRequest()
            .withImage(new Image().withS3Object(new S3Object().withName(photo).withBucket(bucket)));

    // Call DetectFaces.
    AmazonRekognition amazonRekognition = AmazonRekognitionClientBuilder.defaultClient();
    DetectFacesResult result = amazonRekognition.detectFaces(request);

    // Show the bounding box info for each face. Box values are fractions of
    // the image size, so multiply by width/height to get pixels.
    List<FaceDetail> faceDetails = result.getFaceDetails();
    for (FaceDetail face : faceDetails) {

        BoundingBox box = face.getBoundingBox();
        float left = width * box.getLeft();
        float top = height * box.getTop();
        System.out.println("Face:");

        System.out.println("Left: " + String.valueOf((int) left));
        System.out.println("Top: " + String.valueOf((int) top));
        System.out.println("Face Width: " + String.valueOf((int) (width * box.getWidth())));
        System.out.println("Face Height: " + String.valueOf((int) (height * box.getHeight())));
        System.out.println();

    }

    // Create frame and panel to display the annotated image.
    JFrame frame = new JFrame("RotateImage");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    DisplayFaces panel = new DisplayFaces(result, image);
    panel.setPreferredSize(new Dimension(image.getWidth() / scale, image.getHeight() / scale));
    frame.setContentPane(panel);
    frame.pack();
    frame.setVisible(true);

}
 
Example 18
Source File: AmazonS3InstallationService.java    From java-slack-sdk with MIT License 4 votes vote down vote up
protected AmazonS3 createS3Client() {
    return AmazonS3ClientBuilder.defaultClient();
}
 
Example 19
Source File: ExampleRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 4 votes vote down vote up
// No-arg constructor used when the handler is instantiated by the Lambda
// runtime: delegates with SDK-default S3, Secrets Manager, and Athena clients.
public ExampleRecordHandler()
{
    this(AmazonS3ClientBuilder.defaultClient(), AWSSecretsManagerClientBuilder.defaultClient(), AmazonAthenaClientBuilder.defaultClient());
}
 
Example 20
Source File: TPCDSRecordHandler.java    From aws-athena-query-federation with Apache License 2.0 4 votes vote down vote up
public TPCDSRecordHandler()
{
    super(AmazonS3ClientBuilder.defaultClient(), AWSSecretsManagerClientBuilder.defaultClient(), AmazonAthenaClientBuilder.defaultClient(), SOURCE_TYPE);
}