com.amazonaws.services.s3.transfer.TransferManager Java Examples

The following examples show how to use com.amazonaws.services.s3.transfer.TransferManager. Each example is taken from an open source project; the source file, project, and license are noted above the code.
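Before the project examples, here is a minimal sketch of the typical TransferManager lifecycle with the AWS SDK for Java v1: build the manager around an AmazonS3 client, start a transfer (the call returns immediately), block on waitForCompletion(), and release the manager's threads with shutdownNow(). The bucket name, key, and file path below are placeholders, not values taken from any of the projects that follow.

import java.io.File;

import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;

public class TransferManagerQuickStart {
    public static void main(String[] args) throws InterruptedException {
        // Placeholder values; replace with a real bucket, key, and local file.
        String bucketName = "my-bucket";
        String keyName = "docs/report.pdf";
        File file = new File("/tmp/report.pdf");

        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        TransferManager transferManager = TransferManagerBuilder.standard()
                .withS3Client(s3)
                .build();
        try {
            // upload() returns immediately; the transfer runs asynchronously.
            Upload upload = transferManager.upload(bucketName, keyName, file);
            // Block until the upload finishes (or poll with upload.isDone()).
            upload.waitForCompletion();
        } catch (AmazonServiceException e) {
            System.err.println("Upload failed: " + e.getErrorMessage());
        } finally {
            // Release the TransferManager's thread pool when done.
            transferManager.shutdownNow();
        }
    }
}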
Example #1
Source File: ReadUtilsTest.java    From micro-server with Apache License 2.0
@Test
@SneakyThrows
public void getFileInputStream() {
    TransferManager transferManager = mock(TransferManager.class);
    Download download = mock(Download.class);
    when(transferManager.download(anyString(), anyString(), any())).thenReturn(download);

    ReadUtils readUtils = new ReadUtils(transferManager, System.getProperty("java.io.tmpdir"));

    InputStream fileInputStream = readUtils.getFileInputStream("bucket", "key");
    assertNotNull(fileInputStream);

    verify(transferManager, times(1)).download(anyString(), anyString(), any(File.class));
    verify(download, times(1)).waitForCompletion();

    fileInputStream.close();
}
 
Example #2
Source File: S3S3CopierTest.java    From circus-train with Apache License 2.0
@Test
public void copyCheckTransferManagerIsShutdownWhenSubmittingJobExceptionsAreThrown() throws Exception {
  client.putObject("source", "data", inputData);
  Path sourceBaseLocation = new Path("s3://source/");
  Path replicaLocation = new Path("s3://target/");
  List<Path> sourceSubLocations = new ArrayList<>();

  TransferManagerFactory mockedTransferManagerFactory = Mockito.mock(TransferManagerFactory.class);
  TransferManager mockedTransferManager = Mockito.mock(TransferManager.class);
  when(mockedTransferManagerFactory.newInstance(any(AmazonS3.class), eq(s3S3CopierOptions)))
      .thenReturn(mockedTransferManager);
  when(mockedTransferManager
      .copy(any(CopyObjectRequest.class), any(AmazonS3.class), any(TransferStateChangeListener.class)))
          .thenThrow(new AmazonServiceException("MyCause"));
  S3S3Copier s3s3Copier = new S3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation, s3ClientFactory,
      mockedTransferManagerFactory, listObjectsRequestFactory, registry, s3S3CopierOptions);
  try {
    s3s3Copier.copy();
    fail("exception should have been thrown");
  } catch (CircusTrainException e) {
    verify(mockedTransferManager).shutdownNow();
    assertThat(e.getCause().getMessage(), startsWith("MyCause"));
  }
}
 
Example #3
Source File: S3S3CopierTest.java    From circus-train with Apache License 2.0
@Test
public void copyDefaultCopierOptions() throws Exception {
  client.putObject("source", "data", inputData);
  Path sourceBaseLocation = new Path("s3://source/");
  Path replicaLocation = new Path("s3://target/");
  List<Path> sourceSubLocations = new ArrayList<>();

  TransferManagerFactory mockedTransferManagerFactory = Mockito.mock(TransferManagerFactory.class);
  TransferManager mockedTransferManager = Mockito.mock(TransferManager.class);
  when(mockedTransferManagerFactory.newInstance(any(AmazonS3.class), eq(s3S3CopierOptions)))
      .thenReturn(mockedTransferManager);
  Copy copy = Mockito.mock(Copy.class);
  when(mockedTransferManager
      .copy(any(CopyObjectRequest.class), any(AmazonS3.class), any(TransferStateChangeListener.class)))
          .thenReturn(copy);
  TransferProgress transferProgress = new TransferProgress();
  when(copy.getProgress()).thenReturn(transferProgress);

  S3S3Copier s3s3Copier = new S3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation, s3ClientFactory,
      mockedTransferManagerFactory, listObjectsRequestFactory, registry, s3S3CopierOptions);
  s3s3Copier.copy();
  ArgumentCaptor<CopyObjectRequest> argument = ArgumentCaptor.forClass(CopyObjectRequest.class);
  verify(mockedTransferManager).copy(argument.capture(), any(AmazonS3.class), any(TransferStateChangeListener.class));
  CopyObjectRequest copyObjectRequest = argument.getValue();
  assertNull(copyObjectRequest.getNewObjectMetadata());
}
 
Example #4
Source File: XferMgrProgress.java    From dlp-dataflow-deidentification with Apache License 2.0
public static void uploadDirWithSubprogress(
    String dir_path, String bucket_name, String key_prefix, boolean recursive, boolean pause) {
  System.out.println(
      "directory: " + dir_path + (recursive ? " (recursive)" : "") + (pause ? " (pause)" : ""));

  TransferManager xfer_mgr = new TransferManager();
  try {
    MultipleFileUpload multi_upload =
        xfer_mgr.uploadDirectory(bucket_name, key_prefix, new File(dir_path), recursive);
    // loop with Transfer.isDone()
    XferMgrProgress.showMultiUploadProgress(multi_upload);
    // or block with Transfer.waitForCompletion()
    XferMgrProgress.waitForCompletion(multi_upload);
  } catch (AmazonServiceException e) {
    System.err.println(e.getErrorMessage());
    System.exit(1);
  }
  xfer_mgr.shutdownNow();
}
 
Example #5
Source File: S3S3CopierTest.java    From circus-train with Apache License 2.0
@Test
public void copyCheckTransferManagerIsShutdown() throws Exception {
  client.putObject("source", "data", inputData);
  Path sourceBaseLocation = new Path("s3://source/");
  Path replicaLocation = new Path("s3://target/");
  List<Path> sourceSubLocations = new ArrayList<>();

  TransferManagerFactory mockedTransferManagerFactory = Mockito.mock(TransferManagerFactory.class);
  TransferManager mockedTransferManager = Mockito.mock(TransferManager.class);
  when(mockedTransferManagerFactory.newInstance(any(AmazonS3.class), eq(s3S3CopierOptions)))
      .thenReturn(mockedTransferManager);
  Copy copy = Mockito.mock(Copy.class);
  when(mockedTransferManager
      .copy(any(CopyObjectRequest.class), any(AmazonS3.class), any(TransferStateChangeListener.class)))
          .thenReturn(copy);
  TransferProgress transferProgress = new TransferProgress();
  when(copy.getProgress()).thenReturn(transferProgress);
  S3S3Copier s3s3Copier = new S3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation, s3ClientFactory,
      mockedTransferManagerFactory, listObjectsRequestFactory, registry, s3S3CopierOptions);
  s3s3Copier.copy();
  verify(mockedTransferManager).shutdownNow();
}
 
Example #6
Source File: S3UtilProgram.java    From flink with Apache License 2.0
private static void downloadByFullPathAndFileNamePrefix(ParameterTool params) {
	final String bucket = params.getRequired("bucket");
	final String s3prefix = params.getRequired("s3prefix");
	final String localFolder = params.getRequired("localFolder");
	final String s3filePrefix = params.get("s3filePrefix", "");
	TransferManager tx = TransferManagerBuilder.defaultTransferManager();
	Predicate<String> keyPredicate = getKeyFilterByFileNamePrefix(s3filePrefix);
	KeyFilter keyFilter = s3filePrefix.isEmpty() ? KeyFilter.INCLUDE_ALL :
		objectSummary -> keyPredicate.test(objectSummary.getKey());
	try {
		tx.downloadDirectory(bucket, s3prefix, new File(localFolder), keyFilter).waitForCompletion();
	} catch (InterruptedException e) {
		System.out.println("Transfer interrupted");
	} finally {
		tx.shutdownNow();
	}
}
 
Example #7
Source File: S3UtilProgram.java    From Flink-CEPplus with Apache License 2.0
private static void downloadByFullPathAndFileNamePrefix(ParameterTool params) {
	final String bucket = params.getRequired("bucket");
	final String s3prefix = params.getRequired("s3prefix");
	final String localFolder = params.getRequired("localFolder");
	final String s3filePrefix = params.get("s3filePrefix", "");
	TransferManager tx = TransferManagerBuilder.defaultTransferManager();
	Predicate<String> keyPredicate = getKeyFilterByFileNamePrefix(s3filePrefix);
	KeyFilter keyFilter = s3filePrefix.isEmpty() ? KeyFilter.INCLUDE_ALL :
		objectSummary -> keyPredicate.test(objectSummary.getKey());
	try {
		tx.downloadDirectory(bucket, s3prefix, new File(localFolder), keyFilter).waitForCompletion();
	} catch (InterruptedException e) {
		System.out.println("Transfer interrupted");
	} finally {
		tx.shutdownNow();
	}
}
 
Example #8
Source File: XferMgrUpload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void uploadDir(String dir_path, String bucket_name,
                             String key_prefix, boolean recursive, boolean pause) {
    System.out.println("directory: " + dir_path + (recursive ?
            " (recursive)" : "") + (pause ? " (pause)" : ""));

    // snippet-start:[s3.java1.s3_xfer_mgr_upload.directory]
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        MultipleFileUpload xfer = xfer_mgr.uploadDirectory(bucket_name,
                key_prefix, new File(dir_path), recursive);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_upload.directory]
}
 
Example #9
Source File: S3BlobStore.java    From nexus-blobstore-s3 with Eclipse Public License 1.0
@Override
@Guarded(by = STARTED)
public Blob create(final InputStream blobData, final Map<String, String> headers) {
  checkNotNull(blobData);

  return create(headers, destination -> {
      try (InputStream data = blobData) {
        MetricsInputStream input = new MetricsInputStream(data);
        TransferManager transferManager = new TransferManager(s3);
        transferManager.upload(getConfiguredBucket(), destination, input, new ObjectMetadata())
            .waitForCompletion();
        return input.getMetrics();
      } catch (InterruptedException e) {
        throw new BlobStoreException("error uploading blob", e, null);
      }
    });
}
 
Example #10
Source File: Downloads.java    From jobcacher-plugin with MIT License
public void startDownload(TransferManager manager, File base, String pathPrefix, S3ObjectSummary summary) throws AmazonServiceException, IOException {
    // calculate target file name
    File targetFile = FileUtils.getFile(base, summary.getKey().substring(pathPrefix.length() + 1));

    // if target file exists, only download it if newer
    if (targetFile.lastModified() < summary.getLastModified().getTime()) {
        // ensure directory above file exists
        FileUtils.forceMkdir(targetFile.getParentFile());

        // Start the download
        Download download = manager.download(summary.getBucketName(), summary.getKey(), targetFile);

        // Keep for later
        startedDownloads.add(new Memo(download, targetFile, summary.getLastModified().getTime()));
    }
}
 
Example #11
Source File: S3StorageDriver.java    From dcos-cassandra-service with Apache License 2.0
private void downloadFile(TransferManager tx,
                          String bucketName,
                          String sourcePrefixKey,
                          String destinationFile) throws Exception{
    try {
        final File snapshotFile = new File(destinationFile);
        // Only create parent directory once, if it doesn't exist.
        final File parentDir = new File(snapshotFile.getParent());
        if (!parentDir.isDirectory()) {
            final boolean parentDirCreated = parentDir.mkdirs();
            if (!parentDirCreated) {
                LOGGER.error(
                        "Error creating parent directory for file: {}. Skipping to next",
                        destinationFile);
                return;
            }
        }
        snapshotFile.createNewFile();
        final Download download = tx.download(bucketName, sourcePrefixKey, snapshotFile);
        download.waitForCompletion();
    } catch (Exception e) {
        LOGGER.error("Error downloading the file {} : {}", destinationFile, e);
        throw new Exception(e);
    }
}
 
Example #12
Source File: XferMgrDownload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void downloadFile(String bucket_name, String key_name,
                                String file_path, boolean pause) {
    System.out.println("Downloading to file: " + file_path +
            (pause ? " (pause)" : ""));

    // snippet-start:[s3.java1.s3_xfer_mgr_download.single]
    File f = new File(file_path);
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Download xfer = xfer_mgr.download(bucket_name, key_name, f);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_download.single]
}
 
Example #13
Source File: ReadUtilsTest.java    From micro-server with Apache License 2.0
@Test
public void getInputStreamSupplier()
        throws AmazonClientException, InterruptedException, IOException {
    TransferManager transferManager = mock(TransferManager.class);
    Download download = mock(Download.class);

    when(transferManager.download(anyString(), anyString(), any())).thenReturn(download);

    File file = Files.createTempFile("micro-s3", "test")
                     .toFile();
    assertTrue(file.exists());
    ReadUtils utils = new ReadUtils(transferManager, "test");

    InputStream stream = utils.getInputStream("", "", () -> file);
    assertNotNull(stream);

    assertFalse(file.exists());
}
 
Example #14
Source File: S3RunnerTest.java    From micro-server with Apache License 2.0
@Test
public void runAppAndBasicTest() {

    AmazonS3Client s3client = server.getSpringContext()
                                    .getBean(AmazonS3Client.class);
    assertThat(s3client != null, is(true));

    S3Configuration s3Configuration = server.getSpringContext()
                                            .getBean(S3Configuration.class);
    assertThat(s3Configuration.getAccessKey(), is(""));
    assertThat(s3Configuration.getSecretKey(), is(""));
    assertThat(s3Configuration.getSessionToken() == null, is(true));
    assertThat(s3Configuration.getRegion() == null, is(true));
    assertThat(s3Configuration.getUploadThreads(), is(5));
    assertThat(s3Configuration.getUploadThreadNamePrefix(), is("s3-transfer-manager-worker-"));

    S3Utils s3Utils = server.getSpringContext()
                            .getBean(S3Utils.class);
    assertThat(s3Utils != null, is(true));

    TransferManager tm = server.getSpringContext()
                               .getBean(TransferManager.class);
    assertThat(tm != null, is(true));

}
 
Example #15
Source File: AmazonS3Manager.java    From carina with Apache License 2.0
/**
 * Method to download file from s3 to local file system
 * 
 * @param bucketName AWS S3 bucket name
 * @param key (example: android/apkFolder/ApkName.apk)
 * @param file (local file name)
 * @param pollingInterval (polling interval in sec for S3 download status determination)
 */
public void download(final String bucketName, final String key, final File file, long pollingInterval) {
    LOGGER.info("App will be downloaded from s3.");
    LOGGER.info(String.format("[Bucket name: %s] [Key: %s] [File: %s]", bucketName, key, file.getAbsolutePath()));
    DefaultAWSCredentialsProviderChain credentialProviderChain = new DefaultAWSCredentialsProviderChain();
    TransferManager tx = new TransferManager(
            credentialProviderChain.getCredentials());
    Download appDownload = tx.download(bucketName, key, file);
    try {
        LOGGER.info("Transfer: " + appDownload.getDescription());
        LOGGER.info("	State: " + appDownload.getState());
        LOGGER.info("	Progress: ");
        // You can poll your transfer's status to check its progress
        while (!appDownload.isDone()) {
            LOGGER.info("		transferred: " + (int) (appDownload.getProgress().getPercentTransferred() + 0.5) + "%");
            CommonUtils.pause(pollingInterval);
        }
        LOGGER.info("	State: " + appDownload.getState());
        // appDownload.waitForCompletion();
    } catch (AmazonClientException e) {
        throw new RuntimeException("File wasn't downloaded from s3. See log: ".concat(e.getMessage()));
    }
    // tx.shutdownNow();
}
 
Example #16
Source File: S3UploadSystemTest.java    From micro-server with Apache License 2.0
private static TransferManager createManager() {
    AWSCredentials credentials = new AWSCredentials() {

        @Override
        public String getAWSAccessKeyId() {
            return System.getProperty("s3.accessKey");
        }

        @Override
        public String getAWSSecretKey() {
            return System.getProperty("s3.secretKey");
        }

    };
    return new TransferManager(
                                  credentials);
}
 
Example #17
Source File: S3DaoImpl.java    From herd with Apache License 2.0
/**
 * Gets a transfer manager with the specified parameters including proxy host, proxy port, S3 access key, S3 secret key, and max threads.
 *
 * @param params the parameters.
 *
 * @return a newly created transfer manager.
 */
private TransferManager getTransferManager(final S3FileTransferRequestParamsDto params)
{
    // We are returning a new transfer manager each time it is called. Although the Javadocs of TransferManager say to share a single instance
    // if possible, this could potentially be a problem if TransferManager.shutdown(true) is called and underlying resources are not present when needed
    // for subsequent transfers.
    if (params.getMaxThreads() == null)
    {
        // Create a transfer manager that will internally use an appropriate number of threads.
        return new TransferManager(getAmazonS3(params));
    }
    else
    {
        // Create a transfer manager with our own executor configured with the specified total threads.
        LOGGER.info("Creating a transfer manager. fixedThreadPoolSize={}", params.getMaxThreads());
        return new TransferManager(getAmazonS3(params), Executors.newFixedThreadPool(params.getMaxThreads()));
    }
}
 
Example #18
Source File: S3DaoImpl.java    From herd with Apache License 2.0
@Override
public S3FileTransferResultsDto downloadFile(final S3FileTransferRequestParamsDto params) throws InterruptedException
{
    LOGGER.info("Downloading S3 file... s3Key=\"{}\" s3BucketName=\"{}\" localPath=\"{}\"", params.getS3KeyPrefix(), params.getS3BucketName(),
        params.getLocalPath());

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer()
    {
        @Override
        public Transfer performTransfer(TransferManager transferManager)
        {
            return s3Operations.download(params.getS3BucketName(), params.getS3KeyPrefix(), new File(params.getLocalPath()), transferManager);
        }
    });

    LOGGER
        .info("Downloaded S3 file to the local system. s3Key=\"{}\" s3BucketName=\"{}\" localPath=\"{}\" totalBytesTransferred={} transferDuration=\"{}\"",
            params.getS3KeyPrefix(), params.getS3BucketName(), params.getLocalPath(), results.getTotalBytesTransferred(),
            HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);

    return results;
}
 
Example #19
Source File: XferMgrCopy.java    From aws-doc-sdk-examples with Apache License 2.0
public static void copyObjectSimple(String from_bucket, String from_key,
                                    String to_bucket, String to_key) {
    // snippet-start:[s3.java1.s3_xfer_mgr_copy.copy_object]
    System.out.println("Copying s3 object: " + from_key);
    System.out.println("      from bucket: " + from_bucket);
    System.out.println("     to s3 object: " + to_key);
    System.out.println("        in bucket: " + to_bucket);

    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Copy xfer = xfer_mgr.copy(from_bucket, from_key, to_bucket, to_key);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_copy.copy_object]
}
 
Example #20
Source File: XferMgrDownload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void downloadDir(String bucket_name, String key_prefix,
                               String dir_path, boolean pause) {
    System.out.println("downloading to directory: " + dir_path +
            (pause ? " (pause)" : ""));

    // snippet-start:[s3.java1.s3_xfer_mgr_download.directory]
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();

    try {
        MultipleFileDownload xfer = xfer_mgr.downloadDirectory(
                bucket_name, key_prefix, new File(dir_path));
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_download.directory]
}
 
Example #21
Source File: MultipartUpload.java    From tutorials with MIT License
public static void main(String[] args) throws Exception {
    String existingBucketName = "baeldung-bucket";
    String keyName = "my-picture.jpg";
    String filePath = "documents/my-picture.jpg";

    AmazonS3 amazonS3 = AmazonS3ClientBuilder
            .standard()
            .withCredentials(new DefaultAWSCredentialsProviderChain())
            .withRegion(Regions.DEFAULT_REGION)
            .build();

    int maxUploadThreads = 5;

    TransferManager tm = TransferManagerBuilder
            .standard()
            .withS3Client(amazonS3)
            .withMultipartUploadThreshold((long) (5 * 1024 * 1024))
            .withExecutorFactory(() -> Executors.newFixedThreadPool(maxUploadThreads))
            .build();

    ProgressListener progressListener =
            progressEvent -> System.out.println("Transferred bytes: " + progressEvent.getBytesTransferred());

    PutObjectRequest request = new PutObjectRequest(existingBucketName, keyName, new File(filePath));

    request.setGeneralProgressListener(progressListener);

    Upload upload = tm.upload(request);

    try {
        upload.waitForCompletion();
        System.out.println("Upload complete.");
    } catch (AmazonClientException e) {
        System.out.println("Error occurred while uploading file");
        e.printStackTrace();
    }
}
 
Example #22
Source File: TransferManagerFactoryTest.java    From circus-train with Apache License 2.0
@Test
public void shouldCreateDefaultTransferManagerClient() {
  S3S3CopierOptions s3Options = new S3S3CopierOptions(new HashMap<String, Object>() {{
    put(S3S3CopierOptions.Keys.MULTIPART_COPY_THRESHOLD.keyName(), MULTIPART_COPY_THRESHOLD_VALUE);
    put(S3S3CopierOptions.Keys.MULTIPART_COPY_PART_SIZE.keyName(), MULTIPART_COPY_PART_SIZE);
  }});

  TransferManagerFactory factory = new TransferManagerFactory();
  TransferManager transferManager = factory.newInstance(mockClient, s3Options);
  assertThat(transferManager.getAmazonS3Client(), is(mockClient));

  TransferManagerConfiguration managerConfig = transferManager.getConfiguration();
  assertThat(managerConfig.getMultipartCopyPartSize(), is(MULTIPART_COPY_PART_SIZE));
  assertThat(managerConfig.getMultipartCopyThreshold(), is(MULTIPART_COPY_THRESHOLD_VALUE));
}
 
Example #23
Source File: S3SchemaUriResolver.java    From data-highway with Apache License 2.0
@VisibleForTesting
S3SchemaUriResolver(
    TransferManager transferManager,
    String uriFormat,
    String bucket,
    String keyPrefix,
    boolean enableServerSideEncryption) {
  this.transferManager = transferManager;
  this.uriFormat = uriFormat;
  this.bucket = bucket;
  this.keyPrefix = keyPrefix;
  this.enableServerSideEncryption = enableServerSideEncryption;
}
 
Example #24
Source File: XferMgrUpload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void uploadFileList(String[] file_paths, String bucket_name,
                                  String key_prefix, boolean pause) {
    System.out.println("file list: " + Arrays.toString(file_paths) +
            (pause ? " (pause)" : ""));
    // convert the file paths to a list of File objects (required by the
    // uploadFileList method)
    // snippet-start:[s3.java1.s3_xfer_mgr_upload.list_of_files]
    ArrayList<File> files = new ArrayList<File>();
    for (String path : file_paths) {
        files.add(new File(path));
    }

    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        MultipleFileUpload xfer = xfer_mgr.uploadFileList(bucket_name,
                key_prefix, new File("."), files);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_upload.list_of_files]
}
 
Example #25
Source File: XferMgrUpload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void uploadFile(String file_path, String bucket_name,
                              String key_prefix, boolean pause) {
    System.out.println("file: " + file_path +
            (pause ? " (pause)" : ""));

    String key_name = null;
    if (key_prefix != null) {
        key_name = key_prefix + '/' + file_path;
    } else {
        key_name = file_path;
    }

    // snippet-start:[s3.java1.s3_xfer_mgr_upload.single]
    File f = new File(file_path);
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Upload xfer = xfer_mgr.upload(bucket_name, key_name, f);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        //  or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_upload.single]
}
 
Example #26
Source File: FileController.java    From full-teaching with Apache License 2.0
private void productionFileSaver(String keyName, String folderName, File f) throws InterruptedException {
    String bucketName = this.bucketAWS + "/" + folderName;
    TransferManager tm = new TransferManager(this.amazonS3);
    // TransferManager processes all transfers asynchronously, so this call returns immediately
    Upload upload = tm.upload(bucketName, keyName, f);
    try {
        // Or you can block and wait for the upload to finish
        upload.waitForCompletion();
        System.out.println("Upload completed");
    } catch (AmazonClientException amazonClientException) {
        System.out.println("Unable to upload file, upload was aborted.");
        amazonClientException.printStackTrace();
    }
}
 
Example #27
Source File: COSAPIClient.java    From stocator with Apache License 2.0
private void initTransferManager() {
  TransferManagerConfiguration transferConfiguration =
      new TransferManagerConfiguration();
  transferConfiguration.setMinimumUploadPartSize(partSize);
  transferConfiguration.setMultipartUploadThreshold(multiPartThreshold);
  transferConfiguration.setMultipartCopyPartSize(partSize);
  transferConfiguration.setMultipartCopyThreshold(multiPartThreshold);

  transfers = new TransferManager(mClient, unboundedThreadPool);
  transfers.setConfiguration(transferConfiguration);
}
 
Example #28
Source File: COSOutputStream.java    From stocator with Apache License 2.0
/**
 * Constructor for an output stream of an object in COS
 *
 * @param bucketName the bucket the object resides in
 * @param key the key of the object to read
 * @param client the COS client to use for operations
 * @param contentType the content type written to the output stream
 * @param metadata the object's metadata
 * @param transfersT TransferManager
 * @param etag the etag to be used in atomic write (null if no etag exists)
 * @param atomicWriteEnabled if true the put is performed as an atomic write using the etag
 * @param fsT COSAPIClient
 *
 * @throws IOException if error
 */
public COSOutputStream(String bucketName, String key, AmazonS3 client, String contentType,
    Map<String, String> metadata, TransferManager transfersT,
    COSAPIClient fsT, String etag, Boolean atomicWriteEnabled) throws IOException {
  mBucketName = bucketName;
  mEtag = etag;
  mAtomicWriteEnabled = atomicWriteEnabled;
  transfers = transfersT;
  fs = fsT;
  // Remove the bucket name prefix from key path
  if (key.startsWith(bucketName + "/")) {
    mKey = key.substring(bucketName.length() + 1);
  } else {
    mKey = key;
  }
  mContentType = contentType;
  mMetadata = metadata;
  try {
    String tmpPrefix = (key.replaceAll("/", "-")).replaceAll(":", "-");
    mBackupFile = fs.createTmpFileForWrite("output-" + tmpPrefix);
    LOG.trace("OutputStream for key '{}' writing to tempfile: {}", key, mBackupFile);
    mBackupOutputStream = new BufferedOutputStream(new FileOutputStream(mBackupFile), 32768);
  } catch (IOException e) {
    LOG.error(e.getMessage());
    throw e;
  }
}
 
Example #29
Source File: S3Utils.java    From micro-server with Apache License 2.0
@Autowired
public S3Utils(AmazonS3Client client, TransferManager transferManager,
        @Value("${s3.tmp.dir:#{systemProperties['java.io.tmpdir']}}") String tmpDirectory,
        @Value("${s3.aes256.enabled:false}") boolean aes256Encryption,
        @Qualifier("s3UploadExecutorService") ExecutorService uploaderService) {
    this.client = client;
    this.transferManager = transferManager;
    this.tmpDirectory = tmpDirectory;
    this.uploaderService = uploaderService;
    this.aes256Encryption = aes256Encryption;
    this.readUtils = new ReadUtils(transferManager, tmpDirectory);
}
 
Example #30
Source File: S3DaoImpl.java    From herd with Apache License 2.0
@Override
public S3FileTransferResultsDto uploadFile(final S3FileTransferRequestParamsDto params) throws InterruptedException
{
    LOGGER.info("Uploading local file to S3... localPath=\"{}\" s3Key=\"{}\" s3BucketName=\"{}\"", params.getLocalPath(), params.getS3KeyPrefix(),
        params.getS3BucketName());

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer()
    {
        @Override
        public Transfer performTransfer(TransferManager transferManager)
        {
            // Get a handle to the local file.
            File localFile = new File(params.getLocalPath());

            // Create and prepare the metadata.
            ObjectMetadata metadata = new ObjectMetadata();
            prepareMetadata(params, metadata);

            // Create a put request and a transfer manager with the parameters and the metadata.
            PutObjectRequest putObjectRequest = new PutObjectRequest(params.getS3BucketName(), params.getS3KeyPrefix(), localFile);
            putObjectRequest.setMetadata(metadata);

            return s3Operations.upload(putObjectRequest, transferManager);
        }
    });

    LOGGER.info("Uploaded local file to the S3. localPath=\"{}\" s3Key=\"{}\" s3BucketName=\"{}\" totalBytesTransferred={} transferDuration=\"{}\"",
        params.getLocalPath(), params.getS3KeyPrefix(), params.getS3BucketName(), results.getTotalBytesTransferred(),
        HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);

    return results;
}