com.amazonaws.services.s3.transfer.MultipleFileUpload Java Examples

The following examples show how to use com.amazonaws.services.s3.transfer.MultipleFileUpload. They are drawn from open source projects; the source file and originating project are noted above each example.
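Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them follow: build a TransferManager, start a MultipleFileUpload with uploadDirectory(), block until it finishes, and shut the manager down. The bucket name, key prefix, and local directory below are placeholders for illustration, not values from any of the projects listed.

import java.io.File;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

public class MultipleFileUploadQuickStart {
    public static void main(String[] args) throws InterruptedException {
        // Placeholder bucket, key prefix, and local directory; adjust for your environment.
        String bucketName = "my-example-bucket";
        String keyPrefix = "backups/2024";
        File directory = new File("/tmp/data-to-upload");

        TransferManager transferManager = TransferManagerBuilder.standard().build();
        try {
            // uploadDirectory() returns immediately; the MultipleFileUpload tracks
            // all per-file sub-transfers as a single logical transfer.
            MultipleFileUpload upload =
                    transferManager.uploadDirectory(bucketName, keyPrefix, directory, true);

            // Block until every file has been uploaded (or a transfer fails).
            upload.waitForCompletion();
        } finally {
            // Release the TransferManager's internal thread pool.
            transferManager.shutdownNow();
        }
    }
}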
Example #1
Source File: XferMgrProgress.java    From dlp-dataflow-deidentification with Apache License 2.0
public static void uploadDirWithSubprogress(
    String dir_path, String bucket_name, String key_prefix, boolean recursive, boolean pause) {
  System.out.println(
      "directory: " + dir_path + (recursive ? " (recursive)" : "") + (pause ? " (pause)" : ""));

  TransferManager xfer_mgr = new TransferManager();
  try {
    MultipleFileUpload multi_upload =
        xfer_mgr.uploadDirectory(bucket_name, key_prefix, new File(dir_path), recursive);
    // loop with Transfer.isDone()
    XferMgrProgress.showMultiUploadProgress(multi_upload);
    // or block with Transfer.waitForCompletion()
    XferMgrProgress.waitForCompletion(multi_upload);
  } catch (AmazonServiceException e) {
    System.err.println(e.getErrorMessage());
    System.exit(1);
  }
  xfer_mgr.shutdownNow();
}
 
Example #2
Source File: XferMgrUpload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void uploadDir(String dir_path, String bucket_name,
                             String key_prefix, boolean recursive, boolean pause) {
    System.out.println("directory: " + dir_path + (recursive ?
            " (recursive)" : "") + (pause ? " (pause)" : ""));

    // snippet-start:[s3.java1.s3_xfer_mgr_upload.directory]
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        MultipleFileUpload xfer = xfer_mgr.uploadDirectory(bucket_name,
                key_prefix, new File(dir_path), recursive);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_upload.directory]
}
 
Example #3
Source File: S3StorageDriver.java    From dcos-cassandra-service with Apache License 2.0
private void uploadDirectory(TransferManager tx,
                             String bucketName,
                             String key,
                             String keyspaceName,
                             String cfName,
                             File snapshotDirectory) throws Exception {
    try {
        final String fileKey = key + "/" + keyspaceName + "/" + cfName + "/";
        final MultipleFileUpload myUpload = tx.uploadDirectory(bucketName, fileKey, snapshotDirectory, true);
        myUpload.waitForCompletion();
    } catch (Exception e) {
        LOGGER.error("Error occurred on uploading directory {} : {}", snapshotDirectory.getName(), e);
        throw new Exception(e);
    }
}
 
Example #4
Source File: S3UploadStepTransferManagerIntegrationTest.java    From pipeline-aws-plugin with Apache License 2.0
@Test
public void useFileListUploaderWhenIncludePathPatternDefined() throws Exception {
	WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "S3UploadStepTest");
	job.setDefinition(new CpsFlowDefinition(""
			+ "node {\n"
			+ "  writeFile file: 'work/subdir/test.txt', text: 'Hello!'\n"
			+ "  s3Upload(bucket: 'test-bucket', includePathPattern: '**/*.txt', workingDir: 'work')"
			+ "}\n", true)
	);

	MultipleFileUpload upload = Mockito.mock(MultipleFileUpload.class);
	Mockito.when(transferManager.uploadFileList(Mockito.eq("test-bucket"), Mockito.eq(""), Mockito.any(File.class), Mockito.any(List.class), Mockito.any(ObjectMetadataProvider.class), Mockito.any(ObjectTaggingProvider.class)))
			.thenReturn(upload);
	Mockito.when(upload.getSubTransfers()).thenReturn(Collections.emptyList());

	jenkinsRule.assertBuildStatusSuccess(job.scheduleBuild2(0));

	ArgumentCaptor<List> captor = ArgumentCaptor.forClass(List.class);
	ArgumentCaptor<File> captorDirectory = ArgumentCaptor.forClass(File.class);

	Mockito.verify(transferManager).uploadFileList(
			Mockito.eq("test-bucket"),
			Mockito.eq(""),
			captorDirectory.capture(),
			captor.capture(),
			Mockito.any(ObjectMetadataProvider.class),
			Mockito.any(ObjectTaggingProvider.class));
	Mockito.verify(upload).getSubTransfers();
	Mockito.verify(upload).waitForCompletion();
	Mockito.verify(transferManager).shutdownNow();
	Mockito.verifyNoMoreInteractions(transferManager, upload);

	Assert.assertEquals(1, captor.getValue().size());
	Assert.assertEquals("test.txt", ((File)captor.getValue().get(0)).getName());
	Assertions.assertThat(((File)captor.getValue().get(0)).getPath()).matches("^.*subdir.test.txt$");
	Assertions.assertThat(captorDirectory.getValue().getPath()).endsWith("work");
	Assertions.assertThat(captorDirectory.getValue().getPath()).doesNotContain("subdir");
}
 
Example #5
Source File: XferMgrUpload.java    From aws-doc-sdk-examples with Apache License 2.0
public static void uploadFileList(String[] file_paths, String bucket_name,
                                  String key_prefix, boolean pause) {
    System.out.println("file list: " + Arrays.toString(file_paths) +
            (pause ? " (pause)" : ""));
    // convert the file paths to a list of File objects (required by the
    // uploadFileList method)
    // snippet-start:[s3.java1.s3_xfer_mgr_upload.list_of_files]
    ArrayList<File> files = new ArrayList<File>();
    for (String path : file_paths) {
        files.add(new File(path));
    }

    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        MultipleFileUpload xfer = xfer_mgr.uploadFileList(bucket_name,
                key_prefix, new File("."), files);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_upload.list_of_files]
}
 
Example #6
Source File: S3FileManagerImpl.java    From entrada with GNU General Public License v3.0
private boolean uploadDirectory(File location, S3Details dstDetails, boolean archive) {

    ObjectMetadataProvider metaDataProvider = (file, meta) -> {

      if (archive) {
        meta
            .setHeader(Headers.STORAGE_CLASS,
                StorageClass.fromValue(StringUtils.upperCase(archiveStorageClass)));
      }

      if (encrypt) {
        meta.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
      }
    };

    MultipleFileUpload upload = transferManager
        .uploadDirectory(dstDetails.getBucket(), dstDetails.getKey(), location, true,
            metaDataProvider);


    if (log.isDebugEnabled()) {
      ProgressListener progressListener = progressEvent -> log
          .debug("S3 Transferred bytes: " + progressEvent.getBytesTransferred());
      upload.addProgressListener(progressListener);
    }

    try {
      upload.waitForCompletion();
      return true;
    } catch (Exception e) {
      log.error("Error while uploading directory: {}", location, e);
    }

    return false;
  }
 
Example #7
Source File: MockS3OperationsImpl.java    From herd with Apache License 2.0
@Override
public MultipleFileUpload uploadDirectory(String bucketName, String virtualDirectoryKeyPrefix, File directory, boolean includeSubdirectories,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager)
{
    LOGGER.debug(
        "uploadDirectory(): bucketName = " + bucketName + ", virtualDirectoryKeyPrefix = " + virtualDirectoryKeyPrefix + ", directory = " + directory +
            ", includeSubdirectories = " + includeSubdirectories);

    List<File> files = new ArrayList<>();
    listFiles(directory, files, includeSubdirectories);

    return uploadFileList(bucketName, virtualDirectoryKeyPrefix, directory, files, metadataProvider, transferManager);
}
 
Example #8
Source File: S3UploadStep.java    From pipeline-aws-plugin with Apache License 2.0
@Override
public Void invoke(File localFile, VirtualChannel channel) throws IOException, InterruptedException {
	TransferManager mgr = TransferManagerBuilder.standard()
			.withS3Client(AWSClientFactory.create(this.amazonS3ClientOptions.createAmazonS3ClientBuilder(), this.envVars))
			.build();
	final MultipleFileUpload fileUpload;
	ObjectMetadataProvider metadatasProvider = (file, meta) -> {
		if (meta != null) {
			if (RemoteListUploader.this.metadatas != null && RemoteListUploader.this.metadatas.size() > 0) {
				meta.setUserMetadata(RemoteListUploader.this.metadatas);
			}
			if (RemoteListUploader.this.acl != null) {
				meta.setHeader(Headers.S3_CANNED_ACL, RemoteListUploader.this.acl);
			}
			if (RemoteListUploader.this.cacheControl != null && !RemoteListUploader.this.cacheControl.isEmpty()) {
				meta.setCacheControl(RemoteListUploader.this.cacheControl);
			}
			if (RemoteListUploader.this.contentEncoding != null && !RemoteListUploader.this.contentEncoding.isEmpty()) {
				meta.setContentEncoding(RemoteListUploader.this.contentEncoding);
			}
			if (RemoteListUploader.this.contentType != null && !RemoteListUploader.this.contentType.isEmpty()) {
				meta.setContentType(RemoteListUploader.this.contentType);
			}
			if (RemoteListUploader.this.sseAlgorithm != null && !RemoteListUploader.this.sseAlgorithm.isEmpty()) {
				meta.setSSEAlgorithm(RemoteListUploader.this.sseAlgorithm);
			}
			if (RemoteListUploader.this.kmsId != null && !RemoteListUploader.this.kmsId.isEmpty()) {
				final SSEAwsKeyManagementParams sseAwsKeyManagementParams = new SSEAwsKeyManagementParams(RemoteListUploader.this.kmsId);
				meta.setSSEAlgorithm(sseAwsKeyManagementParams.getAwsKmsKeyId());
				meta.setHeader(
						Headers.SERVER_SIDE_ENCRYPTION_AWS_KMS_KEYID,
						sseAwsKeyManagementParams.getAwsKmsKeyId()
				);
			}

		}
	};

	ObjectTaggingProvider objectTaggingProvider = (uploadContext) -> {
		List<Tag> tagList = new ArrayList<Tag>();

		//add tags
		if(tags != null){
			for (Map.Entry<String, String> entry : tags.entrySet()) {
				Tag tag = new Tag(entry.getKey(), entry.getValue());
				tagList.add(tag);
			}
		}
		return new ObjectTagging(tagList);
	};

	try {
		fileUpload = mgr.uploadFileList(this.bucket, this.path, localFile, this.fileList, metadatasProvider, objectTaggingProvider);
		for (final Upload upload : fileUpload.getSubTransfers()) {
			upload.addProgressListener((ProgressListener) progressEvent -> {
				if (progressEvent.getEventType() == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
					RemoteListUploader.this.taskListener.getLogger().println("Finished: " + upload.getDescription());
				}
			});
		}
		fileUpload.waitForCompletion();
	}
	finally {
		mgr.shutdownNow();
	}
	return null;
}
 
Example #9
Source File: S3OperationsImpl.java    From herd with Apache License 2.0
@Override
public MultipleFileUpload uploadDirectory(String s3BucketName, String virtualDirectoryKeyPrefix, File directory, boolean includeSubdirectories,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager)
{
    return transferManager.uploadDirectory(s3BucketName, virtualDirectoryKeyPrefix, directory, includeSubdirectories, metadataProvider);
}
 
Example #10
Source File: S3OperationsImpl.java    From herd with Apache License 2.0
@Override
public MultipleFileUpload uploadFileList(String s3BucketName, String virtualDirectoryKeyPrefix, File directory, List<File> files,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager)
{
    return transferManager.uploadFileList(s3BucketName, virtualDirectoryKeyPrefix, directory, files, metadataProvider);
}
 
Example #11
Source File: MockS3OperationsImpl.java    From herd with Apache License 2.0
@Override
public MultipleFileUpload uploadFileList(String bucketName, String virtualDirectoryKeyPrefix, File directory, List<File> files,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager)
{
    LOGGER.debug(
        "uploadFileList(): bucketName = " + bucketName + ", virtualDirectoryKeyPrefix = " + virtualDirectoryKeyPrefix + ", directory = " + directory +
            ", files = " + files);

    String directoryPath = directory.getAbsolutePath();

    long totalFileLength = 0;
    List<Upload> subTransfers = new ArrayList<>();
    for (File file : files)
    {
        // Get path to file relative to the specified directory
        String relativeFilePath = file.getAbsolutePath().substring(directoryPath.length());

        // Replace any backslashes (i.e. Windows separator) with a forward slash.
        relativeFilePath = relativeFilePath.replace("\\", "/");

        // Remove any leading slashes
        relativeFilePath = relativeFilePath.replaceAll("^/+", "");

        long fileLength = file.length();

        // Remove any trailing slashes
        virtualDirectoryKeyPrefix = virtualDirectoryKeyPrefix.replaceAll("/+$", "");

        String s3ObjectKey = virtualDirectoryKeyPrefix + "/" + relativeFilePath;
        totalFileLength += fileLength;

        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, s3ObjectKey, file);

        ObjectMetadata objectMetadata = new ObjectMetadata();
        metadataProvider.provideObjectMetadata(null, objectMetadata);
        putObjectRequest.setMetadata(objectMetadata);

        putObject(putObjectRequest, transferManager.getAmazonS3Client());

        subTransfers.add(new UploadImpl(null, null, null, null));
    }

    TransferProgress progress = new TransferProgress();
    progress.setTotalBytesToTransfer(totalFileLength);
    progress.updateProgress(totalFileLength);

    MultipleFileUploadImpl multipleFileUpload = new MultipleFileUploadImpl(null, progress, null, virtualDirectoryKeyPrefix, bucketName, subTransfers);
    multipleFileUpload.setState(TransferState.Completed);
    return multipleFileUpload;
}
 
Example #12
Source File: S3Operations.java    From herd with Apache License 2.0
/**
 * Uploads all files in the directory given to the bucket named, optionally recursing for all subdirectories.
 *
 * @param s3BucketName the S3 bucket name
 * @param virtualDirectoryKeyPrefix the key prefix of the virtual directory to upload to
 * @param directory the directory to upload
 * @param includeSubdirectories specifies whether to include subdirectories in the upload. If true, files found in subdirectories will be included with an
 * appropriate concatenation to the key prefix
 * @param metadataProvider the callback of type <code>ObjectMetadataProvider</code> which is used to provide metadata for each file being uploaded
 * @param transferManager the transfer manager implementation to use
 *
 * @return the multiple file upload information
 */
public MultipleFileUpload uploadDirectory(String s3BucketName, String virtualDirectoryKeyPrefix, File directory, boolean includeSubdirectories,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager);
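For context, the hypothetical caller below sketches how this interface method might be used alongside the SDK types it delegates to (compare the implementation in Example #9). The s3Operations and transferManager instances, bucket name, key prefix, and directory are all placeholders for illustration, not taken from the herd code.

// Hypothetical caller sketch; s3Operations and transferManager are assumed to be
// supplied by the surrounding code, and bucket/prefix/paths are placeholders.
private void uploadStagingDirectory(S3Operations s3Operations, TransferManager transferManager)
    throws InterruptedException
{
    File stagingDirectory = new File("/tmp/staging");

    // Provide per-file metadata, e.g. request SSE for every uploaded object.
    ObjectMetadataProvider metadataProvider =
        (file, metadata) -> metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);

    MultipleFileUpload upload = s3Operations.uploadDirectory(
        "my-example-bucket",   // S3 bucket name
        "uploads/run-1",       // virtual directory key prefix
        stagingDirectory,      // local directory to upload
        true,                  // include subdirectories
        metadataProvider,
        transferManager);

    // Block until every sub-transfer has finished.
    upload.waitForCompletion();
}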
 
Example #13
Source File: S3Operations.java    From herd with Apache License 2.0
/**
 * Uploads all specified files to the bucket named, constructing relative keys depending on the common parent directory given.
 *
 * @param s3BucketName the S3 bucket name
 * @param virtualDirectoryKeyPrefix the key prefix of the virtual directory to upload to
 * @param directory the common parent directory of files to upload. The keys of the files in the list of files are constructed relative to this directory
 * and the virtualDirectoryKeyPrefix
 * @param files the list of files to upload. The keys of the files are calculated relative to the common parent directory and the virtualDirectoryKeyPrefix
 * @param metadataProvider the callback of type <code>ObjectMetadataProvider</code> which is used to provide metadata for each file being uploaded
 * @param transferManager the transfer manager implementation to use
 *
 * @return the multiple file upload information
 */
public MultipleFileUpload uploadFileList(String s3BucketName, String virtualDirectoryKeyPrefix, File directory, List<File> files,
    ObjectMetadataProvider metadataProvider, TransferManager transferManager);
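Similarly, the hypothetical sketch below illustrates how object keys are derived from the common parent directory and the key prefix when calling this method. As above, the s3Operations and transferManager instances and all names are placeholders for illustration, not herd code.

// Hypothetical caller sketch; s3Operations and transferManager are assumed to be
// supplied by the surrounding code, and all paths and names are placeholders.
private void uploadSelectedFiles(S3Operations s3Operations, TransferManager transferManager)
    throws InterruptedException
{
    // Common parent directory; object keys are built relative to it.
    File parentDirectory = new File("/tmp/staging");
    List<File> files = Arrays.asList(
        new File(parentDirectory, "manifest.json"),
        new File(parentDirectory, "data/part-0001.gz"));

    // No extra metadata is needed for this sketch.
    ObjectMetadataProvider metadataProvider = (file, metadata) -> { };

    // With key prefix "uploads/run-1", the resulting keys are
    // "uploads/run-1/manifest.json" and "uploads/run-1/data/part-0001.gz".
    MultipleFileUpload upload = s3Operations.uploadFileList(
        "my-example-bucket", "uploads/run-1", parentDirectory, files,
        metadataProvider, transferManager);

    // Block until all files have been uploaded.
    upload.waitForCompletion();
}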