Java Code Examples for com.amazonaws.services.s3.model.ObjectMetadata

The following examples show how to use com.amazonaws.services.s3.model.ObjectMetadata. They are extracted from open source projects.
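Before the individual examples, here is a minimal sketch of the pattern most of them revolve around: populate an ObjectMetadata before uploading a stream, then read metadata back with getObjectMetadata. The client setup, bucket name, and key below are placeholders for illustration only and are not taken from any of the projects listed here.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

public class ObjectMetadataSketch {

    public static void main(String[] args) {
        // Placeholder client, bucket, and key; adjust for your own environment.
        AmazonS3 s3 = AmazonS3ClientBuilder.standard().build();
        String bucket = "example-bucket";
        String key = "example/hello.txt";

        byte[] content = "Hello, S3!".getBytes(StandardCharsets.UTF_8);

        // Populate metadata before the upload. Setting the content length up front
        // avoids the SDK buffering the whole stream in memory to determine it.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(content.length);
        metadata.setContentType("text/plain");
        metadata.addUserMetadata("source", "sketch");

        InputStream is = new ByteArrayInputStream(content);
        s3.putObject(new PutObjectRequest(bucket, key, is, metadata));

        // getObjectMetadata issues a HEAD request; no object content is downloaded.
        ObjectMetadata stored = s3.getObjectMetadata(bucket, key);
        System.out.println("Content-Length: " + stored.getContentLength());
        System.out.println("Content-Type:   " + stored.getContentType());
        System.out.println("source:         " + stored.getUserMetaDataOf("source"));
    }
}

When uploading a File rather than a stream (as several examples below do), the SDK derives the content length itself, and ObjectMetadata is only needed for extras such as content type, user metadata, or server-side encryption settings.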
Example 1
Project: s3-proxy-chunk-upload   File: AssetsS3.java
/**
 * Uploads a file to the assets bucket.
 *
 * @param fileName File name
 * @param file File
 * @param contentType Content type for file
 * @return true if the upload succeeded, false otherwise
 */
public static boolean uploadFile(String fileName, File file, String contentType) {
    try {
        if (S3Module.amazonS3 != null) {
            String bucket = S3Module.s3Bucket;
            ObjectMetadata metaData = new ObjectMetadata();
            if (contentType != null) {
                metaData.setContentType(contentType);
            }
            PutObjectRequest putObj = new PutObjectRequest(bucket,
                    fileName, file);
            putObj.setMetadata(metaData);
            putObj.withCannedAcl(CannedAccessControlList.PublicRead);
            S3Module.amazonS3.putObject(putObj);
            return true;
        } else {
            Logger.error("Could not save because amazonS3 was null");
            return false;
        }
    } catch (Exception e) {
        Logger.error("S3 Upload -" + e.getMessage());
        return false;
    }
}
 
Example 2
Project: S3Mock   File: AmazonClientUploadIT.java
/**
 * Tests if the Metadata of an existing file can be retrieved.
 */
@Test
public void shouldGetObjectMetadata() {
  final String nonExistingFileName = "nonExistingFileName";
  final File uploadFile = new File(UPLOAD_FILE_NAME);
  s3Client.createBucket(BUCKET_NAME);

  final PutObjectResult putObjectResult =
      s3Client.putObject(new PutObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME, uploadFile));
  final ObjectMetadata metadataExisting =
      s3Client.getObjectMetadata(BUCKET_NAME, UPLOAD_FILE_NAME);

  assertThat("The ETags should be identically!", metadataExisting.getETag(),
      is(putObjectResult.getETag()));

  thrown.expect(AmazonS3Exception.class);
  thrown.expectMessage(containsString("Status Code: 404"));
  s3Client.getObjectMetadata(BUCKET_NAME, nonExistingFileName);
}
 
Example 3
Project: qpp-conversion-tool   File: StorageServiceImpl.java
/**
 * Stores the {@link InputStream} as an object in the S3 bucket.
 *
 * @param keyName The requested key name for the object.
 * @param inStream The {@link InputStream} to write out to an object in S3.
 * @param size The size of the {@link InputStream}.
 * @return A {@link CompletableFuture} that will eventually contain the S3 object key.
 */
@Override
public CompletableFuture<String> store(String keyName, InputStream inStream, long size) {
	final String bucketName = environment.getProperty(Constants.BUCKET_NAME_ENV_VARIABLE);
	final String kmsKey = environment.getProperty(Constants.KMS_KEY_ENV_VARIABLE);
	if (Strings.isNullOrEmpty(bucketName) || Strings.isNullOrEmpty(kmsKey)) {
		API_LOG.warn("No bucket name is specified or no KMS key specified.");
		return CompletableFuture.completedFuture("");
	}

	ObjectMetadata s3ObjectMetadata = new ObjectMetadata();
	s3ObjectMetadata.setContentLength(size);

	PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, keyName, inStream, s3ObjectMetadata)
		.withSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(kmsKey));

	API_LOG.info("Writing object {} to S3 bucket {}", keyName, bucketName);
	return actOnItem(putObjectRequest);
}
 
Example 4
Project: elasticsearch_my   File: S3BlobContainer.java
@Override
public void move(String sourceBlobName, String targetBlobName) throws IOException {
    try {
        CopyObjectRequest request = new CopyObjectRequest(blobStore.bucket(), buildKey(sourceBlobName),
            blobStore.bucket(), buildKey(targetBlobName));

        if (blobStore.serverSideEncryption()) {
            ObjectMetadata objectMetadata = new ObjectMetadata();
            objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
            request.setNewObjectMetadata(objectMetadata);
        }

        SocketAccess.doPrivilegedVoid(() -> {
            blobStore.client().copyObject(request);
            blobStore.client().deleteObject(blobStore.bucket(), buildKey(sourceBlobName));
        });

    } catch (AmazonS3Exception e) {
        throw new IOException(e);
    }
}
 
Example 5
Project: S3Mock   File: AmazonClientUploadIT.java
/**
 * Puts an object, copies that object to a new bucket, downloads the object from the new
 * bucket, and compares the checksums of the original and copied object.
 *
 * @throws Exception if an Exception occurs
 */
@Test
public void shouldCopyObjectEncrypted() throws Exception {
  final File uploadFile = new File(UPLOAD_FILE_NAME);
  final String sourceKey = UPLOAD_FILE_NAME;
  final String destinationBucketName = "destinationBucket";
  final String destinationKey = "copyOf/" + sourceKey;

  s3Client.putObject(new PutObjectRequest(BUCKET_NAME, sourceKey, uploadFile));

  final CopyObjectRequest copyObjectRequest =
      new CopyObjectRequest(BUCKET_NAME, sourceKey, destinationBucketName, destinationKey);
  copyObjectRequest.setSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(TEST_ENC_KEYREF));

  final CopyObjectResult copyObjectResult = s3Client.copyObject(copyObjectRequest);

  final ObjectMetadata metadata =
      s3Client.getObjectMetadata(destinationBucketName, destinationKey);

  final InputStream uploadFileIS = new FileInputStream(uploadFile);
  final String uploadHash = HashUtil.getDigest(TEST_ENC_KEYREF, uploadFileIS);
  assertThat("ETag should match", copyObjectResult.getETag(), is(uploadHash));
  assertThat("Files should have the same length", metadata.getContentLength(),
      is(uploadFile.length()));
}
 
Example 6
Project: gradle-s3-build-cache   File: AwsS3BuildCacheService.java
@Override
public void store(BuildCacheKey key, BuildCacheEntryWriter writer) {
  logger.info("Start storing cache entry '{}' in S3 bucket", key.getHashCode());
  ObjectMetadata meta = new ObjectMetadata();
  meta.setContentType(BUILD_CACHE_CONTENT_TYPE);

  try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
    writer.writeTo(os);
    meta.setContentLength(os.size());
    try (InputStream is = new ByteArrayInputStream(os.toByteArray())) {
        PutObjectRequest request = getPutObjectRequest(key, meta, is);
        if(this.reducedRedundancy) {
          request.withStorageClass(StorageClass.ReducedRedundancy);
        }
        s3.putObject(request);
    }
  } catch (IOException e) {
    throw new BuildCacheException("Error while storing cache object in S3 bucket", e);
  }
}
 
Example 7
Project: S3Mock   File: AmazonClientUploadIT.java
/**
 * Tests if an object can be uploaded with KMS encryption.
 */
@Test
public void shouldUploadWithEncryption() {
  final File uploadFile = new File(UPLOAD_FILE_NAME);
  final String objectKey = UPLOAD_FILE_NAME;
  s3Client.createBucket(BUCKET_NAME);
  final PutObjectRequest putObjectRequest =
      new PutObjectRequest(BUCKET_NAME, objectKey, uploadFile);
  putObjectRequest.setSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(TEST_ENC_KEYREF));

  s3Client.putObject(putObjectRequest);

  final GetObjectMetadataRequest getObjectMetadataRequest =
      new GetObjectMetadataRequest(BUCKET_NAME, objectKey);

  final ObjectMetadata objectMetadata = s3Client.getObjectMetadata(getObjectMetadataRequest);

  assertThat(objectMetadata.getContentLength(), is(uploadFile.length()));
}
 
Example 8
Project: urlshortner   File: UrlShortnerService.java
/**
 * Deletes the stored URL for the given short code.
 *
 * @param code the short code whose URL should be deleted
 */
public void deleteShortUrl(String code) {
	try {
		// get the object
		ObjectMetadata metaData = this.s3Client.getObjectMetadata(this.bucket, code);
		String url = metaData.getUserMetaDataOf("url");
		logger.info("The url to be deleted {}", url);
		this.s3Client.deleteObject(this.bucket, code);
		this.s3Client.deleteObject(this.bucket + "-dummy", code + Base64.encodeBase64String(url.getBytes()));

	} catch (AmazonS3Exception ex) {
		if (ex.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
			return;
		}
		logger.warn("Unable to get object status", ex);
		throw ex;
	}

}
 
Example 9
Project: nexus-blobstore-s3   File: S3BlobStore.java
@Override
@Guarded(by = STARTED)
public Blob create(final InputStream blobData, final Map<String, String> headers) {
  checkNotNull(blobData);

  return create(headers, destination -> {
      try (InputStream data = blobData) {
        MetricsInputStream input = new MetricsInputStream(data);
        TransferManager transferManager = new TransferManager(s3);
        transferManager.upload(getConfiguredBucket(), destination, input, new ObjectMetadata())
            .waitForCompletion();
        return input.getMetrics();
      } catch (InterruptedException e) {
        throw new BlobStoreException("error uploading blob", e, null);
      }
    });
}
 
Example 10
Project: hollow-reference-implementation   File: S3Publisher.java
/**
 * Write a list of all of the state versions to S3.
 *
 * @param newVersion the version to insert into the snapshot index
 */
private synchronized void updateSnapshotIndex(Long newVersion) {
	/// insert the new version into the list
	int idx = Collections.binarySearch(snapshotIndex, newVersion);
	int insertionPoint = Math.abs(idx) - 1;
	snapshotIndex.add(insertionPoint, newVersion);
	
	/// build a binary representation of the list -- gap encoded variable-length integers
	byte[] idxBytes = buidGapEncodedVarIntSnapshotIndex();
	
	/// indicate the Content-Length
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setHeader("Content-Length", (long)idxBytes.length);
	
    /// upload the new file content.
    try(InputStream is = new ByteArrayInputStream(idxBytes)) {
        Upload upload = s3TransferManager.upload(bucketName, getSnapshotIndexObjectName(blobNamespace), is, metadata);
        
        upload.waitForCompletion();
    } catch(Exception e) {
        throw new RuntimeException(e);
    }
}
 
Example 11
Project: hadoop   File: S3AFileSystem.java
private void createEmptyObject(final String bucketName, final String objectName)
    throws AmazonClientException, AmazonServiceException {
  final InputStream im = new InputStream() {
    @Override
    public int read() throws IOException {
      return -1;
    }
  };

  final ObjectMetadata om = new ObjectMetadata();
  om.setContentLength(0L);
  if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) {
    om.setServerSideEncryption(serverSideEncryptionAlgorithm);
  }
  PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, objectName, im, om);
  putObjectRequest.setCannedAcl(cannedACL);
  s3.putObject(putObjectRequest);
  statistics.incrementWriteOps(1);
}
 
Example 12
Project: ibm-cos-sdk-java   File: AmazonS3Client.java
@Override
public PutObjectResult putObject(String bucketName, String key, String content)
        throws AmazonServiceException, SdkClientException {

    rejectNull(bucketName, "Bucket name must be provided");
    rejectNull(key, "Object key must be provided");
    rejectNull(content, "String content must be provided");

    byte[] contentBytes = content.getBytes(StringUtils.UTF8);

    InputStream is = new ByteArrayInputStream(contentBytes);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType("text/plain");
    metadata.setContentLength(contentBytes.length);

    return putObject(new PutObjectRequest(bucketName, key, is, metadata));
}
 
Example 13
Project: ibm-cos-sdk-java   File: CopyCallable.java
private void populateMetadataWithEncryptionParams(ObjectMetadata source, ObjectMetadata destination) {
    Map<String, String> userMetadataSource = source.getUserMetadata();
    Map<String, String> userMetadataDestination = destination.getUserMetadata();

    String[] headersToCopy = { Headers.CRYPTO_CEK_ALGORITHM,
            Headers.CRYPTO_IV, Headers.CRYPTO_KEY, Headers.CRYPTO_KEY_V2,
            Headers.CRYPTO_KEYWRAP_ALGORITHM, Headers.CRYPTO_TAG_LENGTH,
            Headers.MATERIALS_DESCRIPTION,
            Headers.UNENCRYPTED_CONTENT_LENGTH,
            Headers.UNENCRYPTED_CONTENT_MD5 };

    if (userMetadataSource != null) {
        if(userMetadataDestination == null){
            userMetadataDestination= new HashMap<String,String>();
            destination.setUserMetadata(userMetadataDestination);
        }

        String headerValue;
        for(String header : headersToCopy){
            headerValue = userMetadataSource.get(header);
            if(headerValue != null){
                userMetadataDestination.put(header, headerValue);
            }
        }
    }
}
 
Example 14
Project: Reer   File: S3Client.java
public void put(InputStream inputStream, Long contentLength, URI destination) {
    checkRequiredJigsawModuleIsOnPath();
    try {
        S3RegionalResource s3RegionalResource = new S3RegionalResource(destination);
        String bucketName = s3RegionalResource.getBucketName();
        String s3BucketKey = s3RegionalResource.getKey();
        configureClient(s3RegionalResource);

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(contentLength);

        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, s3BucketKey, inputStream, objectMetadata);
        LOGGER.debug("Attempting to put resource:[{}] into s3 bucket [{}]", s3BucketKey, bucketName);

        amazonS3Client.putObject(putObjectRequest);
    } catch (AmazonClientException e) {
        throw ResourceExceptions.putFailed(destination, e);
    }
}
 
Example 15
Project: pprxmtr   File: S3.java
public static void storeFileInBucket(String filename, InputStream is, ObjectMetadata metadata) {
	String bucketName = properties.getProperty(S3_BUCKET_NAME);
	try {
		LOG.info("Storing file {} in S3 bucket {}.", filename, bucketName);
		S3.putObject(bucketName, filename, is, metadata);
		S3.setObjectAcl(bucketName, filename, CannedAccessControlList.PublicRead);
	} catch (AmazonServiceException e) {
		LOG.error("Exception occured when fetching file from S3 bucket {}.", bucketName, e);
	}
}
 
Example 16
Project: elasticsearch_my   File: DefaultS3OutputStream.java
protected String doInitialize(S3BlobStore blobStore, String bucketName, String blobName, boolean serverSideEncryption) {
    InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, blobName)
            .withCannedACL(blobStore.getCannedACL())
            .withStorageClass(blobStore.getStorageClass());

    if (serverSideEncryption) {
        ObjectMetadata md = new ObjectMetadata();
        md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
        request.setObjectMetadata(md);
    }

    return blobStore.client().initiateMultipartUpload(request).getUploadId();
}
 
Example 17
Project: elasticsearch_my   File: MockAmazonS3.java
@Override
public ObjectMetadata getObjectMetadata(
        GetObjectMetadataRequest getObjectMetadataRequest)
        throws AmazonClientException, AmazonServiceException {
    String blobName = getObjectMetadataRequest.getKey();

    if (!blobs.containsKey(blobName)) {
        throw new AmazonS3Exception("[" + blobName + "] does not exist.");
    }

    return new ObjectMetadata(); // nothing is done with it
}
 
Example 18
Project: ooso   File: CommonUtilitiesTest.java
@Test
public void storeObjectTest() throws Exception {
    S3Object object = s3Client.getObject(DUMMY_BUCKET_NAME, "pref1/dummy1");

    ObjectMetadata objectMetadata = object.getObjectMetadata();
    BufferedReader reader = new BufferedReader(new InputStreamReader(object.getObjectContent()));
    String storedContent = reader.readLine();
    reader.close();

    assertEquals(objectMetadata.getContentType(), Commons.TEXT_TYPE);
    assertEquals(objectMetadata.getContentLength(), KEY_CONTENT_MAPPING.get("pref1/dummy1").getBytes().length);
    assertEquals(storedContent, KEY_CONTENT_MAPPING.get("pref1/dummy1"));
}
 
Example 19
Project: ibm-cos-sdk-java   File: S3CryptoModuleBase.java
protected final PutObjectRequest createInstructionPutRequest(
        String bucketName, String key, ContentCryptoMaterial cekMaterial) {
    byte[] bytes = cekMaterial.toJsonString(cryptoConfig.getCryptoMode())
                              .getBytes(UTF8);
    InputStream is = new ByteArrayInputStream(bytes);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(bytes.length);
    metadata.addUserMetadata(Headers.CRYPTO_INSTRUCTION_FILE, "");
    InstructionFileId ifileId = new S3ObjectId(bucketName, key)
            .instructionFileId();
    return new PutObjectRequest(ifileId.getBucket(), ifileId.getKey(),
        is, metadata);
}
 
Example 20
Project: S3Decorators   File: FailsafeS3DecoratorTest.java
@Override
public ObjectMetadata getObjectMetadata(String bucketName, String key) throws AmazonServiceException {
  AmazonS3Exception exception = new AmazonS3Exception("Internal Error");
  exception.setStatusCode(500);
  exception.setErrorType(ErrorType.Service);
  throw exception;
}
 
Example 21
Project: radosgw-admin4j   File: RgwAdminImplTest.java
@Test
public void removeBucket() throws Exception {
  String bucketName = "testremovebkbk" + UUID.randomUUID().toString();

  // remove bucket not exist
  Thread.sleep(3000);
  RGW_ADMIN.removeBucket(bucketName);

  testWithAUser(
      v -> {
        String userId = "testremovebk" + UUID.randomUUID().toString();

        User response = RGW_ADMIN.createUser(userId);
        AmazonS3 s3 =
            createS3(
                response.getS3Credentials().get(0).getAccessKey(),
                response.getS3Credentials().get(0).getSecretKey());
        s3.createBucket(bucketName);

        ByteArrayInputStream input = new ByteArrayInputStream("Hello World!".getBytes());
        s3.putObject(bucketName, "hello.txt", input, new ObjectMetadata());

        RGW_ADMIN.removeBucket(bucketName);

        try {
          s3.headBucket(new HeadBucketRequest(bucketName));
          fail();
        } catch (Exception e) {
          assertTrue("Not Found".equals(((AmazonS3Exception) e).getErrorMessage()));
        }
      });
}
 
Example 22
Project: S3Decorators   File: FailsafeS3DecoratorTest.java
@Override
public ObjectMetadata getObjectMetadata(String bucketName, String key) throws AmazonServiceException {
  AmazonS3Exception exception = new AmazonS3Exception("Not authed");
  exception.setStatusCode(403);
  exception.setErrorType(ErrorType.Client);
  throw exception;
}
 
Example 23
Project: S3Decorators   File: HystrixS3DecoratorTest.java
@Override
public ObjectMetadata getObjectMetadata(String bucketName, String key) throws AmazonServiceException {
  AmazonS3Exception exception = new AmazonS3Exception("Bad Auth");
  exception.setStatusCode(403);
  exception.setErrorType(ErrorType.Client);
  throw exception;
}
 
Example 24
Project: S3Mock   File: AmazonClientUploadIT.java
/**
 * Verifies multipart copy.
 *
 * @throws InterruptedException
 */
@Test
public void multipartCopy() throws InterruptedException, IOException, NoSuchAlgorithmException {
  final int contentLen = 3 * _1MB;

  final ObjectMetadata objectMetadata = new ObjectMetadata();
  objectMetadata.setContentLength(contentLen);

  final String assumedSourceKey = UUID.randomUUID().toString();

  final Bucket sourceBucket = s3Client.createBucket(UUID.randomUUID().toString());
  final Bucket targetBucket = s3Client.createBucket(UUID.randomUUID().toString());

  final TransferManager transferManager = createTransferManager(_2MB, _1MB, _2MB, _1MB);

  final InputStream sourceInputStream = randomInputStream(contentLen);
  final Upload upload = transferManager
      .upload(sourceBucket.getName(), assumedSourceKey,
          sourceInputStream, objectMetadata);

  final UploadResult uploadResult = upload.waitForUploadResult();

  assertThat(uploadResult.getKey(), is(assumedSourceKey));

  final String assumedDestinationKey = UUID.randomUUID().toString();
  final Copy copy =
      transferManager.copy(sourceBucket.getName(), assumedSourceKey, targetBucket.getName(),
          assumedDestinationKey);
  final CopyResult copyResult = copy.waitForCopyResult();
  assertThat(copyResult.getDestinationKey(), is(assumedDestinationKey));

  final S3Object copiedObject = s3Client.getObject(targetBucket.getName(), assumedDestinationKey);

  assertThat("Hashes for source and target S3Object do not match.",
      HashUtil.getDigest(copiedObject.getObjectContent()) + "-1",
      is(uploadResult.getETag()));
}
 
Example 25
Project: circus-train   File: CopyMapper.java
private S3UploadDescriptor describeUpload(FileStatus sourceFileStatus, Path targetPath) throws IOException {
  URI targetUri = targetPath.toUri();
  String bucketName = PathUtil.toBucketName(targetUri);
  String key = PathUtil.toBucketKey(targetUri);

  Path sourcePath = sourceFileStatus.getPath();

  ObjectMetadata metadata = new ObjectMetadata();
  metadata.setContentLength(sourceFileStatus.getLen());
  if (conf.getBoolean(ConfigurationVariable.S3_SERVER_SIDE_ENCRYPTION)) {
    metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
  }
  return new S3UploadDescriptor(sourcePath, bucketName, key, metadata);
}
 
Example 26
Project: yass-android   File: YassBroadcastReceiver.java
@Override
public Long doInBackground(Void... unused) {
    Log.d(TAG, "Upload Blob Task:");

    MainActivity.YassPreferences preferences = new MainActivity.YassPreferences(context);

    ConnectivityManager cm =
            (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
    boolean isConnected = activeNetwork != null &&
              activeNetwork.isConnectedOrConnecting();
    if (!isConnected) {
        Log.d(TAG, "Skipping camera upload because network is disconnected");
        return null;
    }
    if (preferences.cameraUploadOnlyOnWifi && activeNetwork.getType() != ConnectivityManager.TYPE_WIFI) {
        Log.d(TAG, "Skipping camera upload because Wifi is disconnected");
        return null;
    }

    long serial;
    Uri uri;
    String fileName;
    long fileSize;
    SQLiteDatabase db = new YassDbHelper(context).getReadableDatabase();
    try {
        String[] projection = {
                "serial",
                "file_uri",
                "file_name",
                "file_size"
        };
        String selection = null;
        String[] selectionArgs = null;
        String groupBy = null;
        String having = null;
        String orderBy = "serial ASC";
        String limit = "1";
        Cursor cursor = db.query("camera_uploads", projection, selection, selectionArgs,
                groupBy, having, orderBy, limit);
        try {
            if (!cursor.moveToNext()) {
                Log.d(TAG, "Did not find image to upload");
                return null;
            }
            serial = cursor.getLong(cursor.getColumnIndexOrThrow("serial"));
            uri = Uri.parse(cursor.getString(cursor.getColumnIndexOrThrow("file_uri")));
            fileName = cursor.getString(cursor.getColumnIndexOrThrow("file_name"));
            fileSize = cursor.getLong(cursor.getColumnIndexOrThrow("file_size"));
        } finally {
            cursor.close();
        }
    } finally {
        db.close();
    }

    Log.d(TAG, "Found image to upload: " + fileName);

    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(fileSize);
    metadata.setContentType(context.getContentResolver().getType(uri));
    PutObjectResult result;
    try (InputStream is = context.getContentResolver().openInputStream(uri)) {
        // TODO: limited to 5 GB
        result = MainActivity.getS3Client(preferences).putObject(preferences.bucketName,
                "Camera Uploads/" + fileName, is, metadata);
    } catch (AmazonClientException | IOException e) {
        Log.e(TAG, "Could not upload file: " + e.getMessage());
        return null;
    }
    return serial;
}
 
Example 27
Project: ibm-cos-sdk-java   File: S3CryptoModuleBase.java
/**
 * Returns the plaintext length from the request and metadata; or -1 if
 * unknown.
 */
protected final long plaintextLength(AbstractPutObjectRequest request,
        ObjectMetadata metadata) {
    if (request.getFile() != null) {
        return request.getFile().length();
    } else if (request.getInputStream() != null
            && metadata.getRawMetadataValue(Headers.CONTENT_LENGTH) != null) {
        return metadata.getContentLength();
    }
    return -1;
}
 
Example 28
Project: urlshortner   File: UrlShortnerService.java
private void createDummyRecord(String url, String code) {
	// Add a dummy object for pointer
	byte[] dummyFileContentBytes = new String(code).getBytes(StandardCharsets.UTF_8);
	InputStream fileInputStream = new ByteArrayInputStream(dummyFileContentBytes);
	ObjectMetadata metadata = new ObjectMetadata();
	metadata.setContentType("text/html");
	metadata.setContentLength(dummyFileContentBytes.length);
	PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucket + "-dummy", code
			+ Base64.encodeBase64String(url.getBytes()), fileInputStream, metadata);
	this.s3Client.putObject(putObjectRequest);
}
 
Example 29
Project: aws-photosharing-example   File: UploadThread.java
@Override
public void run() {
    ObjectMetadata meta_data = new ObjectMetadata();
    if (p_content_type != null)
        meta_data.setContentType(p_content_type);

    meta_data.setContentLength(p_size);

    PutObjectRequest putObjectRequest = new PutObjectRequest(p_bucket_name, p_s3_key, p_file_stream, meta_data);
    putObjectRequest.setCannedAcl(CannedAccessControlList.PublicRead);
    PutObjectResult res = s3Client.putObject(putObjectRequest);       
}
 
Example 30
Project: opentest   File: GetS3Metadata.java
@Override
public void run() {
    super.run();

    String awsCredentialsProfile = this.readStringArgument("awsProfile", "default");
    String bucket = this.readStringArgument("bucket");
    String objectKey = this.readStringArgument("objectKey");

    AmazonS3 s3Client = new AmazonS3Client(new ProfileCredentialsProvider(awsCredentialsProfile));
    ObjectMetadata metadata = s3Client.getObjectMetadata(
            new GetObjectMetadataRequest(bucket, objectKey));

    try {
        Date expirationTime = metadata.getExpirationTime();
        if (expirationTime != null) {
            this.writeOutput("expirationTime", metadata.getExpirationTime().getTime());
        } else {
            this.writeOutput("expirationTime", null);
        }
        this.writeOutput("lastModified", metadata.getLastModified().getTime());
        this.writeOutput("userMetadata", metadata.getUserMetadata());
        this.writeOutput("size", metadata.getContentLength());
        this.writeOutput("storageClass", metadata.getStorageClass());
        this.writeOutput("versionId", metadata.getVersionId());
    } catch (Exception ex) {
        throw new RuntimeException(String.format(
                "Failed to get object metadata for object key %s in bucket %s",
                objectKey,
                bucket), ex);
    }
}
 
Example 31
Project: nexus-blobstore-s3   File: S3PropertiesFile.java
public void store() throws IOException {
  log.debug("Storing: {}/{}", bucket, key);

  ByteArrayOutputStream bufferStream = new ByteArrayOutputStream();
  store(bufferStream, null);
  byte[] buffer = bufferStream.toByteArray();

  ObjectMetadata metadata = new ObjectMetadata();
  metadata.setContentLength(buffer.length);
  s3.putObject(bucket, key, new ByteArrayInputStream(buffer), metadata);
}
 
Example 32
Project: s3-channels   File: S3ReadableObjectChannelBuilderTest.java
@BeforeAll
static void initMock() {
    ObjectMetadata meta = new ObjectMetadata();
    meta.setContentLength(100);
    when(amazonS3.getObjectMetadata(anyString(), anyString()))
            .thenReturn(meta);
}
 
Example 33
Project: github-bucket   File: RepositoryS3.java
private boolean walk(Iterator<S3ObjectSummary> iter, ObjectId file, String path) throws IOException {
    byte[] content;
    byte[] newHash;
    LOG.debug("Start processing file: {}", path);
    try (DigestInputStream is = new DigestInputStream(repository.open(file).openStream(), DigestUtils.getMd5Digest())) {
        // Get content
        content = IOUtils.toByteArray(is);
        // Get hash
        newHash = is.getMessageDigest().digest();
    }
    if (isUploadFile(iter, path, Hex.encodeHexString(newHash))) {
        LOG.info("Uploading file: {}", path);
        ObjectMetadata bucketMetadata = new ObjectMetadata();
        bucketMetadata.setContentMD5(Base64.encodeAsString(newHash));
        bucketMetadata.setContentLength(content.length);
        // Give Tika a few hints for the content detection
        Metadata tikaMetadata = new Metadata();
        tikaMetadata.set(Metadata.RESOURCE_NAME_KEY, FilenameUtils.getName(FilenameUtils.normalize(path)));
        // Fire!
        try (InputStream bis = TikaInputStream.get(content, tikaMetadata)) {
            bucketMetadata.setContentType(TIKA_DETECTOR.detect(bis, tikaMetadata).toString());
            s3.putObject(bucket.getName(), path, bis, bucketMetadata);
            return true;
        }
    }
    LOG.info("Skipping file (same checksum): {}", path);
    return false;
}
 
Example 34
Project: hollow-reference-implementation   File: S3Publisher.java
public void publishSnapshot(Blob blob) {
    String objectName = getS3ObjectName(blobNamespace, "snapshot", blob.getToVersion());

    ObjectMetadata metadata = new ObjectMetadata();
    metadata.addUserMetadata("to_state", String.valueOf(blob.getToVersion()));
    metadata.setHeader("Content-Length", blob.getFile().length());
    
    uploadFile(blob.getFile(), objectName, metadata);
    
    /// now we update the snapshot index
    updateSnapshotIndex(blob.getToVersion());
}
 
Example 35
Project: hollow-reference-implementation   File: S3Publisher.java
public void publishDelta(Blob blob) {
    String objectName = getS3ObjectName(blobNamespace, "delta", blob.getFromVersion());
    
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.addUserMetadata("from_state", String.valueOf(blob.getFromVersion()));
    metadata.addUserMetadata("to_state", String.valueOf(blob.getToVersion()));
    metadata.setHeader("Content-Length", blob.getFile().length());
    
    uploadFile(blob.getFile(), objectName, metadata);
}
 
Example 36
Project: hollow-reference-implementation   File: S3Publisher.java
public void publishReverseDelta(Blob blob) {
    String objectName = getS3ObjectName(blobNamespace, "reversedelta", blob.getFromVersion());
    
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.addUserMetadata("from_state", String.valueOf(blob.getFromVersion()));
    metadata.addUserMetadata("to_state", String.valueOf(blob.getToVersion()));
    metadata.setHeader("Content-Length", blob.getFile().length());
    
    uploadFile(blob.getFile(), objectName, metadata);
}
 
Example 37
Project: hollow-reference-implementation   File: S3Publisher.java
private void uploadFile(File file, String s3ObjectName, ObjectMetadata metadata) {
    try (InputStream is = new BufferedInputStream(new FileInputStream(file))) {
        Upload upload = s3TransferManager.upload(bucketName, s3ObjectName, is, metadata);

        upload.waitForCompletion();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
 
Example 38
Project: hollow-reference-implementation   File: S3BlobRetriever.java
private Blob knownDeltaBlob(String fileType, long fromVersion) {
    String objectName = S3Publisher.getS3ObjectName(blobNamespace, fileType, fromVersion);
    ObjectMetadata objectMetadata = s3.getObjectMetadata(bucketName, objectName);
    long fromState = Long.parseLong(objectMetadata.getUserMetaDataOf("from_state"));
    long toState = Long.parseLong(objectMetadata.getUserMetaDataOf("to_state"));

    return new S3Blob(objectName, fromState, toState);
}
 
Example 39
Project: ibm-cos-sdk-java   File: SkipMd5CheckStrategy.java
private boolean skipClientSideValidationPerResponse(ObjectMetadata metadata) {
    if (metadata == null) {
        return true;
    }
    // If the ETag is not provided or was computed from a multipart upload, skip the check:
    // the ETag won't be the MD5 of the original content.
    if (metadata.getETag() == null || isMultipartUploadETag(metadata.getETag())) {
        return true;
    }
    return metadataInvolvesSse(metadata);
}
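
The helpers isMultipartUploadETag and metadataInvolvesSse used above belong to the same class and are not shown in this snippet. As a rough, hypothetical sketch of the multipart check (not the SDK's verbatim code): an ETag produced by a multipart upload is not a plain MD5 hex string but carries a part-count suffix after a dash, so it can be recognized like this:

// Hypothetical sketch only; the SDK's own helper is not reproduced here.
final class ETagUtilsSketch {
    static boolean looksLikeMultipartUploadETag(String eTag) {
        // Multipart ETags look like "9b2cf535f27731c974343645a3985328-5",
        // i.e. an MD5-style hex string followed by "-" and the part count.
        return eTag != null && eTag.contains("-");
    }
}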
 
Example 40
Project: hadoop   File: S3AFastOutputStream.java
private void putObject() throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Executing regular upload for bucket '{}' key '{}'", bucket,
        key);
  }
  final ObjectMetadata om = createDefaultMetadata();
  om.setContentLength(buffer.size());
  final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key,
      new ByteArrayInputStream(buffer.toByteArray()), om);
  putObjectRequest.setCannedAcl(cannedACL);
  putObjectRequest.setGeneralProgressListener(progressListener);
  ListenableFuture<PutObjectResult> putObjectResult =
      executorService.submit(new Callable<PutObjectResult>() {
        @Override
        public PutObjectResult call() throws Exception {
          return client.putObject(putObjectRequest);
        }
      });
  //wait for completion
  try {
    putObjectResult.get();
  } catch (InterruptedException ie) {
    LOG.warn("Interrupted object upload:" + ie, ie);
    Thread.currentThread().interrupt();
  } catch (ExecutionException ee) {
    throw new IOException("Regular upload failed", ee.getCause());
  }
}