org.jets3t.service.security.AWSCredentials Java Examples

The following examples show how to use org.jets3t.service.security.AWSCredentials. They are drawn from open source projects; the source file and originating project are listed above each example.
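All of the examples below follow the same basic JetS3t pattern: wrap an access key and secret key in an AWSCredentials object and hand it to a RestS3Service. The minimal sketch below shows that pattern in isolation; the key values and the bucket name "example-bucket" are placeholders, and listing the bucket assumes the credentials actually have read access to it.

import org.jets3t.service.S3ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.security.AWSCredentials;

public class AwsCredentialsQuickStart {

  public static void main(String[] args) throws S3ServiceException {
    // Placeholder keys -- replace with real AWS credentials.
    AWSCredentials awsCredentials =
        new AWSCredentials("your access key", "your secret key");

    // RestS3Service is JetS3t's REST/HTTP implementation of S3Service.
    RestS3Service s3Service = new RestS3Service(awsCredentials);

    // List the objects in a bucket (assumes the credentials can read "example-bucket").
    S3Object[] objects = s3Service.listObjects("example-bucket");
    for (S3Object object : objects) {
      System.out.println(object.getKey());
    }
  }
}

The Hadoop-based examples below build the credentials the same way, but read the access key and secret key from a Configuration object instead of hard-coding them.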
Example #1
Source File: Jets3tNativeFileSystemStore.java    From hadoop with Apache License 2.0
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    handleException(e);
  }
  multipartEnabled =
      conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");

  bucket = new S3Bucket(uri.getHost());
}
 
Example #2
Source File: Jets3tFileSystemStore.java    From hadoop-gpu with Apache License 2.0
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
 
Example #3
Source File: Jets3tNativeFileSystemStore.java    From hadoop-gpu with Apache License 2.0
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example #4
Source File: Jets3tFileSystemStore.java    From RDFS with Apache License 2.0
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
 
Example #5
Source File: Jets3tNativeFileSystemStore.java    From RDFS with Apache License 2.0
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example #6
Source File: Jets3tFileSystemStore.java    From big-c with Apache License 2.0
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt(
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT
      );
}
 
Example #7
Source File: Jets3tNativeFileSystemStore.java    From big-c with Apache License 2.0
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    handleException(e);
  }
  multipartEnabled =
      conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");

  bucket = new S3Bucket(uri.getHost());
}
 
Example #8
Source File: Jets3tFileSystemStore.java    From hadoop with Apache License 2.0
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt(
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT
      );
}
 
Example #9
Source File: S3FilenameGenerator.java    From red5-examples with Apache License 2.0
public void init() {
	S3FilenameGenerator.awsCredentials = new AWSCredentials(accessKey, secretKey);
	logger.debug("Credentials: {}", awsCredentials.getFriendlyName());
	// check for the bucket name, if not found create it
	List<String> buckets = S3FilenameGenerator.getBucketList();
	if (!buckets.contains(bucketName)) {
		S3FilenameGenerator.createBucket();
	}
}
 
Example #10
Source File: JetS3tLiveTest.java    From tutorials with MIT License
@BeforeClass
public static void connectS3() throws Exception {

    // Replace with your keys
    String awsAccessKey = "your access key";
    String awsSecretKey = "your secret key";

    // Create credentials
    AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);

    // Create service
    s3Service = new RestS3Service(awsCredentials);
}
 
Example #11
Source File: S3PresignedUrlProvider.java    From cyberduck with GNU General Public License v3.0
/**
 * Generates a signed URL string that will grant access to an S3 resource (bucket or object) to whoever uses the URL
 * up until the time specified.
 *
 * @param host   Hostname
 * @param user   AWS access key ID used to sign the URL
 * @param secret AWS secret access key used to sign the URL
 * @param bucket the name of the bucket to include in the URL, must be a valid bucket name.
 * @param region the region of the bucket; if blank, us-east-1 is assumed for AWS hostnames
 * @param key    the name of the object to include in the URL, if null only the bucket name is used.
 * @param method HTTP method
 * @param expiry Expiry time in milliseconds
 * @return a URL signed in such a way as to grant access to an S3 resource to whoever uses it.
 */
public String create(final Host host, final String user, final String secret,
                     final String bucket, String region, final String key,
                     final String method, final long expiry) {
    final S3Protocol.AuthenticationHeaderSignatureVersion signature;
    if(StringUtils.isBlank(region)) {
        // Only for AWS
        if(S3Session.isAwsHostname(host.getHostname())) {
            // Region is required for AWS4-HMAC-SHA256 signature
            region = "us-east-1";
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol());
        }
        else {
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2;
        }
    }
    else {
        // Only for AWS
        if(S3Session.isAwsHostname(host.getHostname())) {
            // Region is required for AWS4-HMAC-SHA256 signature
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol());
        }
        else {
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2;
        }
    }
    return new RestS3Service(new AWSCredentials(StringUtils.strip(user), StringUtils.strip(secret))) {
        @Override
        public String getEndpoint() {
            return host.getHostname();
        }
    }.createSignedUrlUsingSignatureVersion(
        signature.toString(),
        region, method, bucket, key, null, null, expiry / 1000, false, true, false);
}
 
Example #12
Source File: MigrationTool.java    From big-c with Apache License 2.0
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example #13
Source File: IndexCommonCrawl.java    From lumongo with Apache License 2.0
public static void main(String[] args) throws Exception {
	
	if (args.length != 4) {
		System.err.println("usage: awsPropertiesFile prefix lumongoServers indexName");
		System.err.println("usage: aws.properties 2010/09/25/9 10.0.0.1,10.0.0.2 ccrawl");
		System.exit(1);
	}
	
	LogUtil.loadLogConfig();
	
	String propFileName = args[0];
	String prefix = args[1];
	final String[] serverNames = args[2].split(",");
	final String indexName = args[3];
	
	final LumongoPoolConfig clientConfig = new LumongoPoolConfig();
	for (String serverName : serverNames) {
		clientConfig.addMember(serverName);
	}
	
	File propFile = new File(propFileName);
	
	PropertiesReader pr = new PropertiesReader(propFile);
	
	String awsAccessKey = pr.getString("awsAccessKey");
	String awsSecretKey = pr.getString("awsSecretKey");
	
	final AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);
	
	RestS3Service s3Service = new RestS3Service(awsCredentials);
	s3Service.setRequesterPaysEnabled(true);
	
	System.out.println("Fetching files list for prefix <" + prefix + ">");
	System.out.println("This can take awhile ...");
	
	S3Object[] objects = s3Service.listObjects("aws-publicdatasets", "common-crawl/crawl-002/" + prefix, null);
	System.out.println("Fetched info for <" + objects.length + "> files");
	
	lumongoWorkPool = new LumongoWorkPool(clientConfig);
	
	IndexConfig indexConfig = new IndexConfig(CONTENTS);
	indexConfig.addFieldConfig(FieldConfigBuilder.create(URL, FieldType.STRING).indexAs(DefaultAnalyzers.LC_KEYWORD));
	indexConfig.addFieldConfig(FieldConfigBuilder.create(TEXT_CONTENTS, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
	indexConfig.addFieldConfig(FieldConfigBuilder.create(TITLE, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
	
	CreateOrUpdateIndex createOrUpdateIndex = new CreateOrUpdateIndex(indexName, 16, indexConfig);
	
	lumongoWorkPool.createOrUpdateIndex(createOrUpdateIndex);
	
	ExecutorService pool = Executors.newFixedThreadPool(16);
	
	for (S3Object object : objects) {
		final String key = object.getKey();
		
		pool.execute(new Runnable() {
			@Override
			public void run() {
				try {
					handleFile(indexName, awsCredentials, key);
				}
				catch (Exception e) {
					log.error(e.getClass().getSimpleName() + ": ", e);
				}
			}
		});
		
	}
	
	pool.shutdown();
	lumongoWorkPool.shutdown();
	
	while (!pool.isTerminated()) {
		pool.awaitTermination(1, TimeUnit.MINUTES);
	}
	
}
 
Example #14
Source File: MigrationTool.java    From hadoop with Apache License 2.0
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example #15
Source File: MigrationTool.java    From RDFS with Apache License 2.0
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example #16
Source File: JetS3tStorage.java    From archistar-core with GNU General Public License v2.0
public JetS3tStorage(int bftId, String awsAccessKey, String awsSecretKey, String bucketId) {
    awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);
    this.bucketId = bucketId;
    this.internalBFTId = bftId;
    this.s3bucket = null;
}
 
Example #17
Source File: MigrationTool.java    From hadoop-gpu with Apache License 2.0
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}