org.jets3t.service.impl.rest.httpclient.RestS3Service Java Examples
The following examples show how to use
org.jets3t.service.impl.rest.httpclient.RestS3Service.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: Jets3tNativeFileSystemStore.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Wires this store to the bucket named by the URI host: builds a JetS3t
 * {@link RestS3Service} from the configured credentials and reads the
 * multipart/encryption tuning options from the configuration.
 *
 * @param uri  s3n URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and tuning keys
 * @throws IOException if the S3 service cannot be created
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials credentials = new S3Credentials();
  credentials.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(credentials.getAccessKey(),
                           credentials.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Translation of the service failure is delegated to the shared handler.
    handleException(e);
  }
  multipartEnabled = conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  // Both sizes are capped at the maximum S3 part size.
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");
  bucket = new S3Bucket(uri.getHost());
}
Example #2
Source File: Jets3tFileSystemStore.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Connects this block store to the bucket named by the URI host and picks up
 * the stream buffer size from the configuration.
 *
 * @param uri  s3 URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and buffer size
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  this.conf = conf;
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
  this.bufferSize = conf.getInt(
      S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
      S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT);
}
Example #3
Source File: Jets3tFileSystemStore.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * Connects this block store to the bucket named by the URI host, using
 * the generic "io.file.buffer.size" key (default 4096) for stream buffering.
 *
 * @param uri  s3 URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and buffer size
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri, Configuration conf) throws IOException {
  this.conf = conf;
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
Example #4
Source File: Jets3tNativeFileSystemStore.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Wires this store to the bucket named by the URI host: builds a JetS3t
 * {@link RestS3Service} from the configured credentials and reads the
 * multipart/encryption tuning options from the configuration.
 *
 * @param uri  s3n URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and tuning keys
 * @throws IOException if the S3 service cannot be created
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials credentials = new S3Credentials();
  credentials.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(credentials.getAccessKey(),
                           credentials.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Translation of the service failure is delegated to the shared handler.
    handleException(e);
  }
  multipartEnabled = conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  // Both sizes are capped at the maximum S3 part size.
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");
  bucket = new S3Bucket(uri.getHost());
}
Example #5
Source File: Jets3tFileSystemStore.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Connects this block store to the bucket named by the URI host and picks up
 * the stream buffer size from the configuration.
 *
 * @param uri  s3 URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and buffer size
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  this.conf = conf;
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
  this.bufferSize = conf.getInt(
      S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
      S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT);
}
Example #6
Source File: Jets3tNativeFileSystemStore.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * Connects this native store to the bucket named by the URI host using a
 * JetS3t {@link RestS3Service} built from the configured credentials.
 *
 * @param uri  s3n URI whose host names the bucket
 * @param conf Hadoop configuration supplying the AWS credentials
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
Example #7
Source File: TestGrantAcl.java From suro with Apache License 2.0 | 6 votes |
/**
 * Verifies that GrantAcl adds a READ grant for each configured canonical
 * grantee id ("1,2,3") to the object's ACL.
 */
@Test
public void test() throws Exception {
    RestS3Service s3Service = mock(RestS3Service.class);
    AccessControlList acl = new AccessControlList();
    doReturn(acl).when(s3Service).getObjectAcl("bucket", "key");
    doNothing().when(s3Service).putObjectAcl("bucket", "key", acl);

    GrantAcl grantAcl = new GrantAcl(s3Service, "1,2,3", 1);
    S3Object obj = new S3Object("key");
    obj.setBucketName("bucket");
    obj.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL);
    assertTrue(grantAcl.grantAcl(obj));

    Set<GrantAndPermission> grants =
        new HashSet<GrantAndPermission>(Arrays.asList(acl.getGrantAndPermissions()));
    // JUnit convention: expected value first.
    assertEquals(3, grants.size());

    Set<GrantAndPermission> expectedGrants = new HashSet<GrantAndPermission>();
    for (int i = 1; i <= 3; ++i) {
        expectedGrants.add(new GrantAndPermission(
            new CanonicalGrantee(Integer.toString(i)), Permission.PERMISSION_READ));
    }
    // BUG FIX: the expected set was previously built but never compared to the
    // actual grants, so the loop asserted nothing.
    assertEquals(expectedGrants, grants);
}
Example #8
Source File: S3FilenameGenerator.java From red5-examples with Apache License 2.0 | 6 votes |
/**
 * Lists the names of every bucket visible to the configured AWS account.
 * Service errors are logged and result in a (possibly empty) partial list.
 *
 * @return bucket names, never null
 */
public static List<String> getBucketList() {
    logger.debug("Get the bucket list");
    List<String> names = new ArrayList<String>(3);
    try {
        S3Service service = new RestS3Service(awsCredentials);
        S3Bucket[] buckets = service.listAllBuckets();
        for (S3Bucket b : buckets) {
            logger.debug("Bucket: {}", b.getName());
            names.add(b.getName());
        }
        logger.debug("Bucket count: {}", buckets.length);
    } catch (S3ServiceException e) {
        logger.error("Error during bucket listing", e);
    }
    return names;
}
Example #9
Source File: Jets3tFileSystemStore.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Connects this block store to the bucket named by the URI host, using
 * the generic "io.file.buffer.size" key (default 4096) for stream buffering.
 *
 * @param uri  s3 URI whose host names the bucket
 * @param conf Hadoop configuration supplying credentials and buffer size
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri, Configuration conf) throws IOException {
  this.conf = conf;
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
Example #10
Source File: Jets3tNativeFileSystemStore.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Connects this native store to the bucket named by the URI host using a
 * JetS3t {@link RestS3Service} built from the configured credentials.
 *
 * @param uri  s3n URI whose host names the bucket
 * @param conf Hadoop configuration supplying the AWS credentials
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials creds = new S3Credentials();
  creds.initialize(uri, conf);
  try {
    this.s3Service = new RestS3Service(
        new AWSCredentials(creds.getAccessKey(), creds.getSecretAccessKey()));
  } catch (S3ServiceException e) {
    // Unwrap a nested IOException so callers see the original failure.
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
Example #11
Source File: S3FilenameGenerator.java From red5-examples with Apache License 2.0 | 5 votes |
public static void upload(String sessionId, String name) { logger.debug("Upload - session id: {} name: {}", sessionId, name); try { // find the file StringBuilder sb = new StringBuilder(recordPath); sb.append(sessionId); sb.append('/'); sb.append(name); sb.append(".flv"); String filePath = sb.toString(); logger.debug("File path: {}", filePath); File file = new File(filePath); if (file.exists()) { S3Service s3Service = new RestS3Service(awsCredentials); S3Bucket bucket = s3Service.createBucket(bucketName); S3Object sob = new S3Object(sessionId + "/" + name + ".flv"); // force bucket name sob.setBucketName(bucketName); // point at file sob.setDataInputFile(file); // set type sob.setContentType("video/x-flv"); // set auth / acl sob.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ); logger.debug("Pre-upload: {}", sob); sob = s3Service.putObject(bucket, sob); logger.debug("Post-upload: {}", sob); } else { logger.warn("File was not found"); } file = null; } catch (S3ServiceException e) { logger.error("Error during upload", e); } }
Example #12
Source File: S3Consumer.java From suro with Apache License 2.0 | 5 votes |
/**
 * Jackson-constructed consumer. Optional properties fall back to defaults:
 * endpoint "s3.amazonaws.com", timeout 1000 ms, 5 concurrent downloads, and
 * a per-id download path.
 */
@JsonCreator
public S3Consumer(
        @JsonProperty("id") String id,
        @JsonProperty("s3Endpoint") String s3Endpoint,
        @JsonProperty("notice") Notice notice,
        @JsonProperty("recvTimeout") long timeout,
        @JsonProperty("concurrentDownload") int concurrentDownload,
        @JsonProperty("downloadPath") String downloadPath,
        @JsonProperty("recordParser") RecordParser recordParser,
        @JacksonInject AWSCredentialsProvider credentialProvider,
        @JacksonInject MessageRouter router,
        @JacksonInject ObjectMapper jsonMapper,
        @JacksonInject RestS3Service s3Service) {
    this.id = id;
    // Apply defaults for any unset optional properties.
    this.s3Endpoint = (s3Endpoint == null) ? "s3.amazonaws.com" : s3Endpoint;
    this.notice = notice;
    this.timeout = (timeout == 0) ? 1000 : timeout;
    this.concurrentDownload = (concurrentDownload == 0) ? 5 : concurrentDownload;
    this.recordParser = recordParser;
    this.downloadPath = (downloadPath == null) ? "/logs/suro-s3consumer/" + id : downloadPath;
    this.credentialsProvider = credentialProvider;
    this.router = router;
    this.jsonMapper = jsonMapper;
    this.s3Service = s3Service;

    Preconditions.checkNotNull(notice, "notice is needed");
    Preconditions.checkNotNull(recordParser, "recordParser is needed");
}
Example #13
Source File: JetS3tLiveTest.java From tutorials with MIT License | 5 votes |
@BeforeClass public static void connectS3() throws Exception { // Replace with your keys String awsAccessKey = "your access key"; String awsSecretKey = "your secret key"; // Create credentials AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey); // Create service s3Service = new RestS3Service(awsCredentials); }
Example #14
Source File: JetS3tLiveTest.java From tutorials with MIT License | 5 votes |
@BeforeClass public static void connectS3() throws Exception { // Replace with your keys String awsAccessKey = "your access key"; String awsSecretKey = "your secret key"; // Create credentials AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey); // Create service s3Service = new RestS3Service(awsCredentials); }
Example #15
Source File: S3PresignedUrlProvider.java From cyberduck with GNU General Public License v3.0 | 5 votes |
/** * Generates a signed URL string that will grant access to an S3 resource (bucket or object) to whoever uses the URL * up until the time specified. * * @param host Hostname * @param bucket the name of the bucket to include in the URL, must be a valid bucket name. * @param key the name of the object to include in the URL, if null only the bucket name is used. * @param method HTTP method * @param expiry Milliseconds * @return a URL signed in such a way as to grant access to an S3 resource to whoever uses it. */ public String create(final Host host, final String user, final String secret, final String bucket, String region, final String key, final String method, final long expiry) { final S3Protocol.AuthenticationHeaderSignatureVersion signature; if(StringUtils.isBlank(region)) { // Only for AWS if(S3Session.isAwsHostname(host.getHostname())) { // Region is required for AWS4-HMAC-SHA256 signature region = "us-east-1"; signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol()); } else { signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2; } } else { // Only for AWS if(S3Session.isAwsHostname(host.getHostname())) { // Region is required for AWS4-HMAC-SHA256 signature signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol()); } else { signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2; } } return new RestS3Service(new AWSCredentials(StringUtils.strip(user), StringUtils.strip(secret))) { @Override public String getEndpoint() { return host.getHostname(); } }.createSignedUrlUsingSignatureVersion( signature.toString(), region, method, bucket, key, null, null, expiry / 1000, false, true, false); }
Example #16
Source File: S3FilenameGenerator.java From red5-examples with Apache License 2.0 | 5 votes |
/** Creates (or fetches, if it already exists) the configured S3 bucket, logging the outcome. */
public static void createBucket() {
    logger.debug("Create bucket");
    try {
        S3Service service = new RestS3Service(awsCredentials);
        S3Bucket created = service.createBucket(bucketName);
        logger.debug("Created bucket: {}", created.getName());
    } catch (S3ServiceException e) {
        logger.error("Error creating bucket", e);
    }
}
Example #17
Source File: S3ReaderTest.java From cc-warc-examples with MIT License | 5 votes |
public static void main(String[] args) throws IOException, S3ServiceException { // We're accessing a publicly available bucket so don't need to fill in our credentials S3Service s3s = new RestS3Service(null); // Let's grab a file out of the CommonCrawl S3 bucket String fn = "common-crawl/crawl-data/CC-MAIN-2013-48/segments/1386163035819/warc/CC-MAIN-20131204131715-00000-ip-10-33-133-15.ec2.internal.warc.gz"; S3Object f = s3s.getObject("aws-publicdatasets", fn, null, null, null, null, null, null); // The file name identifies the ArchiveReader and indicates if it should be decompressed ArchiveReader ar = WARCReaderFactory.get(fn, f.getDataInputStream(), true); // Once we have an ArchiveReader, we can work through each of the records it contains int i = 0; for(ArchiveRecord r : ar) { // The header file contains information such as the type of record, size, creation time, and URL System.out.println("Header: " + r.getHeader()); System.out.println("URL: " + r.getHeader().getUrl()); System.out.println(); // If we want to read the contents of the record, we can use the ArchiveRecord as an InputStream // Create a byte array that is as long as all the record's stated length byte[] rawData = new byte[r.available()]; r.read(rawData); // Note: potential optimization would be to have a large buffer only allocated once // Why don't we convert it to a string and print the start of it? Let's hope it's text! String content = new String(rawData); System.out.println(content.substring(0, Math.min(500, content.length()))); System.out.println((content.length() > 500 ? "..." : "")); // Pretty printing to make the output more readable System.out.println("=-=-=-=-=-=-=-=-="); if (i++ > 4) break; } }
Example #18
Source File: MigrationTool.java From RDFS with Apache License 2.0 | 4 votes |
/**
 * Resolves AWS credentials — first from the URI user-info
 * ("accessKey:secretKey"), then from the fs.s3.* configuration keys — and
 * opens a JetS3t service against the bucket named by the URI host.
 *
 * @param uri s3 URI identifying the bucket to migrate
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri) throws IOException {
    try {
        String accessKey = null;
        String secretAccessKey = null;
        String userInfo = uri.getUserInfo();
        if (userInfo != null) {
            int index = userInfo.indexOf(':');
            if (index != -1) {
                accessKey = userInfo.substring(0, index);
                secretAccessKey = userInfo.substring(index + 1);
            } else {
                accessKey = userInfo;
            }
        }
        // Fall back to the configuration for anything the URI did not supply.
        if (accessKey == null) {
            accessKey = getConf().get("fs.s3.awsAccessKeyId");
        }
        if (secretAccessKey == null) {
            secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
        }
        if (accessKey == null && secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID and Secret Access Key "
                    + "must be specified as the username "
                    + "or password (respectively) of a s3 URL, "
                    + "or by setting the "
                    + "fs.s3.awsAccessKeyId or "
                    + "fs.s3.awsSecretAccessKey properties (respectively).");
        } else if (accessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID must be specified "
                    + "as the username of a s3 URL, or by setting the "
                    + "fs.s3.awsAccessKeyId property.");
        } else if (secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Secret Access Key must be specified "
                    + "as the password of a s3 URL, or by setting the "
                    + "fs.s3.awsSecretAccessKey property.");
        }
        AWSCredentials awsCredentials = new AWSCredentials(accessKey, secretAccessKey);
        this.s3Service = new RestS3Service(awsCredentials);
    } catch (S3ServiceException e) {
        // Surface a nested IOException directly when present.
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        }
        throw new S3Exception(e);
    }
    bucket = new S3Bucket(uri.getHost());
}
Example #19
Source File: S3FilenameGenerator.java From red5-examples with Apache License 2.0 | 4 votes |
@SuppressWarnings("deprecation") public String generateFilename(IScope scope, String name, String extension, GenerationType type) { logger.debug("Get stream directory: scope={}, name={}, type={}", new Object[]{scope, name, type.toString()}); StringBuilder path = new StringBuilder(); // get the session id IConnection conn = Red5.getConnectionLocal(); if (conn.hasAttribute("sessionId")) { String sessionId = conn.getStringAttribute("sessionId"); path.append(sessionId); path.append('/'); } // add resources name path.append(name); // add extension if we have one if (extension != null){ // add extension path.append(extension); } // determine whether its playback or record if (type.equals(GenerationType.PLAYBACK)) { logger.debug("Playback path used"); // look on s3 for the file first boolean found = false; try { S3Service s3Service = new RestS3Service(awsCredentials); S3Bucket bucket = s3Service.getBucket(bucketName); String objectKey = path.toString(); S3Object file = s3Service.getObject(bucket, objectKey); if (file != null) { S3Object details = s3Service.getObjectDetails(bucket, objectKey); logger.debug("Details - key: {} content type: {}", details.getKey(), details.getContentType()); path.insert(0, bucket.getLocation()); // set found flag found = true; } } catch (S3ServiceException e) { logger.warn("Error looking up media file", e); } // use local path if (!found) { logger.debug("File was not found on S3, using local playback location"); path.insert(0, playbackPath); } } else { logger.debug("Record path used"); path.insert(0, recordPath); } String fileName = path.toString(); logger.debug("Generated filename: {}", fileName); return fileName; }
Example #20
Source File: GrantAcl.java From suro with Apache License 2.0 | 4 votes |
/**
 * @param s3Service    JetS3t service used to read and write object ACLs
 * @param s3Acl        grantee specification (presumably comma-separated canonical ids — confirm against callers)
 * @param s3AclRetries number of attempts when applying the ACL
 */
public GrantAcl(RestS3Service s3Service, String s3Acl, int s3AclRetries) {
    this.s3AclRetries = s3AclRetries;
    this.s3Acl = s3Acl;
    this.s3Service = s3Service;
}
Example #21
Source File: IndexCommonCrawl.java From lumongo with Apache License 2.0 | 4 votes |
/**
 * Indexes Common Crawl content from the public S3 dataset into a Lumongo
 * cluster. Usage: awsPropertiesFile prefix lumongoServers indexName
 */
public static void main(String[] args) throws Exception {
    if (args.length != 4) {
        System.err.println("usage: awsPropertiesFile prefix lumongoServers indexName");
        System.err.println("usage: aws.properties 2010/09/25/9 10.0.0.1,10.0.0.2 ccrawl");
        System.exit(1);
    }

    LogUtil.loadLogConfig();

    String propFileName = args[0];
    String prefix = args[1];
    final String[] serverNames = args[2].split(",");
    final String indexName = args[3];

    final LumongoPoolConfig clientConfig = new LumongoPoolConfig();
    for (String serverName : serverNames) {
        clientConfig.addMember(serverName);
    }

    // AWS credentials come from a simple properties file.
    File propFile = new File(propFileName);
    PropertiesReader pr = new PropertiesReader(propFile);
    String awsAccessKey = pr.getString("awsAccessKey");
    String awsSecretKey = pr.getString("awsSecretKey");
    final AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);

    RestS3Service s3Service = new RestS3Service(awsCredentials);
    // The public dataset bucket is requester-pays.
    s3Service.setRequesterPaysEnabled(true);

    System.out.println("Fetching files list for prefix <" + prefix + ">");
    System.out.println("This can take awhile ...");
    S3Object[] objects = s3Service.listObjects("aws-publicdatasets", "common-crawl/crawl-002/" + prefix, null);
    System.out.println("Fetched info for <" + objects.length + "> files");

    lumongoWorkPool = new LumongoWorkPool(clientConfig);

    IndexConfig indexConfig = new IndexConfig(CONTENTS);
    indexConfig.addFieldConfig(FieldConfigBuilder.create(URL, FieldType.STRING).indexAs(DefaultAnalyzers.LC_KEYWORD));
    indexConfig.addFieldConfig(FieldConfigBuilder.create(TEXT_CONTENTS, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
    indexConfig.addFieldConfig(FieldConfigBuilder.create(TITLE, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
    CreateOrUpdateIndex createOrUpdateIndex = new CreateOrUpdateIndex(indexName, 16, indexConfig);
    lumongoWorkPool.createOrUpdateIndex(createOrUpdateIndex);

    // One task per S3 object; failures are logged and do not stop the run.
    ExecutorService pool = Executors.newFixedThreadPool(16);
    for (S3Object object : objects) {
        final String key = object.getKey();
        pool.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    handleFile(indexName, awsCredentials, key);
                } catch (Exception e) {
                    log.error(e.getClass().getSimpleName() + ": ", e);
                }
            }
        });
    }

    pool.shutdown();
    // NOTE(review): lumongoWorkPool is shut down before the download pool has
    // drained; if handleFile submits work to lumongoWorkPool this ordering
    // looks suspect — confirm against LumongoWorkPool.shutdown() semantics.
    lumongoWorkPool.shutdown();
    while (!pool.isTerminated()) {
        pool.awaitTermination(1, TimeUnit.MINUTES);
    }
}
Example #22
Source File: TestS3FileSink.java From suro with Apache License 2.0 | 4 votes |
/**
 * Builds a Guice injector for the sink tests: dummy AWS credentials, a mocked
 * multipart uploader that just sleeps 1s to mimic a slow transfer, and a
 * mocked space checker.
 */
private Injector getInjector() {
    return Guice.createInjector(
            new SuroSinkPlugin(),
            new AbstractModule() {
                @Override
                protected void configure() {
                    bind(ObjectMapper.class).to(DefaultObjectMapper.class);
                    // Fixed dummy credentials; no real AWS access happens.
                    bind(AWSCredentialsProvider.class).toInstance(new AWSCredentialsProvider() {
                        @Override
                        public AWSCredentials getCredentials() {
                            return new AWSCredentials() {
                                @Override
                                public String getAWSAccessKeyId() {
                                    return "accessKey";
                                }

                                @Override
                                public String getAWSSecretKey() {
                                    return "secretKey";
                                }
                            };
                        }

                        @Override
                        public void refresh() {
                        }
                    });
                    // Mocked uploader: any uploadObjects call sleeps for 1s.
                    MultipartUtils mpUtils = mock(MultipartUtils.class);
                    try {
                        doAnswer(new Answer() {
                            @Override
                            public Object answer(InvocationOnMock invocation) throws Throwable {
                                Thread.sleep(1000);
                                return null;
                            }
                        }).when(mpUtils).uploadObjects(
                                any(String.class),
                                any(RestS3Service.class),
                                any(List.class),
                                any(S3ServiceEventListener.class));
                        bind(MultipartUtils.class).toInstance(mpUtils);
                    } catch (Exception e) {
                        Assert.fail(e.getMessage());
                    }
                    bind(SpaceChecker.class).toInstance(mock(SpaceChecker.class));
                }
            }
    );
}
Example #23
Source File: MigrationTool.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Resolves AWS credentials — first from the URI user-info
 * ("accessKey:secretKey"), then from the fs.s3.* configuration keys — and
 * opens a JetS3t service against the bucket named by the URI host.
 *
 * @param uri s3 URI identifying the bucket to migrate
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri) throws IOException {
    try {
        String accessKey = null;
        String secretAccessKey = null;
        String userInfo = uri.getUserInfo();
        if (userInfo != null) {
            int index = userInfo.indexOf(':');
            if (index != -1) {
                accessKey = userInfo.substring(0, index);
                secretAccessKey = userInfo.substring(index + 1);
            } else {
                accessKey = userInfo;
            }
        }
        // Fall back to the configuration for anything the URI did not supply.
        if (accessKey == null) {
            accessKey = getConf().get("fs.s3.awsAccessKeyId");
        }
        if (secretAccessKey == null) {
            secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
        }
        if (accessKey == null && secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID and Secret Access Key "
                    + "must be specified as the username "
                    + "or password (respectively) of a s3 URL, "
                    + "or by setting the "
                    + "fs.s3.awsAccessKeyId or "
                    + "fs.s3.awsSecretAccessKey properties (respectively).");
        } else if (accessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID must be specified "
                    + "as the username of a s3 URL, or by setting the "
                    + "fs.s3.awsAccessKeyId property.");
        } else if (secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Secret Access Key must be specified "
                    + "as the password of a s3 URL, or by setting the "
                    + "fs.s3.awsSecretAccessKey property.");
        }
        AWSCredentials awsCredentials = new AWSCredentials(accessKey, secretAccessKey);
        this.s3Service = new RestS3Service(awsCredentials);
    } catch (S3ServiceException e) {
        // Surface a nested IOException directly when present.
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        }
        throw new S3Exception(e);
    }
    bucket = new S3Bucket(uri.getHost());
}
Example #24
Source File: MigrationTool.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Resolves AWS credentials — first from the URI user-info
 * ("accessKey:secretKey"), then from the fs.s3.* configuration keys — and
 * opens a JetS3t service against the bucket named by the URI host.
 *
 * @param uri s3 URI identifying the bucket to migrate
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri) throws IOException {
    try {
        String accessKey = null;
        String secretAccessKey = null;
        String userInfo = uri.getUserInfo();
        if (userInfo != null) {
            int index = userInfo.indexOf(':');
            if (index != -1) {
                accessKey = userInfo.substring(0, index);
                secretAccessKey = userInfo.substring(index + 1);
            } else {
                accessKey = userInfo;
            }
        }
        // Fall back to the configuration for anything the URI did not supply.
        if (accessKey == null) {
            accessKey = getConf().get("fs.s3.awsAccessKeyId");
        }
        if (secretAccessKey == null) {
            secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
        }
        if (accessKey == null && secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID and Secret Access Key "
                    + "must be specified as the username "
                    + "or password (respectively) of a s3 URL, "
                    + "or by setting the "
                    + "fs.s3.awsAccessKeyId or "
                    + "fs.s3.awsSecretAccessKey properties (respectively).");
        } else if (accessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID must be specified "
                    + "as the username of a s3 URL, or by setting the "
                    + "fs.s3.awsAccessKeyId property.");
        } else if (secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Secret Access Key must be specified "
                    + "as the password of a s3 URL, or by setting the "
                    + "fs.s3.awsSecretAccessKey property.");
        }
        AWSCredentials awsCredentials = new AWSCredentials(accessKey, secretAccessKey);
        this.s3Service = new RestS3Service(awsCredentials);
    } catch (S3ServiceException e) {
        // Surface a nested IOException directly when present.
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        }
        throw new S3Exception(e);
    }
    bucket = new S3Bucket(uri.getHost());
}
Example #25
Source File: MigrationTool.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/**
 * Resolves AWS credentials — first from the URI user-info
 * ("accessKey:secretKey"), then from the fs.s3.* configuration keys — and
 * opens a JetS3t service against the bucket named by the URI host.
 *
 * @param uri s3 URI identifying the bucket to migrate
 * @throws IOException if creating the S3 service fails with an I/O cause
 */
public void initialize(URI uri) throws IOException {
    try {
        String accessKey = null;
        String secretAccessKey = null;
        String userInfo = uri.getUserInfo();
        if (userInfo != null) {
            int index = userInfo.indexOf(':');
            if (index != -1) {
                accessKey = userInfo.substring(0, index);
                secretAccessKey = userInfo.substring(index + 1);
            } else {
                accessKey = userInfo;
            }
        }
        // Fall back to the configuration for anything the URI did not supply.
        if (accessKey == null) {
            accessKey = getConf().get("fs.s3.awsAccessKeyId");
        }
        if (secretAccessKey == null) {
            secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
        }
        if (accessKey == null && secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID and Secret Access Key "
                    + "must be specified as the username "
                    + "or password (respectively) of a s3 URL, "
                    + "or by setting the "
                    + "fs.s3.awsAccessKeyId or "
                    + "fs.s3.awsSecretAccessKey properties (respectively).");
        } else if (accessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Access Key ID must be specified "
                    + "as the username of a s3 URL, or by setting the "
                    + "fs.s3.awsAccessKeyId property.");
        } else if (secretAccessKey == null) {
            throw new IllegalArgumentException("AWS "
                    + "Secret Access Key must be specified "
                    + "as the password of a s3 URL, or by setting the "
                    + "fs.s3.awsSecretAccessKey property.");
        }
        AWSCredentials awsCredentials = new AWSCredentials(accessKey, secretAccessKey);
        this.s3Service = new RestS3Service(awsCredentials);
    } catch (S3ServiceException e) {
        // Surface a nested IOException directly when present.
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        }
        throw new S3Exception(e);
    }
    bucket = new S3Bucket(uri.getHost());
}