org.apache.flink.testutils.s3.S3TestCredentials Java Examples

The following examples show how to use org.apache.flink.testutils.s3.S3TestCredentials. Each example is drawn from an open-source project; the source file and license are noted in the heading above it.
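All of the snippets rely on a handful of static methods on S3TestCredentials. For orientation, here is a minimal, hypothetical sketch of that surface, reconstructed purely from the calls made in the examples below; the environment-variable names and the JUnit 4 Assume mechanism are assumptions, not something the examples themselves show:

import org.junit.Assume;

/** Hypothetical sketch of S3TestCredentials, inferred from the usages below. */
public final class S3TestCredentialsSketch {

	// Assumed environment variables; the real class may read other sources.
	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");

	/** Skips the calling test via a JUnit assumption when no credentials are set. */
	public static void assumeCredentialsAvailable() {
		Assume.assumeTrue("No S3 credentials available in this environment",
				BUCKET != null && ACCESS_KEY != null && SECRET_KEY != null);
	}

	public static String getS3AccessKey() { return ACCESS_KEY; }

	public static String getS3SecretKey() { return SECRET_KEY; }

	/** URI of the test bucket, e.g. "s3://my-test-bucket/". */
	public static String getTestBucketUri() { return getTestBucketUriWithScheme("s3"); }

	/** The same bucket under an explicit scheme such as "s3a" or "s3n". */
	public static String getTestBucketUriWithScheme(String scheme) {
		return scheme + "://" + BUCKET + "/";
	}
}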
Example #1
Source File: PrestoS3RecoverableWriterTest.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = conf.getString(CoreOptions.TMP_DIRS) + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);
}
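Once FileSystem.initialize(conf) has run, later Path/FileSystem lookups in the same JVM resolve s3:// URIs with these credentials. The following is a hedged sketch of a test body that could follow the setup above; the path, file name, and payload are invented for illustration:

@Test
public void testWriteToTestBucket() throws Exception {
	// Hypothetical test; assumes checkCredentialsAndSetup() ran in @BeforeClass.
	final Path path = new Path(
			S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID() + "/out.txt");
	final FileSystem fs = path.getFileSystem();
	try (FSDataOutputStream out = fs.create(path, FileSystem.WriteMode.NO_OVERWRITE)) {
		out.write("hello".getBytes(StandardCharsets.UTF_8));
	}
	assertTrue(fs.exists(path));

	// clean up
	fs.delete(path, false);
}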
 
Example #2
Source File: HadoopS3RecoverableWriterExceptionITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = TEMP_FOLDER.getRoot().getAbsolutePath() + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);

	skipped = false;
}
 
Example #3
Source File: HadoopS3RecoverableWriterITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = TEMP_FOLDER.getRoot().getAbsolutePath() + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);

	skipped = false;
}
 
Example #4
Source File: PrestoS3RecoverableWriterTest.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = conf.getString(CoreOptions.TMP_DIRS) + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);
}
 
Example #5
Source File: YarnFileStageTestS3ITCase.java    From flink with Apache License 2.0
/**
 * Verifies that nested directories are properly copied to the given S3 path (using the
 * appropriate file system) during resource uploads for YARN.
 *
 * @param scheme
 * 		file system scheme
 * @param pathSuffix
 * 		test path suffix which will be the test's target path
 */
private void testRecursiveUploadForYarn(String scheme, String pathSuffix) throws Exception {
	++numRecursiveUploadTests;

	final Path basePath = new Path(S3TestCredentials.getTestBucketUriWithScheme(scheme) + TEST_DATA_DIR);
	final HadoopFileSystem fs = (HadoopFileSystem) basePath.getFileSystem();

	assumeFalse(fs.exists(basePath));

	try {
		final Path directory = new Path(basePath, pathSuffix);

		YarnFileStageTest.testCopyFromLocalRecursive(fs.getHadoopFileSystem(),
			new org.apache.hadoop.fs.Path(directory.toUri()), tempFolder, true);
	} finally {
		// clean up
		fs.delete(basePath, true);
	}
}
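The method above is parameterized over the file system scheme, so callers are typically one-line @Test methods, one per scheme. An illustrative (not verbatim) wrapper:

@Test
public void testRecursiveUploadForYarnS3a() throws Exception {
	// Illustrative values; similar wrappers would exist for "s3" and "s3n".
	testRecursiveUploadForYarn("s3a", "testYarn-s3a/");
}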
 
Example #6
Source File: YarnFileStageTestS3ITCase.java    From flink with Apache License 2.0
/**
 * Verifies that nested directories are properly copied to the given S3 path (using the
 * appropriate file system) during resource uploads for YARN.
 *
 * @param scheme
 * 		file system scheme
 * @param pathSuffix
 * 		test path suffix which will be the test's target path
 */
private void testRecursiveUploadForYarn(String scheme, String pathSuffix) throws Exception {
	++numRecursiveUploadTests;

	final Path basePath = new Path(S3TestCredentials.getTestBucketUriWithScheme(scheme) + TEST_DATA_DIR);
	final HadoopFileSystem fs = (HadoopFileSystem) basePath.getFileSystem();

	assumeFalse(fs.exists(basePath));

	try {
		final Path directory = new Path(basePath, pathSuffix);

		YarnFileStageTest.testRegisterMultipleLocalResources(fs.getHadoopFileSystem(),
			new org.apache.hadoop.fs.Path(directory.toUri()), Path.CUR_DIR, tempFolder, false, false);
	} finally {
		// clean up
		fs.delete(basePath, true);
	}
}
 
Example #7
Source File: HadoopS3RecoverableWriterExceptionITCase.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = TEMP_FOLDER.getRoot().getAbsolutePath() + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);

	skipped = false;
}
 
Example #8
Source File: HadoopS3RecoverableWriterITCase.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

	conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
	conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

	final String defaultTmpDir = TEMP_FOLDER.getRoot().getAbsolutePath() + "s3_tmp_dir";
	conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

	FileSystem.initialize(conf);

	skipped = false;
}
 
Example #9
Source File: YarnFileStageTestS3ITCase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that nested directories are properly copied to the given S3 path (using the
 * appropriate file system) during resource uploads for YARN.
 *
 * @param scheme
 * 		file system scheme
 * @param pathSuffix
 * 		test path suffix which will be the test's target path
 */
private void testRecursiveUploadForYarn(String scheme, String pathSuffix) throws Exception {
	++numRecursiveUploadTests;

	final Path basePath = new Path(S3TestCredentials.getTestBucketUriWithScheme(scheme) + TEST_DATA_DIR);
	final HadoopFileSystem fs = (HadoopFileSystem) basePath.getFileSystem();

	assumeFalse(fs.exists(basePath));

	try {
		final Path directory = new Path(basePath, pathSuffix);

		YarnFileStageTest.testCopyFromLocalRecursive(fs.getHadoopFileSystem(),
			new org.apache.hadoop.fs.Path(directory.toUri()), tempFolder, true);
	} finally {
		// clean up
		fs.delete(basePath, true);
	}
}
 
Example #10
Source File: HadoopS3FileSystemBehaviorITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
	FileSystem.initialize(conf);
}
 
Example #11
Source File: YarnFileStageTestS3ITCase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Create a Hadoop config file containing S3 access credentials.
 *
 * <p>Note that we cannot use them as part of the URL since this may fail if the credentials
 * contain a "/" (see <a href="https://issues.apache.org/jira/browse/HADOOP-3733">HADOOP-3733</a>).
 */
private static void setupCustomHadoopConfig() throws IOException {
	File hadoopConfig = TEMP_FOLDER.newFile();
	Map<String /* key */, String /* value */> parameters = new HashMap<>();

	// set all different S3 fs implementation variants' configuration keys
	parameters.put("fs.s3a.access.key", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3a.secret.key", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3n.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3n.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	try (PrintStream out = new PrintStream(new FileOutputStream(hadoopConfig))) {
		out.println("<?xml version=\"1.0\"?>");
		out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>");
		out.println("<configuration>");
		for (Map.Entry<String, String> entry : parameters.entrySet()) {
			out.println("\t<property>");
			out.println("\t\t<name>" + entry.getKey() + "</name>");
			out.println("\t\t<value>" + entry.getValue() + "</value>");
			out.println("\t</property>");
		}
		out.println("</configuration>");
	}

	final Configuration conf = new Configuration();
	conf.setString(ConfigConstants.HDFS_SITE_CONFIG, hadoopConfig.getAbsolutePath());

	FileSystem.initialize(conf);
}
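For reference, the file written by this method follows the standard Hadoop configuration layout; with one credential pair filled in it looks like this (values elided):

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
	<property>
		<name>fs.s3a.access.key</name>
		<value>...</value>
	</property>
	<!-- ...one <property> element per entry in the parameters map... -->
</configuration>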
 
Example #12
Source File: YarnFileStageTestS3ITCase.java    From flink with Apache License 2.0
/**
 * Create a Hadoop config file containing S3 access credentials.
 *
 * <p>Note that we cannot use them as part of the URL since this may fail if the credentials
 * contain a "/" (see <a href="https://issues.apache.org/jira/browse/HADOOP-3733">HADOOP-3733</a>).
 */
private static void setupCustomHadoopConfig() throws IOException {
	File hadoopConfig = TEMP_FOLDER.newFile();
	Map<String /* key */, String /* value */> parameters = new HashMap<>();

	// set all different S3 fs implementation variants' configuration keys
	parameters.put("fs.s3a.access.key", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3a.secret.key", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3n.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3n.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	try (PrintStream out = new PrintStream(new FileOutputStream(hadoopConfig))) {
		out.println("<?xml version=\"1.0\"?>");
		out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>");
		out.println("<configuration>");
		for (Map.Entry<String, String> entry : parameters.entrySet()) {
			out.println("\t<property>");
			out.println("\t\t<name>" + entry.getKey() + "</name>");
			out.println("\t\t<value>" + entry.getValue() + "</value>");
			out.println("\t</property>");
		}
		out.println("</configuration>");
	}

	final Configuration conf = new Configuration();
	conf.setString(ConfigConstants.HDFS_SITE_CONFIG, hadoopConfig.getAbsolutePath());
	conf.set(CoreOptions.ALLOWED_FALLBACK_FILESYSTEMS, "s3;s3a;s3n");

	FileSystem.initialize(conf);
}
 
Example #13
Source File: YarnFileStageTestS3ITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	skipTest = false;

	setupCustomHadoopConfig();
}
 
Example #14
Source File: PrestoS3FileSystemBehaviorITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
	FileSystem.initialize(conf);
}
 
Example #15
Source File: HadoopS3FileSystemBehaviorITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
	FileSystem.initialize(conf);
}
 
Example #16
Source File: YarnFileStageTestS3ITCase.java    From flink with Apache License 2.0
/**
 * Create a Hadoop config file containing S3 access credentials.
 *
 * <p>Note that we cannot use them as part of the URL since this may fail if the credentials
 * contain a "/" (see <a href="https://issues.apache.org/jira/browse/HADOOP-3733">HADOOP-3733</a>).
 */
private static void setupCustomHadoopConfig() throws IOException {
	File hadoopConfig = TEMP_FOLDER.newFile();
	Map<String /* key */, String /* value */> parameters = new HashMap<>();

	// set all different S3 fs implementation variants' configuration keys
	parameters.put("fs.s3a.access.key", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3a.secret.key", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	parameters.put("fs.s3n.awsAccessKeyId", S3TestCredentials.getS3AccessKey());
	parameters.put("fs.s3n.awsSecretAccessKey", S3TestCredentials.getS3SecretKey());

	try (PrintStream out = new PrintStream(new FileOutputStream(hadoopConfig))) {
		out.println("<?xml version=\"1.0\"?>");
		out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>");
		out.println("<configuration>");
		for (Map.Entry<String, String> entry : parameters.entrySet()) {
			out.println("\t<property>");
			out.println("\t\t<name>" + entry.getKey() + "</name>");
			out.println("\t\t<value>" + entry.getValue() + "</value>");
			out.println("\t</property>");
		}
		out.println("</configuration>");
	}

	final Configuration conf = new Configuration();
	conf.setString(ConfigConstants.HDFS_SITE_CONFIG, hadoopConfig.getAbsolutePath());

	FileSystem.initialize(conf);
}
 
Example #17
Source File: PrestoS3FileSystemBehaviorITCase.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
	FileSystem.initialize(conf);
}
 
Example #18
Source File: HadoopS3FileSystemBehaviorITCase.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	// initialize configuration with valid credentials
	final Configuration conf = new Configuration();
	conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
	conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
	FileSystem.initialize(conf);
}
 
Example #19
Source File: YarnFileStageTestS3ITCase.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
	// check whether credentials exist
	S3TestCredentials.assumeCredentialsAvailable();

	skipTest = false;

	setupCustomHadoopConfig();
}
 
Example #20
Source File: HadoopS3FileSystemBehaviorITCase.java    From flink with Apache License 2.0
@Override
public Path getBasePath() throws Exception {
	return new Path(S3TestCredentials.getTestBucketUri() + TEST_DATA_DIR);
}
 
Example #21
Source File: PrestoS3FileSystemBehaviorITCase.java    From flink with Apache License 2.0
@Override
public Path getBasePath() throws Exception {
	return new Path(S3TestCredentials.getTestBucketUri() + TEST_DATA_DIR);
}
 
Example #22
Source File: PrestoS3RecoverableWriterTest.java    From flink with Apache License 2.0
@Test(expected = UnsupportedOperationException.class)
public void requestingRecoverableWriterShouldThroughException() throws Exception {
	URI s3Uri = URI.create(S3TestCredentials.getTestBucketUri());
	FlinkS3FileSystem fileSystem = (FlinkS3FileSystem) FileSystem.get(s3Uri);
	fileSystem.createRecoverableWriter();
}
 
Example #23
Source File: PrestoS3RecoverableWriterTest.java    From Flink-CEPplus with Apache License 2.0
@Test(expected = UnsupportedOperationException.class)
public void requestingRecoverableWriterShouldThroughException() throws Exception {
	URI s3Uri = URI.create(S3TestCredentials.getTestBucketUri());
	FlinkS3FileSystem fileSystem = (FlinkS3FileSystem) FileSystem.get(s3Uri);
	fileSystem.createRecoverableWriter();
}
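Examples #22 and #23 pin down a deliberate limitation rather than exercise a feature: Flink's Presto-based S3 file system does not support recoverable writers, so createRecoverableWriter() is expected to fail with UnsupportedOperationException. (The "Through" in the method name appears to be a typo for "Throw" carried over from the original source.)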
 
Example #24
Source File: PrestoS3FileSystemBehaviorITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public Path getBasePath() throws Exception {
	return new Path(S3TestCredentials.getTestBucketUri() + TEST_DATA_DIR);
}
 