Java Code Examples for org.springframework.boot.test.EnvironmentTestUtils

The following examples show how to use org.springframework.boot.test.EnvironmentTestUtils. They are extracted from open source projects. You can vote up the examples you like or vote down the ones you don't, and follow the links above each example to the original project or source file. Related API usage is listed on the sidebar. Note: EnvironmentTestUtils is the Spring Boot 1.x test utility; in Spring Boot 2.x it was replaced by org.springframework.boot.test.util.TestPropertyValues.
Example 1
// Verifies that the 'hdfs.dataset.namespace' property overrides the default namespace.
@Test
public void namespaceCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.namespace:test");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
		assertThat(properties.getNamespace(), equalTo("test"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 2
/**
 * Builds and refreshes an application context for the given configuration class,
 * applying the supplied "key:value" environment pairs before registration.
 * The caller owns the returned context and is responsible for closing it.
 */
private AnnotationConfigApplicationContext load(Class<?> config, String... env) {
	AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(applicationContext, env);
	applicationContext.register(config);
	applicationContext.refresh();
	return applicationContext;
}
 
Example 3
// Verifies that refreshing without any queue configuration fails with a
// BeanCreationException whose message explains that queues are required.
@Test
public void testNoQueues() throws Exception {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	try {
		EnvironmentTestUtils.addEnvironment(context, "rabbit.enableRetry:false");
		context.register(Config.class);
		context.refresh();
		fail("BeanCreationException expected");
	}
	catch (Exception e) {
		assertThat(e, instanceOf(BeanCreationException.class));
		assertThat(e.getMessage(), containsString("queue(s) are required"));
	}
	finally {
		// Close even after the expected failure so the context is not leaked.
		context.close();
	}
}
 
Example 4
// Verifies that setting both 's3.bucket' and 's3.bucketExpression' is rejected
// at refresh time with a descriptive BeanCreationException.
@Test
public void s3BucketAndBucketExpressionAreMutuallyExclusive() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "s3.bucket:foo", "s3.bucketExpression:headers.bucket");
	context.register(Conf.class);
	try {
		context.refresh();
		fail("BeanCreationException expected");
	}
	catch (Exception e) {
		assertThat(e, instanceOf(BeanCreationException.class));
		assertThat(e.getMessage(), containsString("Exactly one of 'bucket' or 'bucketExpression' must be set"));
	}
	finally {
		// Close even after the expected failure so the context is not leaked.
		context.close();
	}
}
 
Example 5
// Verifies that 'spark.num-executors' overrides the default executor count.
@Test
public void testNumExecutorsCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    // Single varargs call instead of four separate addEnvironment invocations.
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
            "spark.num-executors: 4");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getNumExecutors(), equalTo(4));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 6
// Verifies that 'jdbc.maxRowsPerPoll' overrides the default row limit.
// Uses the test-fixture context field; presumably closed in teardown.
@Test
public void maxRowsPerPollCanBeCustomized() {
	EnvironmentTestUtils.addEnvironment(this.context,
			"jdbc.query:select foo from bar",
			"jdbc.maxRowsPerPoll:15");
	this.context.register(Conf.class);
	this.context.refresh();
	JdbcSourceProperties properties = this.context.getBean(JdbcSourceProperties.class);
	assertThat(properties.getMaxRowsPerPoll(), equalTo(15));
}
 
Example 7
// Verifies that 'spark.app-class' overrides the default application class.
@Test
public void testAppClassCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: MyTestClass",
            "spark.app-jar: dummy.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppClass(), equalTo("MyTestClass"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 8
// Verifies that 'spark.app-class' is mandatory: refreshing without it must
// fail with a BeanCreationException (declared via the expected attribute).
@Test(expected = BeanCreationException.class)
public void testAppClassIsRequired() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    // Fixed key: was "app-jar" without the "spark." prefix used by the sibling
    // tests, so the binder ignored it. The test still passed only because
    // app-class was absent either way.
    EnvironmentTestUtils.addEnvironment(context, "spark.app-jar: dummy.jar");
    context.register(Conf.class);
    context.refresh();
}
 
Example 9
// Verifies that 'spark.app-jar' overrides the default application jar.
@Test
public void testAppJarCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: my-app-jar-0.0.1.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppJar(), equalTo("my-app-jar-0.0.1.jar"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 10
// Verifies that 'spark.app-jar' is mandatory: refreshing without it must
// fail with a BeanCreationException (declared via the expected attribute).
@Test(expected = BeanCreationException.class)
public void testAppJarIsRequired() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
    context.register(Conf.class);
    context.refresh();
}
 
Example 11
// Verifies that 'spark.app-args' is split on commas into an argument array.
@Test
public void testAppArgsCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.app-args: arg1,arg2",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppArgs(), equalTo(new String[]{"arg1", "arg2"}));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 12
// Verifies that 'spark.resource-files' overrides the default resource file list.
@Test
public void testResourceFilesCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
            "spark.resource-files: test.txt");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getResourceFiles(), equalTo("test.txt"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 13
// Verifies that 'spark.resource-archives' overrides the default archive list.
@Test
public void testResourceArchivesCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
            "spark.resource-archives: foo.jar,bar.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getResourceArchives(), equalTo("foo.jar,bar.jar"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 14
// Verifies that 'spark.executor-memory' overrides the default executor memory.
@Test
public void testExecutorMemoryCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
            "spark.executor-memory: 2048M");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getExecutorMemory(), equalTo("2048M"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 15
// Verifies that 'hdfs.fileExtension' overrides the default file extension.
@Test
public void fileExtensionCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.fileExtension:test");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
		assertThat(properties.getFileExtension(), equalTo("test"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 16
// Verifies that 'hdfs.fileOpenAttempts' overrides the default attempt count.
@Test
public void fileOpenAttemptsCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.fileOpenAttempts:5");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
		assertThat(properties.getFileOpenAttempts(), equalTo(5));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 17
// Verifies that 'spark.rest-url' overrides the default REST submission URL.
@Test
public void testRestUrlCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.rest-url: spark://foo:6066");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getRestUrl(), equalTo("spark://foo:6066"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 18
// Verifies that 'spark.app-status-poll-interval' overrides the default poll interval.
@Test
public void testAppStatusPollIntervalCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.rest-url: spark://dummy:6066",
            "spark.app-status-poll-interval: 20000");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppStatusPollInterval(), equalTo(20000L));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 19
// Verifies that 'hdfs.codec:snappy' resolves to the SNAPPY codec abbreviation.
@Test
public void codecCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.codec:snappy");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
		assertThat(properties.getCodec(), equalTo(Codecs.SNAPPY.getAbbreviation()));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 20
// Verifies that 'spark.app-class' overrides the default application class
// for the cluster task properties.
@Test
public void testAppClassCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: MyTestClass",
            "spark.app-jar: dummy.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppClass(), equalTo("MyTestClass"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 21
// Verifies that 'cassandra.retry-policy' overrides the default retry policy.
@Test
public void retryPolicyCanBeCustomized() {
	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(ctx,
			"cassandra.retry-policy:" + RetryPolicy.DOWNGRADING_CONSISTENCY);
	ctx.register(Conf.class);
	ctx.refresh();
	CassandraSinkProperties props = ctx.getBean(CassandraSinkProperties.class);
	assertThat(props.getRetryPolicy(), equalTo(RetryPolicy.DOWNGRADING_CONSISTENCY));
	ctx.close();
}
 
Example 22
// Verifies that 'spark.app-jar' overrides the default application jar
// for the cluster task properties.
@Test
public void testAppJarCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: my-app-jar-0.0.1.jar");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppJar(), equalTo("my-app-jar-0.0.1.jar"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 23
// Verifies that 'hdfs.dataset.format' overrides the default dataset format.
@Test
public void formatCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.format:parquet");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
		assertThat(properties.getFormat(), equalTo("parquet"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 24
// Verifies that 'spark.app-args' is split on commas into an argument array
// for the cluster task properties.
@Test
public void testAppArgsCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.app-args: arg1,arg2");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppArgs(), equalTo(new String[]{"arg1", "arg2"}));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 25
// Verifies that 'hdfs.dataset.fsUri' overrides the default filesystem URI.
@Test
public void fsUriCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.fsUri:hdfs://localhost:8020");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
		assertThat(properties.getFsUri(), equalTo("hdfs://localhost:8020"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 26
// Verifies that 's3.acl' binds to the matching CannedAccessControlList constant.
@Test
public void aclCanBeCustomized() {
	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(ctx,
			"s3.bucket:foo",
			"s3.acl:AuthenticatedRead");
	ctx.register(Conf.class);
	ctx.refresh();
	AmazonS3SinkProperties props = ctx.getBean(AmazonS3SinkProperties.class);
	assertThat(props.getAcl(), equalTo(CannedAccessControlList.AuthenticatedRead));
	ctx.close();
}
 
Example 27
// Verifies that 'spark.executor-memory' overrides the default executor memory
// for the cluster task properties.
@Test
public void testExecutorMemoryCanBeCustomized() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(context,
            "spark.app-class: Dummy",
            "spark.app-jar: dummy.jar",
            "spark.executor-memory: 2048M");
    context.register(Conf.class);
    context.refresh();
    try {
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getExecutorMemory(), equalTo("2048M"));
    }
    finally {
        // Close the context so it is not leaked across tests.
        context.close();
    }
}
 
Example 28
// Verifies that 'hdfs.dataset.compressionType' overrides the default compression.
@Test
public void compressionTypeCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.compressionType:bzip2");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
		assertThat(properties.getCompressionType(), equalTo("bzip2"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 29
// Verifies that 'hdfs.fileName' overrides the default output file name.
@Test
public void fileNameCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "hdfs.fileName:mydata");
	context.register(Conf.class);
	context.refresh();
	try {
		HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
		assertThat(properties.getFileName(), equalTo("mydata"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}
 
Example 30
// Verifies that 'tool-args' overrides the default Sqoop tool arguments.
@Test
public void testToolArgsCanBeCustomized() {
	AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
	EnvironmentTestUtils.addEnvironment(context, "tool-args: --table foo");
	context.register(Conf.class);
	context.refresh();
	try {
		SqoopCommonTaskProperties properties = context.getBean(SqoopCommonTaskProperties.class);
		assertThat(properties.getToolArgs(), equalTo("--table foo"));
	}
	finally {
		// Close the context so it is not leaked across tests.
		context.close();
	}
}