The following are example usages of the org.springframework.boot.test.EnvironmentTestUtils API class; click the link to view the source code on GitHub.
@Test
public void namespaceCanBeCustomized() {
    // Verifies that the 'hdfs.dataset.namespace' property binds onto HdfsDatasetSinkProperties.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.namespace:test");
        context.register(Conf.class);
        context.refresh();
        HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
        assertThat(properties.getNamespace(), equalTo("test"));
    }
    finally {
        // Original leaked the context; close it like the other tests in this listing do.
        context.close();
    }
}
/**
 * Builds and refreshes an application context registered with {@code config},
 * after applying the given {@code key:value} environment pairs.
 * The caller owns (and must close) the returned context.
 */
private AnnotationConfigApplicationContext load(Class<?> config, String... env) {
    AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
    applicationContext.register(config);
    EnvironmentTestUtils.addEnvironment(applicationContext, env);
    applicationContext.refresh();
    return applicationContext;
}
@Test
public void testNoQueues() throws Exception {
    // Refreshing without any queue configured must fail with a BeanCreationException.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "rabbit.enableRetry:false");
        context.register(Config.class);
        context.refresh();
        fail("BeanCreationException expected");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(BeanCreationException.class));
        assertThat(e.getMessage(), containsString("queue(s) are required"));
    }
    finally {
        // Original leaked the context on both success and failure paths.
        context.close();
    }
}
@Test
public void s3BucketAndBucketExpressionAreMutuallyExclusive() {
    // Setting both 'bucket' and 'bucketExpression' must be rejected at refresh time.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "s3.bucket:foo", "s3.bucketExpression:headers.bucket");
        context.register(Conf.class);
        context.refresh();
        fail("BeanCreationException expected");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(BeanCreationException.class));
        assertThat(e.getMessage(), containsString("Exactly one of 'bucket' or 'bucketExpression' must be set"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testNumExecutorsCanBeCustomized() {
    // Verifies that 'spark.num-executors' binds onto SparkYarnTaskProperties.numExecutors.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // addEnvironment is varargs (see the s3 test above) — one call replaces four.
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
                "spark.num-executors: 4");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getNumExecutors(), equalTo(4));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void maxRowsPerPollCanBeCustomized() {
    // Verifies that 'jdbc.maxRowsPerPoll' binds onto JdbcSourceProperties.maxRowsPerPoll.
    // The context is a test-class field; its lifecycle is managed outside this method.
    EnvironmentTestUtils.addEnvironment(this.context,
            "jdbc.query:select foo from bar",
            "jdbc.maxRowsPerPoll:15");
    this.context.register(Conf.class);
    this.context.refresh();
    JdbcSourceProperties properties = this.context.getBean(JdbcSourceProperties.class);
    assertThat(properties.getMaxRowsPerPoll(), equalTo(15));
}
@Test
public void testAppClassCanBeCustomized() {
    // Verifies that 'spark.app-class' binds onto SparkYarnTaskProperties.appClass.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: MyTestClass",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppClass(), equalTo("MyTestClass"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test(expected = BeanCreationException.class)
public void testAppClassIsRequired() {
    // Refresh must fail when 'spark.app-class' is absent even though the app jar is set.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // Bug fix: the key was "app-jar", missing the "spark." prefix every sibling
        // test uses, so the property never actually reached the binder.
        EnvironmentTestUtils.addEnvironment(context, "spark.app-jar: dummy.jar");
        context.register(Conf.class);
        context.refresh();
    }
    finally {
        // close() still lets the expected exception propagate to JUnit.
        context.close();
    }
}
@Test
public void testAppJarCanBeCustomized() {
    // Verifies that 'spark.app-jar' binds onto SparkYarnTaskProperties.appJar.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: my-app-jar-0.0.1.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppJar(), equalTo("my-app-jar-0.0.1.jar"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test(expected = BeanCreationException.class)
public void testAppJarIsRequired() {
    // Refresh must fail when 'spark.app-jar' is absent.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
        context.register(Conf.class);
        context.refresh();
    }
    finally {
        // close() still lets the expected exception propagate to JUnit.
        context.close();
    }
}
@Test
public void testAppArgsCanBeCustomized() {
    // Verifies that the comma-separated 'spark.app-args' value binds to a String[].
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.app-args: arg1,arg2",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppArgs(), equalTo(new String[]{"arg1", "arg2"}));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testResourceFilesCanBeCustomized() {
    // Verifies that 'spark.resource-files' binds onto SparkYarnTaskProperties.resourceFiles.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
                "spark.resource-files: test.txt");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getResourceFiles(), equalTo("test.txt"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testResourceArchivesCanBeCustomized() {
    // Verifies that 'spark.resource-archives' binds onto SparkYarnTaskProperties.resourceArchives.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
                "spark.resource-archives: foo.jar,bar.jar");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getResourceArchives(), equalTo("foo.jar,bar.jar"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testExecutorMemoryCanBeCustomized() {
    // Verifies that 'spark.executor-memory' binds onto SparkYarnTaskProperties.executorMemory.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
                "spark.executor-memory: 2048M");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getExecutorMemory(), equalTo("2048M"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void fileExtensionCanBeCustomized() {
    // Verifies that 'hdfs.fileExtension' binds onto HdfsSinkProperties.fileExtension.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.fileExtension:test");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getFileExtension(), equalTo("test"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void fileOpenAttemptsCanBeCustomized() {
    // Verifies that 'hdfs.fileOpenAttempts' binds onto HdfsSinkProperties.fileOpenAttempts.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.fileOpenAttempts:5");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getFileOpenAttempts(), equalTo(5));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testRestUrlCanBeCustomized() {
    // Verifies that 'spark.rest-url' binds onto SparkClusterTaskProperties.restUrl.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.rest-url: spark://foo:6066");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getRestUrl(), equalTo("spark://foo:6066"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testAppStatusPollIntervalCanBeCustomized() {
    // Verifies that 'spark.app-status-poll-interval' binds as a long value.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.rest-url: spark://dummy:6066",
                "spark.app-status-poll-interval: 20000");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppStatusPollInterval(), equalTo(20000L));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void codecCanBeCustomized() {
    // Verifies that 'hdfs.codec:snappy' resolves to the SNAPPY codec abbreviation.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.codec:snappy");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getCodec(), equalTo(Codecs.SNAPPY.getAbbreviation()));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testAppClassCanBeCustomized() {
    // Verifies that 'spark.app-class' binds onto SparkClusterTaskProperties.appClass.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: MyTestClass",
                "spark.app-jar: dummy.jar");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppClass(), equalTo("MyTestClass"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void retryPolicyCanBeCustomized() {
    // Verifies that 'cassandra.retry-policy' binds onto CassandraSinkProperties.retryPolicy.
    AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
    String retryProperty = "cassandra.retry-policy:" + RetryPolicy.DOWNGRADING_CONSISTENCY;
    EnvironmentTestUtils.addEnvironment(applicationContext, retryProperty);
    applicationContext.register(Conf.class);
    applicationContext.refresh();
    CassandraSinkProperties sinkProperties = applicationContext.getBean(CassandraSinkProperties.class);
    assertThat(sinkProperties.getRetryPolicy(), equalTo(RetryPolicy.DOWNGRADING_CONSISTENCY));
    applicationContext.close();
}
@Test
public void testAppJarCanBeCustomized() {
    // Verifies that 'spark.app-jar' binds onto SparkClusterTaskProperties.appJar.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: my-app-jar-0.0.1.jar");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppJar(), equalTo("my-app-jar-0.0.1.jar"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void formatCanBeCustomized() {
    // Verifies that 'hdfs.dataset.format' binds onto HdfsDatasetSinkProperties.format.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.format:parquet");
        context.register(Conf.class);
        context.refresh();
        HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
        assertThat(properties.getFormat(), equalTo("parquet"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testAppArgsCanBeCustomized() {
    // Verifies that the comma-separated 'spark.app-args' value binds to a String[].
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.app-args: arg1,arg2");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getAppArgs(), equalTo(new String[]{"arg1", "arg2"}));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void fsUriCanBeCustomized() {
    // Verifies that 'hdfs.dataset.fsUri' binds onto HdfsDatasetSinkProperties.fsUri.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.fsUri:hdfs://localhost:8020");
        context.register(Conf.class);
        context.refresh();
        HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
        assertThat(properties.getFsUri(), equalTo("hdfs://localhost:8020"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void aclCanBeCustomized() {
    // Verifies that 's3.acl' binds to the matching CannedAccessControlList constant.
    AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(applicationContext, "s3.bucket:foo", "s3.acl:AuthenticatedRead");
    applicationContext.register(Conf.class);
    applicationContext.refresh();
    AmazonS3SinkProperties sinkProperties = applicationContext.getBean(AmazonS3SinkProperties.class);
    CannedAccessControlList expectedAcl = CannedAccessControlList.AuthenticatedRead;
    assertThat(sinkProperties.getAcl(), equalTo(expectedAcl));
    applicationContext.close();
}
@Test
public void testExecutorMemoryCanBeCustomized() {
    // Verifies that 'spark.executor-memory' binds onto SparkClusterTaskProperties.executorMemory.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.executor-memory: 2048M");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getExecutorMemory(), equalTo("2048M"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void compressionTypeCanBeCustomized() {
    // Verifies that 'hdfs.dataset.compressionType' binds onto HdfsDatasetSinkProperties.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.compressionType:bzip2");
        context.register(Conf.class);
        context.refresh();
        HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
        assertThat(properties.getCompressionType(), equalTo("bzip2"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void fileNameCanBeCustomized() {
    // Verifies that 'hdfs.fileName' binds onto HdfsSinkProperties.fileName.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.fileName:mydata");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getFileName(), equalTo("mydata"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}
@Test
public void testToolArgsCanBeCustomized() {
    // Verifies that 'tool-args' binds onto SqoopCommonTaskProperties.toolArgs.
    // NOTE(review): unlike the spark.* tests, this key has no prefix — presumably
    // SqoopCommonTaskProperties binds without one; confirm against that class.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "tool-args: --table foo");
        context.register(Conf.class);
        context.refresh();
        SqoopCommonTaskProperties properties = context.getBean(SqoopCommonTaskProperties.class);
        assertThat(properties.getToolArgs(), equalTo("--table foo"));
    }
    finally {
        // Original leaked the context.
        context.close();
    }
}