The following are example usages of org.springframework.boot.test.EnvironmentTestUtils#addEnvironment(); follow the link to view the full source on GitHub, or leave a comment in the panel on the right.
@Test
public void testResourceFilesCanBeCustomized() {
    // Verifies that 'spark.resource-files' is bound onto SparkClusterTaskProperties.resourceFiles.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // One varargs call instead of three separate addEnvironment invocations.
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.resource-files: test.txt");
        context.register(Conf.class);
        context.refresh();
        SparkClusterTaskProperties properties = context.getBean(SparkClusterTaskProperties.class);
        assertThat(properties.getResourceFiles(), equalTo("test.txt"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void testExecutorMemoryCanBeCustomized() {
    // Verifies that 'spark.executor-memory' is bound onto SparkClientTaskProperties.executorMemory.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // One varargs call instead of three separate addEnvironment invocations.
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.executor-memory: 2048M");
        context.register(Conf.class);
        context.refresh();
        SparkClientTaskProperties properties = context.getBean(SparkClientTaskProperties.class);
        assertThat(properties.getExecutorMemory(), equalTo("2048M"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void idleTimeoutCanBeCustomized() {
    // Verifies that 'hdfs.idleTimeout' is bound onto HdfsSinkProperties.idleTimeout (a long).
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.idleTimeout:12345");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getIdleTimeout(), equalTo(12345L));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void localDirCanBeCustomized() {
    // Verifies that 'sftp.localDir' is converted to a java.io.File by the binder.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "sftp.localDir:local");
        context.register(Conf.class);
        context.refresh();
        SftpSourceProperties properties = context.getBean(SftpSourceProperties.class);
        assertThat(properties.getLocalDir(), equalTo(new File("local")));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void tmpFileSuffixCanBeCustomized() {
    // Verifies that 'ftp.tmpFileSuffix' is bound onto FtpSinkProperties.tmpFileSuffix.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "ftp.tmpFileSuffix:.foo");
        context.register(Conf.class);
        context.refresh();
        FtpSinkProperties properties = context.getBean(FtpSinkProperties.class);
        assertThat(properties.getTmpFileSuffix(), equalTo(".foo"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void testAppNameCanBeCustomized() {
    // Verifies that 'spark.app-name' is bound onto SparkYarnTaskProperties.appName.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // One varargs call instead of four separate addEnvironment invocations.
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.assembly-jar: hdfs:///app/spark/dummy.jar",
                "spark.app-name: test");
        context.register(Conf.class);
        context.refresh();
        SparkYarnTaskProperties properties = context.getBean(SparkYarnTaskProperties.class);
        assertThat(properties.getAppName(), equalTo("test"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void writerCacheSizeCanBeCustomized() {
    // Verifies that 'hdfs.dataset.writerCacheSize' is bound onto HdfsDatasetSinkProperties.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.dataset.writerCacheSize:20");
        context.register(Conf.class);
        context.refresh();
        HdfsDatasetSinkProperties properties = context.getBean(HdfsDatasetSinkProperties.class);
        assertThat(properties.getWriterCacheSize(), equalTo(20));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void remoteDirCanBeCustomized() {
    // Verifies that 'ftp.remoteDir' is bound onto FtpSourceProperties.remoteDir.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "ftp.remoteDir:/remote");
        context.register(Conf.class);
        context.refresh();
        FtpSourceProperties properties = context.getBean(FtpSourceProperties.class);
        assertThat(properties.getRemoteDir(), equalTo("/remote"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void testNoQueues() throws Exception {
    // Refreshing without configuring any queues must fail with a BeanCreationException.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "rabbit.enableRetry:false");
        context.register(Config.class);
        context.refresh();
        // fail() throws AssertionError (an Error), so the catch below does not swallow it.
        fail("BeanCreationException expected");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(BeanCreationException.class));
        assertThat(e.getMessage(), containsString("queue(s) are required"));
    }
    finally {
        context.close(); // safe on a non-refreshed context; avoids leaking it either way
    }
}
@Test
public void rolloverCanBeCustomized() {
    // Verifies that 'hdfs.rollover' is bound onto HdfsSinkProperties.rollover (an int).
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.rollover:5555555");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getRollover(), equalTo(5555555));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void consistencyLevelCanBeCustomized() {
    // Verifies that CASSANDRA_CONSISTENCY_LEVEL surfaces via
    // CassandraSinkProperties.getConsistencyLevel().
    ConsistencyLevel expected = ConsistencyLevel.LOCAL_QUOROM;
    AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(applicationContext, "CASSANDRA_CONSISTENCY_LEVEL:" + expected);
    applicationContext.register(Conf.class);
    applicationContext.refresh();
    CassandraSinkProperties sinkProperties = applicationContext.getBean(CassandraSinkProperties.class);
    assertThat(sinkProperties.getConsistencyLevel(), equalTo(expected));
    applicationContext.close();
}
@Test
public void partitionPathCanBeCustomized() {
    // Verifies that the 'hdfs.partitionPath' SpEL-style expression is bound verbatim.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "hdfs.partitionPath:dateFormat('yyyy/MM/dd')");
        context.register(Conf.class);
        context.refresh();
        HdfsSinkProperties properties = context.getBean(HdfsSinkProperties.class);
        assertThat(properties.getPartitionPath(), equalTo("dateFormat('yyyy/MM/dd')"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void preserveTimestampDirCanBeDisabled() {
    // Verifies that 'sftp.preserveTimestamp:false' disables timestamp preservation.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "sftp.preserveTimestamp:false");
        context.register(Conf.class);
        context.refresh();
        SftpSourceProperties properties = context.getBean(SftpSourceProperties.class);
        // assertFalse is clearer than assertTrue(!...) and matches usage elsewhere in this file.
        assertFalse(properties.isPreserveTimestamp());
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void filenamePatternCanBeCustomized() {
    // Verifies that 'ftp.filenamePattern' is bound onto FtpSourceProperties.filenamePattern.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "ftp.filenamePattern:*.foo");
        context.register(Conf.class);
        context.refresh();
        FtpSourceProperties properties = context.getBean(FtpSourceProperties.class);
        assertThat(properties.getFilenamePattern(), equalTo("*.foo"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void tmpFileSuffixCanBeCustomized() {
    // Verifies that 'sftp.tmpFileSuffix' is bound onto SftpSourceProperties.tmpFileSuffix.
    // NOTE(review): a test with this exact name also appears earlier in this collection
    // (FTP variant) — these snippets presumably come from different test classes; they
    // cannot coexist in a single class. Verify against the original repositories.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "sftp.tmpFileSuffix:.foo");
        context.register(Conf.class);
        context.refresh();
        SftpSourceProperties properties = context.getBean(SftpSourceProperties.class);
        assertThat(properties.getTmpFileSuffix(), equalTo(".foo"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void useTemporaryFileNameCanBeCustomized() {
    // Verifies that 'ftp.useTemporaryFilename:false' disables temporary-filename use.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "ftp.useTemporaryFilename:false");
        context.register(Conf.class);
        context.refresh();
        FtpSinkProperties properties = context.getBean(FtpSinkProperties.class);
        assertFalse(properties.isUseTemporaryFilename());
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void testMasterCanBeCustomized() {
    // Verifies that 'spark.master' is bound onto SparkClientTaskProperties.master.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        // One varargs call instead of three separate addEnvironment invocations.
        EnvironmentTestUtils.addEnvironment(context,
                "spark.app-class: Dummy",
                "spark.app-jar: dummy.jar",
                "spark.master: local[4]");
        context.register(Conf.class);
        context.refresh();
        SparkClientTaskProperties properties = context.getBean(SparkClientTaskProperties.class);
        assertThat(properties.getMaster(), equalTo("local[4]"));
    }
    finally {
        context.close(); // avoid leaking the context across tests
    }
}
@Test
public void maxRowsPerPollCanBeCustomized() {
    // Verifies that 'jdbc.maxRowsPerPoll' is bound onto JdbcSourceProperties.
    // Uses the shared 'this.context' field, which is presumably closed by the
    // test-class lifecycle.
    EnvironmentTestUtils.addEnvironment(this.context,
            "jdbc.query:select foo from bar",
            "jdbc.maxRowsPerPoll:15");
    this.context.register(Conf.class);
    this.context.refresh();
    JdbcSourceProperties sourceProperties = this.context.getBean(JdbcSourceProperties.class);
    assertThat(sourceProperties.getMaxRowsPerPoll(), equalTo(15));
}
@Test
public void aclAndAclExpressionAreMutuallyExclusive() {
    // Setting both 's3.acl' and 's3.acl-expression' must fail validation on refresh.
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    try {
        EnvironmentTestUtils.addEnvironment(context, "s3.bucket:foo", "s3.acl:private", "s3.acl-expression:'acl'");
        context.register(Conf.class);
        context.refresh();
        // fail() throws AssertionError (an Error), so the catch below does not swallow it.
        fail("BeanCreationException expected");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(BeanCreationException.class));
        assertThat(e.getMessage(), containsString("Only one of 'acl' or 'aclExpression' must be set"));
    }
    finally {
        context.close(); // safe on a non-refreshed context; avoids leaking it either way
    }
}
@Test
public void ingestQueryCanBeCustomized() {
    // Verifies that 'cassandra.ingest-query' is bound verbatim onto
    // CassandraSinkProperties.ingestQuery.
    final String ingestQuery = "insert into book (isbn, title, author) values (?, ?, ?)";
    AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
    EnvironmentTestUtils.addEnvironment(applicationContext, "cassandra.ingest-query:" + ingestQuery);
    applicationContext.register(Conf.class);
    applicationContext.refresh();
    CassandraSinkProperties sinkProperties = applicationContext.getBean(CassandraSinkProperties.class);
    assertThat(sinkProperties.getIngestQuery(), equalTo(ingestQuery));
    applicationContext.close();
}