The following examples show how org.junit.rules.TemporaryFolder#newFolder() is used in real projects. Each snippet is taken from an open-source test suite, and the full source can be viewed on GitHub.
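Before the project-specific snippets, here is a minimal, self-contained sketch of the canonical pattern: TemporaryFolder registered as a JUnit 4 @Rule, so that newFolder() creates directories that are cleaned up automatically after each test. The class name and folder names below are illustrative only, not taken from any of the projects that follow.

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

public class TemporaryFolderExampleTest {

    // As a @Rule, the folder is created before each test and deleted afterwards,
    // so no explicit create()/delete() calls are needed.
    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();

    @Test
    public void newFolderCreatesFreshDirectories() throws IOException {
        File unnamed = tempFolder.newFolder();        // randomly named folder under the temp root
        File named = tempFolder.newFolder("data");    // named child folder
        File nested = tempFolder.newFolder("a", "b"); // nested path a/b

        assertTrue(unnamed.isDirectory());
        assertTrue(named.isDirectory());
        assertTrue(nested.isDirectory());
    }
}

Note that many of the snippets below instantiate TemporaryFolder directly and call create() themselves instead of using a @Rule; in that style the caller is responsible for cleanup (e.g. via delete(), as in the Flink test further down).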
protected void configureRecon() throws IOException {
    ConfigurationProvider.resetConfiguration();
    TemporaryFolder tempFolder = new TemporaryFolder();
    tempFolder.create();
    File tempNewFolder = tempFolder.newFolder();
    conf.set(OZONE_RECON_DB_DIR, tempNewFolder.getAbsolutePath());
    conf.set(OZONE_RECON_OM_SNAPSHOT_DB_DIR, tempNewFolder.getAbsolutePath());
    conf.set(OZONE_RECON_SCM_DB_DIR, tempNewFolder.getAbsolutePath());
    conf.set(OZONE_RECON_SQL_DB_JDBC_URL,
        "jdbc:derby:" + tempNewFolder.getAbsolutePath() + "/ozone_recon_derby.db");
    conf.set(OZONE_RECON_HTTP_ADDRESS_KEY, "0.0.0.0:0");
    conf.set(OZONE_RECON_DATANODE_ADDRESS_KEY, "0.0.0.0:0");
    ConfigurationProvider.setConfiguration(conf);
}
public static void compareArrays(int rank, char ordering, TemporaryFolder testDir) throws Exception {
    List<Pair<INDArray, String>> all =
        NDArrayCreationUtil.getTestMatricesWithVaryingShapes(rank, ordering, Nd4j.defaultFloatingPointType());
    Iterator<Pair<INDArray, String>> iter = all.iterator();
    int cnt = 0;
    while (iter.hasNext()) {
        File dir = testDir.newFolder();
        Pair<INDArray, String> currentPair = iter.next();
        INDArray origArray = currentPair.getFirst();
        // add elements outside the bounds where print switches to scientific notation
        origArray.tensorAlongDimension(0, 0).muli(0).addi(100000);
        origArray.putScalar(0, 10001.1234);
        Nd4j.writeTxt(origArray, new File(dir, "someArr.txt").getAbsolutePath());
        INDArray readBack = Nd4j.readTxt(new File(dir, "someArr.txt").getAbsolutePath());
        assertEquals("\nNot equal on shape " + ArrayUtils.toString(origArray.shape()), origArray, readBack);
        cnt++;
    }
}
protected Configuration createClusterConfig() throws IOException {
    TemporaryFolder temporaryFolder = new TemporaryFolder();
    temporaryFolder.create();
    final File haDir = temporaryFolder.newFolder();
    Configuration config = new Configuration();
    config.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, "48m");
    // the default network buffer size (10% of max heap, roughly 150MB) seems too much for this test case
    config.setString(NettyShuffleEnvironmentOptions.NETWORK_BUFFERS_MEMORY_MAX, String.valueOf(80L << 20)); // 80 MB
    config.setString(AkkaOptions.FRAMESIZE, String.valueOf(MAX_MEM_STATE_SIZE) + "b");
    if (zkServer != null) {
        config.setString(HighAvailabilityOptions.HA_MODE, "ZOOKEEPER");
        config.setString(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zkServer.getConnectString());
        config.setString(HighAvailabilityOptions.HA_STORAGE_PATH, haDir.toURI().toString());
    }
    return config;
}
TestDir(TemporaryFolder testDir) throws IOException {
    String baseSourcePath = "ml-modules/root/dbfunctiondef/positive/decoratorBase/";
    String customSourcePath = "ml-modules/root/dbfunctiondef/positive/decoratorCustom/";
    String otherSourcePath = "ml-modules/root/dbfunctiondef/positive/mimetype/";
    srcDir = testDir.newFolder("src");
    baseServiceDir = new File(srcDir, baseSourcePath);
    customServiceDir = new File(srcDir, customSourcePath);
    otherServiceDir = new File(srcDir, otherSourcePath);
    buildFile = testDir.newFile("build.gradle");
    propsFile = testDir.newFile("gradle.properties");
    baseServiceDir.mkdirs();
    customServiceDir.mkdirs();
    otherServiceDir.mkdirs();
    GradleTestUtil.copyFiles(new File("src/test/" + baseSourcePath), baseServiceDir);
    GradleTestUtil.copyFiles(new File("src/test/" + customSourcePath), customServiceDir);
    GradleTestUtil.copyFiles(new File("src/test/" + otherSourcePath), otherServiceDir);
}
@Before
public void setUp() throws Exception {
    folder = new TemporaryFolder();
    folder.create();
    testDir = folder.newFolder("fileTests");
    childDirA = testDir + File.separator + "childDirA";
    childDirB = testDir + File.separator + "childDirB";
    childDirC = childDirA + File.separator + "childDirC";
    childDirD = childDirC + File.separator + "childDirD";
    fileName1 = childDirA + File.separator + "testFile1.txt";
    fileName1Zipped = fileName1 + ".gz";
    fileName2 = childDirD + File.separator + "testFile2.txt";
    fileName3 = childDirB + File.separator + ".testFile3.txt";
    objectFileName = childDirA + File.separator + "person1.ser";
    objectFileNameZipped = objectFileName + ".gz";
    fileText = "Sample Text";
}
public static void beforeClass(TemporaryFolder junitFolder) throws IOException {
    if (System.getProperty("MCR.Home") == null) {
        File baseDir = junitFolder.newFolder("mcrhome");
        System.out.println("Setting MCR.Home=" + baseDir.getAbsolutePath());
        System.setProperty("MCR.Home", baseDir.getAbsolutePath());
    }
    if (System.getProperty("MCR.AppName") == null) {
        String currentComponentName = getCurrentComponentName();
        System.out.println("Setting MCR.AppName=" + currentComponentName);
        System.setProperty("MCR.AppName", currentComponentName);
    }
    File configDir = new File(System.getProperty("MCR.Home"), System.getProperty("MCR.AppName"));
    System.out.println("Creating config directory: " + configDir);
    configDir.mkdirs();
}
public Preferences create(String name, TemporaryFolder folder) {
    try {
        final File srcDir = folder.newFolder();
        final File backupDir = folder.newFolder();
        final File lockDir = folder.newFolder();
        DirectoryProvider directoryProvider = new DirectoryProvider() {
            @Override
            public File getStoreDirectory() {
                return srcDir;
            }

            @Override
            public File getBackupDirectory() {
                return backupDir;
            }

            @Override
            public File getLockDirectory() {
                return lockDir;
            }
        };
        return create(name, directoryProvider);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
TestDir(TemporaryFolder testDir) throws IOException {
    String sourcePath = "ml-modules/root/dbfunctiondef/positive/sessions/";
    srcDir = testDir.newFolder("src");
    serviceDir = new File(srcDir, sourcePath);
    javaBaseDir = new File(srcDir, "main/java");
    buildFile = testDir.newFile("build.gradle");
    propsFile = testDir.newFile("gradle.properties");
    outClass = new File(javaBaseDir, "com/marklogic/client/test/dbfunction/positive/SessionsBundle.java");
    serviceDir.mkdirs();
    GradleTestUtil.copyFiles(new File("src/test/" + sourcePath), serviceDir);
    javaBaseDir.mkdirs();
}
public HgTestRepo(String workingCopyName, TemporaryFolder temporaryFolder) throws IOException {
    super(temporaryFolder);
    File tempFolder = temporaryFolder.newFolder();
    remoteRepo = new File(tempFolder, "remote-repo");
    remoteRepo.mkdirs();
    // copy the file to work around a bug in hg
    File bundleToExtract = new File(tempFolder, "repo.bundle");
    FileUtils.copyFile(new File(HG_BUNDLE_FILE), bundleToExtract);
    setUpServerRepoFromHgBundle(remoteRepo, bundleToExtract);
    File workingCopy = new File(tempFolder, workingCopyName);
    hgCommand = new HgCommand(null, workingCopy, "default", remoteRepo.getAbsolutePath(), null);
    InMemoryStreamConsumer output = inMemoryConsumer();
    if (hgCommand.clone(output, new UrlArgument(remoteRepo.getAbsolutePath())) != 0) {
        fail("Error creating repository\n" + output.getAllOutput());
    }
}
public TestCompiler(JavaCompiler compiler, TemporaryFolder temporaryFolder) throws IOException {
    this.compiler = compiler;
    this.fileManager = compiler.getStandardFileManager(null, null, null);
    this.outputLocation = temporaryFolder.newFolder();
    Iterable<? extends File> temp = Collections.singletonList(this.outputLocation);
    this.fileManager.setLocation(StandardLocation.CLASS_OUTPUT, temp);
    this.fileManager.setLocation(StandardLocation.SOURCE_OUTPUT, temp);
}
@Before
public void setup() throws IOException {
    TemporaryFolder tmpFolder = new TemporaryFolder();
    tmpFolder.create();
    File tmp = makeTempRepoDir(tmpFolder, "rootdir");
    original = tmpFolder.newFolder("original");
    FileUtils.copyDirectory(tmp, original);
    repoStore = new FSGitRepoStore(tmp.getAbsolutePath(), Optional.empty());
}
@Before
public void setup() throws IOException {
    TemporaryFolder tmpFolder = new TemporaryFolder();
    tmpFolder.create();
    testDir = ResourceUtil.copyOfFolderResource(
        RESOURCE_DIR + "/testdir", tmpFolder::newFolder);
    dirWithEmptyFile = ResourceUtil.copyOfFolderResource(
        RESOURCE_DIR + "/dir_with_empty_file", tmpFolder::newFolder);
    tmpDir = tmpFolder.newFolder();
}
public static File makeTempRepoDir(TemporaryFolder tmpFolder, String name) throws IOException {
    File tmp = tmpFolder.newFolder(name);
    Path rootdir = Paths.get(
        "src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir");
    FileUtils.copyDirectory(rootdir.toFile(), tmp);
    Files.renameAll(tmp, "DOTgit", ".git");
    return tmp;
}
public void setup() throws IOException {
    TokyoCabinetBenchmark.shuffleKeys(randomKeys);
    temporaryFolder = new TemporaryFolder();
    temporaryFolder.create();
    final File testsDirectory = temporaryFolder.newFolder("data");
    env = new Env();
    env.open(testsDirectory.getPath(), Constants.NOSYNC | Constants.WRITEMAP);
    env.setMapSize(MAP_SIZE);
    db = env.openDatabase();
}
public static void prepare(TemporaryFolder tempFolder) {
    try {
        File baseDirForSecureRun = tempFolder.newFolder();
        LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
        String hostName = "localhost";
        Properties kdcConf = MiniKdc.createConf();
        if (LOG.isDebugEnabled()) {
            kdcConf.setProperty(MiniKdc.DEBUG, "true");
        }
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
        kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
        kdc.start();
        LOG.info("Started Mini KDC");
        File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
        testKeytab = keytabFile.getAbsolutePath();
        testZkServerPrincipal = "zookeeper/127.0.0.1";
        testZkClientPrincipal = "zk-client/127.0.0.1";
        testKafkaServerPrincipal = "kafka/" + hostName;
        hadoopServicePrincipal = "hadoop/" + hostName;
        testPrincipal = "client/" + hostName;
        kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
            hadoopServicePrincipal, testZkClientPrincipal, testKafkaServerPrincipal);
        testPrincipal = testPrincipal + "@" + kdc.getRealm();
        testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
        testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
        testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
        hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
        LOG.info("-------------------------------------------------------------------");
        LOG.info("Test Principal: {}", testPrincipal);
        LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
        LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
        LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
        LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
        LOG.info("Test Keytab: {}", testKeytab);
        LOG.info("-------------------------------------------------------------------");
        // The security context is established so that non-Hadoop applications that require
        // JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific applications,
        // the context can be reinitialized with the Hadoop configuration by calling
        // ctx.setHadoopConfiguration() so that the UGI implementation works properly.
        // See the Yarn test module for reference.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
        flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
        flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
        SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
        TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
        populateJavaPropertyVariables();
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while preparing the secure environment.", e);
    }
}
/**
 * Tests that the {@link TaskStateManager} properly returns the subtask-local state dir from the
 * corresponding {@link TaskLocalStateStoreImpl}.
 */
@Test
public void testForwardingSubtaskLocalStateBaseDirFromLocalStateStore() throws IOException {
    JobID jobID = new JobID(42L, 43L);
    AllocationID allocationID = new AllocationID(4711L, 23L);
    JobVertexID jobVertexID = new JobVertexID(12L, 34L);
    ExecutionAttemptID executionAttemptID = new ExecutionAttemptID(23L, 24L);
    TestCheckpointResponder checkpointResponderMock = new TestCheckpointResponder();
    Executor directExecutor = Executors.directExecutor();
    TemporaryFolder tmpFolder = new TemporaryFolder();
    try {
        tmpFolder.create();
        File[] allocBaseDirs = new File[]{tmpFolder.newFolder(), tmpFolder.newFolder(), tmpFolder.newFolder()};
        LocalRecoveryDirectoryProviderImpl directoryProvider =
            new LocalRecoveryDirectoryProviderImpl(allocBaseDirs, jobID, jobVertexID, 0);
        LocalRecoveryConfig localRecoveryConfig =
            new LocalRecoveryConfig(true, directoryProvider);
        TaskLocalStateStore taskLocalStateStore =
            new TaskLocalStateStoreImpl(jobID, allocationID, jobVertexID, 13, localRecoveryConfig, directExecutor);
        TaskStateManager taskStateManager = taskStateManager(
            jobID,
            executionAttemptID,
            checkpointResponderMock,
            null,
            taskLocalStateStore);
        LocalRecoveryConfig localRecoveryConfFromTaskLocalStateStore =
            taskLocalStateStore.getLocalRecoveryConfig();
        LocalRecoveryConfig localRecoveryConfFromTaskStateManager =
            taskStateManager.createLocalRecoveryConfig();
        for (int i = 0; i < 10; ++i) {
            Assert.assertEquals(allocBaseDirs[i % allocBaseDirs.length],
                localRecoveryConfFromTaskLocalStateStore.getLocalStateDirectoryProvider().allocationBaseDirectory(i));
            Assert.assertEquals(allocBaseDirs[i % allocBaseDirs.length],
                localRecoveryConfFromTaskStateManager.getLocalStateDirectoryProvider().allocationBaseDirectory(i));
        }
        Assert.assertEquals(
            localRecoveryConfFromTaskLocalStateStore.isLocalRecoveryEnabled(),
            localRecoveryConfFromTaskStateManager.isLocalRecoveryEnabled());
    } finally {
        tmpFolder.delete();
    }
}
public static void prepare(TemporaryFolder tempFolder) {
    try {
        File baseDirForSecureRun = tempFolder.newFolder();
        LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
        String hostName = "localhost";
        Properties kdcConf = MiniKdc.createConf();
        if (LOG.isDebugEnabled()) {
            kdcConf.setProperty(MiniKdc.DEBUG, "true");
        }
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
        kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
        kdc.start();
        LOG.info("Started Mini KDC");
        File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
        testKeytab = keytabFile.getAbsolutePath();
        testZkServerPrincipal = "zookeeper/" + hostName;
        testZkClientPrincipal = "zk-client/" + hostName;
        testKafkaServerPrincipal = "kafka/" + hostName;
        hadoopServicePrincipal = "hadoop/" + hostName;
        testPrincipal = "client/" + hostName;
        kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
            hadoopServicePrincipal, testZkClientPrincipal, testKafkaServerPrincipal);
        testPrincipal = testPrincipal + "@" + kdc.getRealm();
        testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
        testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
        testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
        hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
        LOG.info("-------------------------------------------------------------------");
        LOG.info("Test Principal: {}", testPrincipal);
        LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
        LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
        LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
        LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
        LOG.info("Test Keytab: {}", testKeytab);
        LOG.info("-------------------------------------------------------------------");
        // The security context is established so that non-Hadoop applications that require
        // JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific applications,
        // the context can be reinitialized with the Hadoop configuration by calling
        // ctx.setHadoopConfiguration() so that the UGI implementation works properly.
        // See the Yarn test module for reference.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
        flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
        flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
        SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
        TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
        populateJavaPropertyVariables();
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while preparing the secure environment.", e);
    }
}
public GitTestRepo(File gitBundleFile, TemporaryFolder temporaryFolder) throws IOException {
    super(temporaryFolder);
    gitRepo = temporaryFolder.newFolder("GitTestRepo" + UUID.randomUUID().toString(), "repo");
    cloneBundleToFolder(gitBundleFile, gitRepo);
    tmpFolders.add(gitRepo);
}