下面列出了 org.apache.commons.io.filefilter.PrefixFileFilter 的实例代码(部分示例与 AndFileFilter 组合使用),或者点击链接到 GitHub 查看源代码,也可以在右侧发表评论。
/**
 * This function figures out the right lineage file path+name to process sorted by the last
 * time they are modified. (old -> new)
 *
 * @return the lineage files from the configured directory with the configured prefix,
 *         oldest first; an empty array when none are found or scanning fails
 */
public File[] getCurrentFiles() {
    try {
        LOG.info("Scanning: " + directoryName);
        File folder = new File(directoryName);
        File[] listOfFiles = folder.listFiles((FileFilter) new PrefixFileFilter(prefix, IOCase.SENSITIVE));
        if ((listOfFiles == null) || (listOfFiles.length == 0)) {
            LOG.info("Found no lineage files.");
            return new File[0];
        }
        if (listOfFiles.length > 1) {
            Arrays.sort(listOfFiles, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR);
        }
        // BUG FIX: the original concatenated the count onto a "{}" placeholder
        // string, so the placeholder was never substituted. Use parameterized
        // logging so the count actually appears in the message.
        LOG.info("Found {} lineage files", listOfFiles.length);
        return listOfFiles;
    } catch (Exception e) {
        LOG.error("Import lineage file failed.", e);
    }
    // Reached only on unexpected failure; callers get a safe empty result.
    return new File[0];
}
/**
 * Creates a command store backed by files in {@code file}'s parent directory
 * that share {@code file}'s name as their common file-name prefix.
 *
 * @param file base file: its parent directory becomes the store's base dir and its name the per-file prefix
 * @param maxFileSize size limit for a single command file — presumably triggers rolling to a new file; TODO confirm against usage
 * @param minTimeMilliToGcAfterModified minimum idle time in ms before a modified file may be garbage-collected — TODO confirm
 * @param fileNumToKeep supplier of the number of command files to retain
 * @param commandReaderFlyingThreshold threshold related to in-flight command readers — NOTE(review): semantics not visible here, verify
 * @param keeperMonitor monitor used to create this store's command-store delay tracker
 * @throws IOException if inspecting or opening the existing command files fails
 */
public DefaultCommandStore(File file, int maxFileSize, int minTimeMilliToGcAfterModified, IntSupplier fileNumToKeep, long commandReaderFlyingThreshold, KeeperMonitor keeperMonitor) throws IOException {
this.baseDir = file.getParentFile();
this.fileNamePrefix = file.getName();
this.maxFileSize = maxFileSize;
this.fileNumToKeep = fileNumToKeep;
this.commandReaderFlyingThreshold = commandReaderFlyingThreshold;
this.minTimeMilliToGcAfterModified = minTimeMilliToGcAfterModified;
this.commandStoreDelay = keeperMonitor.createCommandStoreDelay(this);
// Matches every store file whose name starts with the base file's name.
fileFilter = new PrefixFileFilter(fileNamePrefix);
// Resume writing at the file with the greatest start offset found on disk.
long currentStartOffset = findMaxStartOffset();
File currentFile = fileForStartOffset(currentStartOffset);
logger.info("Write to {}", currentFile.getName());
CommandFileContext cmdFileCtx = new CommandFileContext(currentStartOffset, currentFile);
cmdFileCtxRef.set(cmdFileCtx);
// Readers are notified up to the last byte already present in the current file.
offsetNotifier = new OffsetNotifier(cmdFileCtx.totalLength() - 1);
}
/**
 * Return set of file user identifiers from a list of files
 *
 * @param user user who uploaded or will upload file
 * @param files list of files objects
 * @return Set containing all user identifiers from list of files
 *
 * @see org.kuali.kfs.sys.batch.BatchInputFileSetType#extractFileUserIdentifiers(org.kuali.rice.kim.api.identity.Person, java.util.List)
 */
public Set<String> extractFileUserIdentifiers(Person user, List<File> files) {
    // Names of interest look like "<prefix><delim><principal><delim><identifier><extension>".
    String namePrefix = new StringBuilder()
            .append(FILE_NAME_PREFIX)
            .append(FILE_NAME_PART_DELIMITER)
            .append(user.getPrincipalName())
            .append(FILE_NAME_PART_DELIMITER)
            .toString();
    IOFileFilter matchingFilter = new AndFileFilter(
            new PrefixFileFilter(namePrefix),
            new SuffixFileFilter(CamsConstants.BarCodeInventory.DATA_FILE_EXTENSION));
    Set<String> identifiers = new TreeSet<String>();
    for (File candidate : files) {
        if (!matchingFilter.accept(candidate)) {
            continue;
        }
        String fileName = candidate.getName();
        if (!fileName.endsWith(CamsConstants.BarCodeInventory.DATA_FILE_EXTENSION)) {
            // Defensive: the suffix filter should make this unreachable.
            LOG.error("Unable to determine file user identifier for file name: " + fileName);
            throw new RuntimeException("Unable to determine file user identifier for file name: " + fileName);
        }
        identifiers.add(StringUtils.substringBetween(fileName, namePrefix, CamsConstants.BarCodeInventory.DATA_FILE_EXTENSION));
    }
    return identifiers;
}
@Override
public File getAvatarResource(String username) throws ApsSystemException {
    // Avatar files are stored as "<username lower-cased>.<something>" inside
    // the avatar sub-folder; the first match wins.
    try {
        if (StringUtils.isNotBlank(username)) {
            String basePath = this.getAvatarDiskFolder() + AVATAR_SUBFOLDER;
            String[] matches = new File(basePath).list(new PrefixFileFilter(username.toLowerCase() + "."));
            if (matches != null && matches.length > 0) {
                File candidate = new File(basePath + File.separator + matches[0]);
                return candidate.exists() ? candidate : null;
            }
        }
        return null;
    } catch (Throwable t) {
        _logger.error("Error getting avatar resource for user {}", username, t);
        throw new ApsSystemException("Error getting avatar resource for user " + username, t);
    }
}
/**
 * Imports every organization file ("organization.*") found in the import
 * directory; failures on individual files are logged and skipped.
 *
 * @throws Exception never thrown directly; declared for compatibility
 */
private void importOrganizations() throws Exception {
    String[] organizationFileNames = importDir.list( new PrefixFileFilter( "organization." ) );
    // File.list() returns null when the directory is missing or unreadable;
    // the original dereferenced .length and would NPE in that case.
    if ( organizationFileNames == null ) {
        logger.warn( "Unable to list organization files in import directory: " + importDir );
        return;
    }
    logger.info( "Organizations to read: " + organizationFileNames.length );
    for ( String organizationFileName : organizationFileNames ) {
        try {
            importOrganization( organizationFileName );
        }
        catch ( Exception e ) {
            logger.warn( "Unable to import organization:" + organizationFileName, e );
        }
    }
}
/**
 * Deletes the oldest log files matching {@code logFileName} in {@code logDir}
 * so that at most {@code numOfLogFiles} remain.
 *
 * @param numOfLogFiles maximum number of log files to keep
 * @throws IOException if a matching file cannot be deleted
 */
private void removeLogFileIfNeeded(int numOfLogFiles) throws IOException {
    File dir = new File(logDir);
    FilenameFilter filter = new PrefixFileFilter(logFileName);
    File[] matches = dir.listFiles(filter);
    // listFiles() returns null when the directory is missing or unreadable;
    // the original passed that null to Arrays.asList and would NPE.
    if (matches == null) {
        return;
    }
    // Oldest first, so the loop below removes the oldest surplus files.
    Arrays.sort(matches, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR);
    for (int i = 0; i < matches.length - numOfLogFiles; ++i) {
        FileUtils.forceDelete(matches[i]);
        System.out.printf("Removed log file: %s.\n", matches[i].getPath());
    }
}
/**
 * Checks whether the executable named at the start of the given transcoding
 * step command line exists (by file-name prefix) in the transcode directory.
 *
 * @param step the transcoding step command line; empty means nothing is required
 * @return true if the step is empty or its executable is installed
 */
private boolean isTranscodingStepInstalled(String step) {
    if (StringUtils.isEmpty(step)) {
        return true;
    }
    // The executable is the first whitespace-separated token of the step.
    String executable = StringUtil.split(step)[0];
    String[] candidates = getTranscodeDirectory().list(new PrefixFileFilter(executable));
    return candidates != null && candidates.length > 0;
}
/**
 * Returns whether the executable for the given transcoding step is present
 * in the transcode directory.
 *
 * @param step command line of the step; a blank step needs no executable
 * @return true when no executable is required or a matching file exists
 */
private boolean isTranscodingStepInstalled(String step) {
    if (StringUtils.isEmpty(step)) {
        return true;
    }
    // Look for any file whose name starts with the step's first token.
    PrefixFileFilter executableFilter = new PrefixFileFilter(StringUtil.split(step)[0]);
    String[] found = getTranscodeDirectory().list(executableFilter);
    if (found == null) {
        return false;
    }
    return found.length > 0;
}
/**
 * Reconstructs part* files in a folder into a single file; suitable for
 * line-based (e.g. CSV) data. Parts are concatenated in lexical filename
 * order; when {@code removeHeaders} is true the first line of every part
 * except the first is skipped. The part folder is deleted on success.
 *
 * @param partFolder directory containing partial files (part001, part002, ...)
 * @param reconstructed file to which the combined output is written
 * @param removeHeaders whether to drop the header line of all but the first part
 * @throws IOException if {@code partFolder} is not a directory or an I/O operation fails
 */
public static void reconstructLineBased(File partFolder,
        File reconstructed, boolean removeHeaders) throws IOException {
    // BUG FIX: validate BEFORE creating the temp file — the original created
    // reconstr*.tmp first and leaked it when the directory check threw.
    if (!Files.isDirectory(partFolder.toPath()))
        throw new IOException("Not a directory: " + partFolder);
    // Write into a temp file in the same folder, then atomically move it into
    // place so readers never observe a half-written result.
    Path tmpOut = Files.createTempFile(partFolder.toPath(), "reconstr", ".tmp");
    try (BufferedOutputStream dstOut = new BufferedOutputStream(
            new FileOutputStream(tmpOut.toFile()))) {
        File[] fileList = FileUtils.listFiles(partFolder,
                new PrefixFileFilter("part"), TrueFileFilter.TRUE).toArray(new File[0]);
        // Lexical sort puts part001, part002, ... into their intended order.
        Arrays.sort(fileList);
        for (int i = 0; i < fileList.length; i++) {
            if (!fileList[i].canRead()) {
                continue;
            }
            try (BufferedReader in = new BufferedReader(new FileReader(fileList[i]))) {
                // Drop the duplicated header line on all but the first part.
                if (removeHeaders && i != 0)
                    in.readLine();
                IOUtils.copy(in, dstOut);
            }
        }
    }
    Files.move(tmpOut, reconstructed.toPath(),
            StandardCopyOption.ATOMIC_MOVE,
            StandardCopyOption.REPLACE_EXISTING);
    // Best-effort cleanup: tmpOut is normally gone after the move; the part
    // folder is removed once reconstruction succeeded.
    FileUtils.deleteQuietly(tmpOut.toFile());
    FileUtils.deleteQuietly(partFolder);
}
/**
 * Method that to reconstruct part* files into a single file <BR>
 * Suitable for line-based, CSV files.
 *
 * @param partFolder
 *            directory containing partial files (part001, part002, ...)
 * @param reconstructed
 *            file to which the output is written
 * @param removeHeaders
 *            when true, the first line of every part except the first is skipped
 * @throws IOException
 *             if {@code partFolder} is not a directory or an I/O operation fails
 */
public static void reconstructLineBased(File partFolder,
File reconstructed, boolean removeHeaders) throws IOException {
// Write into a temp file in the same folder first, then atomically move it
// into place so readers never see a half-written result.
Path tmpOut = Files.createTempFile(partFolder.toPath(), "reconstr",
".tmp");
BufferedOutputStream dstOut = new BufferedOutputStream(
new FileOutputStream(tmpOut.toFile()));
try {
// NOTE(review): this check runs after the temp file is created, so a
// non-directory argument leaves an orphaned reconstr*.tmp behind.
if (!Files.isDirectory(partFolder.toPath()))
throw new IOException("Not a directory: " + partFolder);
// Recursively collect all files named part* under the folder.
File[] fileList = FileUtils.listFiles(partFolder,
new PrefixFileFilter("part"), TrueFileFilter.TRUE).toArray(
new File[0]);
// Lexical sort puts part001, part002, ... into their intended order.
Arrays.sort(fileList);
for (int i = 0; i < fileList.length; i++) {
if (fileList[i].canRead()) {
BufferedReader in = new BufferedReader(new FileReader(
fileList[i]));
try {
// Drop the duplicated header line on all but the first part.
if (removeHeaders && i != 0)
in.readLine();
IOUtils.copy(in, dstOut);
} finally {
in.close();
}
}
}
} finally {
dstOut.close();
}
Files.move(tmpOut, reconstructed.toPath(),
StandardCopyOption.ATOMIC_MOVE,
StandardCopyOption.REPLACE_EXISTING);
// Best-effort cleanup: tmpOut is normally gone after the move; the part
// folder is removed once reconstruction succeeded.
FileUtils.deleteQuietly(tmpOut.toFile());
FileUtils.deleteQuietly(partFolder);
}
/**
 * Scans the gateway configuration directory for advanced service
 * configuration files (matched by file-name prefix) and registers each one
 * for monitoring.
 */
private void monitorAdvancedServiceConfigurations() {
    final File[] advancedConfigurationFiles = new File(gatewayConfigurationDir)
            .listFiles((FileFilter) new PrefixFileFilter(ADVANCED_CONFIGURATION_FILE_NAME_PREFIX));
    // listFiles() returns null when the directory is missing or unreadable.
    if (advancedConfigurationFiles != null) {
        for (File advancedConfigurationFile : advancedConfigurationFiles) {
            // toPath().toAbsolutePath() yields the same absolute path without
            // the String round-trip of Paths.get(file.getAbsolutePath()).
            monitorAdvancedServiceConfiguration(advancedConfigurationFile.toPath().toAbsolutePath());
        }
    }
}
/**
 * Lists the files in {@code inputFilePath} whose names start with
 * {@code inputFilePrefix} and end with {@code inputFileSuffix}, sorted in
 * their natural (path name) order.
 *
 * @return the matching files, sorted; never null
 * @throws RuntimeException if the input path is missing, not a directory, or cannot be listed
 */
protected List<File> retrieveFilesToAggregate() {
    File inputDirectory = new File(inputFilePath);
    if (!inputDirectory.exists() || !inputDirectory.isDirectory()) {
        throw new RuntimeException(inputFilePath + " does not exist or is not a directory.");
    }
    FileFilter filter = FileFilterUtils.andFileFilter(
        new PrefixFileFilter(inputFilePrefix), new SuffixFileFilter(inputFileSuffix));
    File[] matches = inputDirectory.listFiles(filter);
    // listFiles() returns null on an I/O error even for an existing directory;
    // the original passed that null to Arrays.asList and would NPE.
    if (matches == null) {
        throw new RuntimeException("Unable to list files in " + inputFilePath);
    }
    List<File> fileList = Arrays.asList(matches);
    Collections.sort(fileList);
    return fileList;
}
/**
 * Return set of file user identifiers from a list of files
 *
 * @param user user who uploaded or will upload file
 * @param files list of files objects
 * @return Set containing all user identifiers from list of files
 * @see org.kuali.kfs.sys.batch.BatchInputFileSetType#extractFileUserIdentifiers(org.kuali.rice.kim.api.identity.Person, java.util.List)
 */
public Set<String> extractFileUserIdentifiers(Person user, List<File> files) {
    // Names of interest look like "<prefix><delim><principal><delim><id><suffix>",
    // where the suffix is either the data-file or the recon-file suffix.
    String namePrefix = new StringBuilder()
            .append(FILE_NAME_PREFIX)
            .append(FILE_NAME_PART_DELIMITER)
            .append(user.getPrincipalName())
            .append(FILE_NAME_PART_DELIMITER)
            .toString();
    IOFileFilter matchingFilter = new AndFileFilter(
            new PrefixFileFilter(namePrefix),
            new OrFileFilter(new SuffixFileFilter(EnterpriseFeederService.DATA_FILE_SUFFIX),
                    new SuffixFileFilter(EnterpriseFeederService.RECON_FILE_SUFFIX)));
    Set<String> identifiers = new TreeSet<String>();
    for (File candidate : files) {
        if (!matchingFilter.accept(candidate)) {
            continue;
        }
        String fileName = candidate.getName();
        if (fileName.endsWith(EnterpriseFeederService.DATA_FILE_SUFFIX)) {
            identifiers.add(StringUtils.substringBetween(fileName, namePrefix, EnterpriseFeederService.DATA_FILE_SUFFIX));
        }
        else if (fileName.endsWith(EnterpriseFeederService.RECON_FILE_SUFFIX)) {
            identifiers.add(StringUtils.substringBetween(fileName, namePrefix, EnterpriseFeederService.RECON_FILE_SUFFIX));
        }
        else {
            // Defensive: the suffix filters should make this unreachable.
            LOG.error("Unable to determine file user identifier for file name: " + fileName);
            throw new RuntimeException("Unable to determine file user identifier for file name: " + fileName);
        }
    }
    return identifiers;
}
/**
 * Finds the temporary report files for the given document, sorted by file
 * name so aggregation order is deterministic.
 *
 * @param documentNumber document whose temporary reports are wanted
 * @return the matching report files sorted by name; never null
 * @throws RuntimeException if the temporary reports directory is missing or unreadable
 */
protected List<File> getReportsToAggregateIntoReport(String documentNumber) {
    File inputDirectory = new File(temporaryReportsDirectory);
    if (!inputDirectory.exists() || !inputDirectory.isDirectory()) {
        LOG.error(temporaryReportsDirectory + " does not exist or is not a directory.");
        throw new RuntimeException("Unable to locate temporary reports directory");
    }
    String filePrefix = documentNumber + "_" + temporaryReportFilenameComponent;
    FileFilter filter = FileFilterUtils.andFileFilter(
        new PrefixFileFilter(filePrefix), new SuffixFileFilter(temporaryReportFilenameSuffix));
    File[] matches = inputDirectory.listFiles(filter);
    // listFiles() returns null on an I/O error; guard against the NPE the
    // original would throw inside Arrays.asList().
    if (matches == null) {
        LOG.error(temporaryReportsDirectory + " could not be listed.");
        throw new RuntimeException("Unable to read temporary reports directory");
    }
    // FSKD-244, KFSMI-5424 sort with filename, just in case.
    // Typed comparator replaces the original raw Comparator with casts;
    // listFiles never yields null elements, so the null branches were dead.
    List<File> fileList = Arrays.asList(matches);
    Collections.sort(fileList, new Comparator<File>() {
        public int compare(File file1, File file2) {
            return file1.getName().compareTo(file2.getName());
        }
    });
    return fileList;
}
/**
 * Import admin users. Reads every file in the import directory whose name
 * starts with the admin-users prefix; failures on individual files are
 * logged and skipped.
 *
 * @param writeThreadCount number of write threads passed through to the per-file import
 * @param auditThreadCount number of audit threads passed through to the per-file import
 * @throws Exception never thrown directly; declared for compatibility
 */
private void importAdminUsers(int writeThreadCount, int auditThreadCount) throws Exception {
    String[] fileNames = importDir.list(new PrefixFileFilter(ExportAdmins.ADMIN_USERS_PREFIX + "."));
    // File.list() returns null when the directory is missing or unreadable;
    // the original dereferenced .length and would NPE.
    if (fileNames == null) {
        logger.warn("Unable to list admin user files in import directory: " + importDir);
        return;
    }
    // Log messages previously said "application(s)" — a copy-paste error;
    // these are admin user files.
    logger.info( "Admin user files to read: " + fileNames.length );
    for (String fileName : fileNames) {
        try {
            importAdminUsers(fileName, writeThreadCount, auditThreadCount);
        } catch (Exception e) {
            logger.warn("Unable to import admin users file: " + fileName, e);
        }
    }
}
/**
 * Import admin-user metadata. Metadata files are named with the
 * {@code ADMIN_USER_METADATA_PREFIX} prefix. (The original Javadoc was
 * copy-pasted from importCollections and described the wrong files.)
 *
 * @param writeThreadCount number of write threads passed through to the per-file import
 * @throws Exception never thrown directly; declared for compatibility
 */
private void importMetadata(int writeThreadCount) throws Exception {
    String[] fileNames = importDir.list(
        new PrefixFileFilter( ExportAdmins.ADMIN_USER_METADATA_PREFIX + "." ) );
    // File.list() returns null when the directory is missing or unreadable;
    // the original dereferenced .length and would NPE.
    if (fileNames == null) {
        logger.warn("Unable to list metadata files in import directory: " + importDir);
        return;
    }
    logger.info( "Metadata files to read: " + fileNames.length );
    for (String fileName : fileNames) {
        try {
            importMetadata(fileName, writeThreadCount);
        } catch (Exception e) {
            logger.warn("Unable to import metadata file: " + fileName, e);
        }
    }
}
/**
 * Imports every application file ("application.*") found in the import
 * directory; failures on individual files are logged and skipped.
 *
 * @throws Exception never thrown directly; declared for compatibility
 */
private void importApplications() throws Exception {
    // Local renamed from the original's typo "nanemspaceFileNames".
    String[] applicationFileNames = importDir.list( new PrefixFileFilter( "application." ) );
    // File.list() returns null when the directory is missing or unreadable;
    // the original dereferenced .length and would NPE.
    if ( applicationFileNames == null ) {
        logger.warn( "Unable to list application files in import directory: " + importDir );
        return;
    }
    logger.info( "Applications to read: " + applicationFileNames.length );
    for ( String applicationName : applicationFileNames ) {
        try {
            importApplication( applicationName );
        }
        catch ( Exception e ) {
            logger.warn( "Unable to import application: " + applicationName, e );
        }
    }
}
/**
 * Import collections. Collections files are named:
 * collections.&lt;application_name&gt;.Timestamp.json
 *
 * @throws Exception never thrown directly; declared for compatibility
 */
private void importCollections() throws Exception {
    String[] collectionsFileNames = importDir.list( new PrefixFileFilter( "collections." ) );
    // File.list() returns null when the directory is missing or unreadable;
    // the original dereferenced .length and would NPE.
    if ( collectionsFileNames == null ) {
        logger.warn( "Unable to list collections files in import directory: " + importDir );
        return;
    }
    logger.info( "Collections to read: " + collectionsFileNames.length );
    for ( String collectionName : collectionsFileNames ) {
        try {
            importCollection( collectionName );
        }
        catch ( Exception e ) {
            logger.warn( "Unable to import collection: " + collectionName, e );
        }
    }
}
/**
 * Supplies every TSV suite file whose name starts with "test", "issue", or
 * "sample" as a parameterized-test input.
 */
@Parameters(name = "{index}: running on file {0}")
public static Iterable<File> tsvFiles()
{
    File suiteDir = new File("src/test/resources/tsv3-suite/");
    File[] matches = suiteDir.listFiles(
            (FilenameFilter) new PrefixFileFilter(asList("test", "issue", "sample")));
    // listFiles() returns null when the directory is missing; fail with a
    // clear message instead of an anonymous NullPointerException in asList().
    if (matches == null) {
        throw new IllegalStateException("Cannot list TSV suite directory: " + suiteDir.getAbsolutePath());
    }
    return asList(matches);
}
/**
 * Supplies every TSV suite file whose name starts with "test", "issue", or
 * "sample" as a parameterized-test input.
 */
@Parameters(name = "{index}: running on file {0}")
public static Iterable<File> tsvFiles()
{
    File suiteDir = new File("src/test/resources/tsv3-suite/");
    File[] matches = suiteDir.listFiles(
            (FilenameFilter) new PrefixFileFilter(asList("test", "issue", "sample")));
    // listFiles() returns null when the directory is missing; fail with a
    // clear message instead of an anonymous NullPointerException in asList().
    if (matches == null) {
        throw new IllegalStateException("Cannot list TSV suite directory: " + suiteDir.getAbsolutePath());
    }
    return asList(matches);
}