The following are example usages of org.apache.commons.io.FilenameUtils#isExtension(). Click a link to view the source code on GitHub, or leave a comment on the right.
/**
 * Reads dynamic settings from a configuration file.
 *
 * <p>Files ending in {@code .yml}/{@code .yaml} are parsed as YAML; everything
 * else is parsed as JSON. The Guava module is always registered; any extra
 * Jackson modules supplied by the caller are registered first.
 *
 * @param configFile path to the configuration file
 * @param modules    optional extra Jackson modules to register (may be empty)
 * @return the deserialized settings
 * @throws RuntimeException if the file cannot be read or deserialized
 */
private static DynamicSettings readDynamicSettings(String configFile, Module... modules) {
ObjectMapper mapper;
// Locale.ROOT keeps the extension check locale-independent.
if (FilenameUtils.isExtension(configFile.toLowerCase(Locale.ROOT), new String[]{"yml", "yaml"})) {
mapper = Yaml.mapper().copy();
} else {
mapper = Json.mapper().copy();
}
if (modules != null && modules.length > 0) {
mapper.registerModules(modules);
}
mapper.registerModule(new GuavaModule());
try {
return mapper.readValue(new File(configFile), DynamicSettings.class);
} catch (IOException ex) {
// Log and rethrow with the cause attached so the full stack trace is
// preserved (previously only ex.getMessage() survived).
LOGGER.error("Unable to deserialize config file: " + configFile, ex);
throw new RuntimeException("Unable to deserialize config file: " + configFile, ex);
}
}
/**
 * Copy all entries that are a JAR file or a directory
 */
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings (value="DMI_COLLECTION_OF_URLS", justification="All of type file")
private void copyValidClasspathEntries(Collection<URL> source, Set<URL> destination) {
for (URL entry : source) {
// Skip anything already present in the destination set.
if (destination.contains(entry)) {
continue;
}
final String entryPath = entry.getFile();
// Keep only jar files and directories; everything else is dropped.
final boolean acceptable = FilenameUtils.isExtension(entryPath, "jar")
|| new File(entryPath).isDirectory();
if (acceptable) {
destination.add(entry);
} else if (logger.isDebugEnabled()) {
logger.debug("Ignored classpath entry: " + entryPath);
}
}
}
/**
 * Lazily initializes the Spark session and Java context.
 *
 * <p>Attempts to locate the jar containing {@link KMeansRunner} so Spark can
 * ship it to executors; falls back to an empty jar path when the code source
 * is not a jar (e.g. running from a class directory) or cannot be resolved.
 */
private void initContext() {
if (session == null) {
String jar = "";
try {
jar =
KMeansRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
// equalsIgnoreCase is locale-independent, unlike the previous
// jar.toLowerCase() which misbehaves under e.g. the Turkish locale.
if (!"jar".equalsIgnoreCase(FilenameUtils.getExtension(jar))) {
jar = "";
}
} catch (final URISyntaxException e) {
LOGGER.error("Unable to set jar location in spark configuration", e);
}
session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
jsc = JavaSparkContext.fromSparkContext(session.sparkContext());
}
}
/**
 * Exports the currently selected SDK to an {@code .ini} file chosen by the
 * user. Appends the {@code .ini} suffix when missing and asks before
 * overwriting an existing file. Does nothing when no SDK is selected or the
 * file dialog is cancelled.
 */
private void exportSdk() {
Sdk sdk = getSelectedSdk();
if (sdk != null) {
String fname = SWTFactory.browseFile(this.getShell(), true, Messages.SDKsPreferencePage_SaveSdkAs, new String[]{"*.ini"}, null); //$NON-NLS-1$
if (fname != null) {
// equalsIgnoreCase is locale-independent, unlike the previous
// fname.toLowerCase() which misbehaves under e.g. the Turkish locale.
if (!"ini".equalsIgnoreCase(FilenameUtils.getExtension(fname))) { //$NON-NLS-1$
fname = fname + ".ini"; //$NON-NLS-1$
}
if (new File(fname).exists()) {
// Confirm overwrite; bail out when the user declines.
if (!SWTFactory.YesNoQuestion(getShell(), Messages.SDKsPreferencePage_AskOverwriteTitle, String.format(Messages.SDKsPreferencePage_AskOverwriteQuestion, fname))) {
return;
}
}
SdkIniFileWriter.exportSdk(sdk, fname);
}
}
}
/**
 * Lazily initializes the Spark session and Java context.
 *
 * <p>Attempts to locate the jar containing {@link RasterTileResizeSparkRunner}
 * so Spark can ship it to executors; falls back to an empty jar path when the
 * code source is not a jar or cannot be resolved.
 */
private void initContext() {
if (session == null) {
String jar = "";
try {
jar =
RasterTileResizeSparkRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
// equalsIgnoreCase is locale-independent, unlike the previous
// jar.toLowerCase() which misbehaves under e.g. the Turkish locale.
if (!"jar".equalsIgnoreCase(FilenameUtils.getExtension(jar))) {
jar = "";
}
} catch (final URISyntaxException e) {
LOGGER.error("Unable to set jar location in spark configuration", e);
}
session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
jsc = JavaSparkContext.fromSparkContext(session.sparkContext());
}
}
/**
 * Lazily initializes the Spark session.
 *
 * <p>Attempts to locate the jar containing {@link SqlQueryRunner} so Spark can
 * ship it to executors; falls back to an empty jar path when the code source
 * is not a jar or cannot be resolved.
 */
private void initContext() {
if (session == null) {
String jar = "";
try {
jar =
SqlQueryRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
// equalsIgnoreCase is locale-independent, unlike the previous
// jar.toLowerCase() which misbehaves under e.g. the Turkish locale.
if (!"jar".equalsIgnoreCase(FilenameUtils.getExtension(jar))) {
jar = "";
}
} catch (final URISyntaxException e) {
LOGGER.error("Unable to set jar location in spark configuration", e);
}
session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
}
}
@Override
public boolean accept(File file) {
// Directories never match.
if (file.isDirectory()) {
return false;
}
// A zip report counts only when it has no sibling JSON report file,
// which would otherwise duplicate the report.
final boolean zipWithoutJson = FilenameUtils.isExtension(file.getName(), ZipReport.ZIP_EXTENSION)
&& !file.getParentFile().toPath().resolve(ROOT_JSON_REPORT_FILE).toFile().exists();
return zipWithoutJson || file.toPath().endsWith(ROOT_JSON_REPORT_FILE);
}
/**
 * Loads a report description from either a zipped report or a plain JSON
 * report file.
 *
 * @param reportFile         the report file ({@code .zip} or JSON)
 * @param relativeReportPath path of the report relative to the report root;
 *                           used to derive the description's identifier
 * @return the parsed description, or {@code null} when parsing fails (the
 *         failure is logged)
 */
private TestScriptDescription getReport(File reportFile, Path relativeReportPath) {
String reportRootPath = reportFile.toString();
try {
if (FilenameUtils.isExtension(reportRootPath, ZipReport.ZIP_EXTENSION)) {
try (ZipFile zipFile = new ZipFile(reportFile)) {
// The root JSON lives under "<archive-base-name>/<entry>" inside the zip.
String entryToExtract = FilenameUtils.removeExtension(reportFile.getName()) + "/" + ROOT_JSON_REPORT_ZIP_ENTRY;
logger.debug("Extract entry {}", entryToExtract);
ZipEntry zipJson = zipFile.getEntry(entryToExtract);
// getEntry() returns null for a missing entry; fail with a clear
// message instead of an opaque NullPointerException from
// getInputStream(null). The outer catch still logs and returns null.
if (zipJson == null) {
throw new IllegalArgumentException(String.format("Archive '%s' does not contain entry '%s'", reportRootPath, entryToExtract));
}
try (InputStream stream = zipFile.getInputStream(zipJson)) {
return convertToTestScriptDescription(FilenameUtils.removeExtension(relativeReportPath.toString()), jsonObjectMapper.readValue(stream, ReportRoot.class));
}
}
}
else if (FilenameUtils.isExtension(reportRootPath, JSON_EXTENSION)) {
try (InputStream stream = new FileInputStream(reportFile)) {
return convertToTestScriptDescription(relativeReportPath.getParent().getParent().toString(), jsonObjectMapper.readValue(stream, ReportRoot.class));
}
}
else {
throw new IllegalArgumentException(String.format("Unknown file extension '%s'", FilenameUtils.getExtension(reportRootPath)));
}
} catch (Exception e) {
logger.error(String.format("Unable to parse report '%s'", reportRootPath), e);
return null;
}
}
/**
 * Checks whether the given file name has one of the configured image
 * extensions ({@code appSettings.imageFileFormats}).
 *
 * @param fileName the file name to test; may be {@code null} or empty
 * @return {@code true} if the extension matches a configured image format
 *         (case-insensitively), {@code false} otherwise
 */
public boolean isImageFormat(String fileName) {
if (fileName != null && !fileName.isEmpty()) {
String[] arrImageFormat = appConfig.getStringArray("appSettings.imageFileFormats");
// Compare extensions with equalsIgnoreCase rather than lower-casing the
// whole name: toLowerCase() without a Locale is locale-sensitive (e.g.
// the Turkish dotted/dotless 'i'). Also guards a null config array,
// which previously made an extension-less name count as an image.
if (arrImageFormat != null) {
String ext = FilenameUtils.getExtension(fileName);
for (String format : arrImageFormat) {
if (format != null && format.equalsIgnoreCase(ext)) {
return true;
}
}
}
}
return false;
}
@Override
public boolean accept(File pathname) {
// Only plain files qualify; optionally skip dot-files as well.
final String name = pathname.getName();
if (!pathname.isFile() || (excludeStartingDot && name.startsWith("."))) {
return false;
}
if (caseSensitive) {
// Exact-case extension match.
return FilenameUtils.isExtension(name, extension);
}
// Case-insensitive match on the raw extension.
return extension.equalsIgnoreCase(FilenameUtils.getExtension(name));
}
/**
 * Lists the paths of all entries in {@code directory} whose name has the
 * extension {@code fileExt}.
 *
 * <p>Best-effort: if the directory cannot be read (missing, not a directory,
 * permission denied) an empty list is returned rather than an exception.
 *
 * @param directory directory to scan (non-recursive)
 * @param fileExt   extension to match, without the leading dot
 * @return matching paths as strings; empty when none match or on I/O error
 */
public static List<String> fileList (String directory, String fileExt) {
List<String> fileNames = new ArrayList<>();
try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(Paths.get(directory))) {
for (Path path : directoryStream) {
if (FilenameUtils.isExtension(path.getFileName().toString(), fileExt)) {
fileNames.add(path.toString());
}
}
} catch (IOException ignored) {
// Deliberately swallowed: listing is best-effort and an unreadable
// directory simply yields an empty result.
}
return fileNames;
}
@Override
public boolean matches(Source src) {
final String name = src.getOrigin().getName().toLowerCase();
final String[] extensions = getInputFileExtensions();
// Use the scalar overload for a single extension, the array overload otherwise.
return extensions.length == 1
? FilenameUtils.isExtension(name, extensions[0])
: FilenameUtils.isExtension(name, extensions);
}
public OutputResult getOutputResult(String userBaseDirectory, String outputDirectory) {
// Archive targets (.zip/.jar) are written as a single archive; any other
// destination is treated as a folder tree.
final boolean archiveTarget = FilenameUtils.isExtension(outputDirectory, new String[]{"zip", "jar"});
if (archiveTarget) {
return new ZipOutputResult(outputDirectory);
}
return new FolderOutputResult(newUserSource(userBaseDirectory), newGeneratedSource(outputDirectory), fileTracker);
}
public static Schema getNewestSchemaFromSource(Path sourceDir, FileSystem fs) throws IOException {
// Sort newest-first so the first schema found comes from the most
// recently modified file.
final FileStatus[] entries = fs.listStatus(sourceDir);
Arrays.sort(entries, new LastModifiedDescComparator());
for (FileStatus entry : entries) {
if (entry.isDirectory()) {
// Depth-first recursion into subdirectories, still newest-first.
final Schema nested = getNewestSchemaFromSource(entry.getPath(), fs);
if (nested != null) {
return nested;
}
} else if (FilenameUtils.isExtension(entry.getPath().getName(), AVRO)) {
return AvroUtils.getSchemaFromDataFile(entry.getPath(), fs);
}
}
// No Avro file anywhere under sourceDir.
return null;
}
@Override
public boolean matches(Source src) {
final String name = src.getOrigin().getName().toLowerCase();
// Markdown sources always match.
if (FilenameUtils.isExtension(name, new String[]{"markdown", "md"})) {
return true;
}
// Plain .txt matches only when the source is laid out as a post.
return "post".equals(src.getMeta().get("layout")) && FilenameUtils.isExtension(name, "txt");
}
@Override
public boolean accept(File file) {
// Accept readable files named "config*" with an acceptable extension.
final String name = file.getName();
if (!FilenameUtils.getBaseName(name).startsWith("config")) {
return false;
}
return FilenameUtils.isExtension(name, ACCEPTABLE_EXTENSIONS) && file.canRead();
}
// Builds a packed SDF definition: registers incremental-build dependencies on
// the input file, its origin build, and each include path, then runs the
// Stratego "pack-sdf" strategy and records the produced output file.
@Override public OutputPersisted<File> build(Input input) throws IOException {
// Incremental-build requirement on the primary input file.
require(input.inputFile);
if(input.origin != null) {
// Make sure the producing build unit has run first.
requireBuild(input.origin);
}
final Arguments args = new Arguments();
args.addAll(input.extraArgs);
for(File path : input.includePaths) {
// Track only existence of include paths; skip those that are absent.
require(path, FileExistsStamper.instance);
if(!path.exists()) {
continue;
}
// .def files are passed via -Idef, directories/other paths via -I.
if(FilenameUtils.isExtension(path.getName(), "def")) {
args.addFile("-Idef", path);
} else {
/*
 * HACK: for full incremental soundness, a require on the directory is needed here, since new files can
 * be added to the path, which influence pack-sdf. However, since the Spoofax build generates new files
 * into some of these directories, that would cause the requirement to always be inconsistent, always
 * triggering a rebuild. This is why we omit the requirement.
 *
 * seba: This could be solved using a customary stamper that only tracks files matching some naming
 * convention.
 */
args.addFile("-I", path);
}
}
// @formatter:off
// Final command line: input, output, then the include arguments built above.
final Arguments arguments = new Arguments()
.addFile("-i", input.inputFile)
.addFile("-o", input.outputFile)
.addAll(args)
;
// Run the pack-sdf Stratego strategy; the tracker parses resources
// mentioned after " including " in the tool's log output.
final ExecutionResult result = new StrategoExecutor()
.withToolsContext()
.withStrategy(main_pack_sdf_0_0.instance)
.withTracker(newResourceTracker(Pattern.quote(" including ") + ".*"))
.withName("pack-sdf")
.executeCLI(arguments)
;
// @formatter:on
// Declare the generated file so dependents can require it.
provide(input.outputFile);
// Files the tool reported reading become additional requirements
// (extracted from its error/log stream).
for(File required : extractRequiredPaths(result.errLog)) {
require(required);
}
setState(State.finished(result.success));
return OutputPersisted.of(input.outputFile);
}
@Override
public boolean accept(File pathname) {
// Everything except zip archives passes this filter.
final boolean isZip = FilenameUtils.isExtension(pathname.getName(), ZIP_EXTENSION);
return !isZip;
}
// Rebuilds the file-browser tree from the MPQ list file: reads each file name,
// creates a tree node per file (grouped by directory path), indexes nodes by
// lower-cased path in fileTreeNodes (and .cof files additionally by base name
// in fileTreeCofNodes), then sorts and installs the tree into the UI.
private void readMPQs() {
// Lazily create the lookup tries on first call; clear them on refresh.
if (fileTreeNodes == null) {
fileTreeNodes = new PatriciaTrie<>();
fileTreeCofNodes = new PatriciaTrie<>();
} else {
fileTreeNodes.clear();
fileTreeCofNodes.clear();
}
BufferedReader reader = null;
try {
//if (options_useExternalList.isChecked()) {
reader = Gdx.files.internal(ASSETS + "(listfile)").reader(4096);
//} else {
// try {
// reader = new BufferedReader(new InputStreamReader((new ByteArrayInputStream(mpq.readBytes("(listfile)")))));
// } catch (Throwable t) {
// reader = Gdx.files.internal(ASSETS + "(listfile)").reader(4096);
// }
//}
Node<Node, Object, Actor> root = new BaseNode(new VisLabel("root"));
// Optionally skip entries that are not actually present in the loaded MPQs.
final boolean checkExisting = options_checkExisting.isChecked();
String fileName;
while ((fileName = reader.readLine()) != null) {
if (checkExisting && !Riiablo.mpqs.contains(fileName)) {
continue;
}
// Ensure the directory chain for this entry exists in the tree.
String path = FilenameUtils.getPathNoEndSeparator(fileName).toLowerCase();
treeify(fileTreeNodes, root, path);
final MPQFileHandle handle = (MPQFileHandle) Riiablo.mpqs.resolve(fileName);
VisLabel label = new VisLabel(FilenameUtils.getName(fileName));
final Node node = new BaseNode(label);
node.setValue(handle);
// Right-click on the label opens the context menu for this entry.
label.addListener(new ClickListener(Input.Buttons.RIGHT) {
@Override
public void clicked(InputEvent event, float x, float y) {
showPopmenu(node, handle);
}
});
// Index by lower-cased full name; .cof files get a secondary index
// keyed by base name for quick lookup.
String key = fileName.toLowerCase();
fileTreeNodes.put(key, node);
if (FilenameUtils.isExtension(key, "cof")) {
key = FilenameUtils.getBaseName(key);
fileTreeCofNodes.put(key, node);
}
// Attach the node to its parent directory node (root for top-level
// entries). Directory keys are stored with a trailing backslash.
if (path.isEmpty()) {
root.add(node);
} else {
fileTreeNodes.get(path + "\\").add(node);
}
}
// Sort the assembled tree, then swap it into the widget and reset the filter.
sort(root);
fileTree.clearChildren();
for (Node child : root.getChildren()) {
fileTree.add(child);
}
fileTree.layout();
fileTreeFilter.clearText();
} catch (IOException e) {
throw new GdxRuntimeException("Failed to read list file.", e);
} finally {
StreamUtils.closeQuietly(reader);
}
}
// Runs one MR compaction job for this dataset. Two modes:
//  - late-data movement: copy late files (matching applicable extensions) to
//    the late/output path, optionally flagging the dataset for recompaction;
//  - normal compaction: configure, submit and wait for the MR job, then
//    publish (or abort) its output depending on data completeness.
// On success the source dir is renamed or the output dir marked completed,
// and a records-count event is emitted. Any failure is rethrown unchecked.
@Override
public void run() {
Configuration conf = HadoopUtils.getConfFromState(this.dataset.jobProps());
// Turn on mapreduce output compression by default
if (conf.get("mapreduce.output.fileoutputformat.compress") == null && conf.get("mapred.output.compress") == null) {
conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
}
// Disable delegation token cancellation by default
if (conf.get("mapreduce.job.complete.cancel.delegation.tokens") == null) {
conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
}
try {
DateTime compactionTimestamp = getCompactionTimestamp();
LOG.info("MR Compaction Job Timestamp " + compactionTimestamp.getMillis());
if (this.dataset.jobProps().getPropAsBoolean(MRCompactor.COMPACTION_JOB_LATE_DATA_MOVEMENT_TASK, false)) {
// Late-data movement mode: no MR job, just copy qualifying late files.
List<Path> newLateFilePaths = Lists.newArrayList();
for (String filePathString : this.dataset.jobProps()
.getPropAsList(MRCompactor.COMPACTION_JOB_LATE_DATA_FILES)) {
// Only files with an applicable extension are treated as late data.
if (FilenameUtils.isExtension(filePathString, getApplicableFileExtensions())) {
newLateFilePaths.add(new Path(filePathString));
}
}
// Deduplicated output keeps late data in a separate late path.
Path lateDataOutputPath = this.outputDeduplicated ? this.dataset.outputLatePath() : this.dataset.outputPath();
LOG.info(String.format("Copying %d late data files to %s", newLateFilePaths.size(), lateDataOutputPath));
if (this.outputDeduplicated) {
if (!this.fs.exists(lateDataOutputPath)) {
if (!this.fs.mkdirs(lateDataOutputPath)) {
throw new RuntimeException(
String.format("Failed to create late data output directory: %s.", lateDataOutputPath.toString()));
}
}
}
this.copyDataFiles(lateDataOutputPath, newLateFilePaths);
if (this.outputDeduplicated) {
// New late data may warrant recompacting the deduplicated output.
dataset.checkIfNeedToRecompact (datasetHelper);
}
this.status = Status.COMMITTED;
} else {
// Normal compaction mode: bail out early if output exists and must not
// be overwritten.
if (this.fs.exists(this.dataset.outputPath()) && !canOverwriteOutputDir()) {
LOG.warn(String.format("Output paths %s exists. Will not compact %s.", this.dataset.outputPath(),
this.dataset.inputPaths()));
this.status = Status.COMMITTED;
return;
}
addJars(conf);
Job job = Job.getInstance(conf);
this.configureJob(job);
this.submitAndWait(job);
if (shouldPublishData(compactionTimestamp)) {
// remove all invalid empty files due to speculative task execution
List<Path> goodPaths = CompactionJobConfigurator.getGoodFiles(job, this.dataset.outputTmpPath(), this.tmpFs,
ImmutableList.of("avro"));
if (!this.recompactAllData && this.recompactFromDestPaths) {
// append new files without deleting output directory
addGoodFilesToOutputPath(goodPaths);
// clean up late data from outputLateDirectory, which has been set to inputPath
deleteFilesByPaths(this.dataset.inputPaths());
} else {
// Replace the output with the freshly compacted tmp contents.
moveTmpPathToOutputPath();
if (this.recompactFromDestPaths) {
deleteFilesByPaths(this.dataset.additionalInputPaths());
}
}
submitSlaEvent(job);
LOG.info("Successfully published data for input folder " + this.dataset.inputPaths());
this.status = Status.COMMITTED;
} else {
// Input data incomplete for this timestamp: abort without publishing.
LOG.info("Data not published for input folder " + this.dataset.inputPaths() + " due to incompleteness");
this.status = Status.ABORTED;
return;
}
}
// Mark completion: either rename the source dir or stamp the output dir.
if (renameSourceDir) {
MRCompactor.renameSourceDirAsCompactionComplete (this.fs, this.dataset);
} else {
this.markOutputDirAsCompleted(compactionTimestamp);
}
this.submitRecordsCountsEvent();
} catch (Throwable t) {
// Propagate any failure unchecked; callers handle job-level errors.
throw Throwables.propagate(t);
}
}