org.apache.commons.io.FilenameUtils#isExtension() Source Code Examples

Listed below are example usages of org.apache.commons.io.FilenameUtils#isExtension() taken from open-source projects. Each example header names the project and file where the full source can be found on GitHub.

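Before the project examples, a quick self-contained sketch of the overloads that appear throughout the listings below (a single extension, an extension array, and an extension collection). The class and file names in this sketch are made up for illustration; they do not come from any of the projects listed here.

import java.util.Arrays;

import org.apache.commons.io.FilenameUtils;

public class IsExtensionDemo {
    public static void main(String[] args) {
        // Single extension: true only when the text after the last dot equals "jar"
        System.out.println(FilenameUtils.isExtension("lib/tool.jar", "jar"));        // true

        // Extension array: true when the extension matches any entry
        System.out.println(FilenameUtils.isExtension("config.yaml",
                new String[]{"yml", "yaml"}));                                       // true

        // Extension collection: same behaviour with a Collection<String>
        System.out.println(FilenameUtils.isExtension("report.json",
                Arrays.asList("zip", "json")));                                      // true

        // A name without an extension never matches
        System.out.println(FilenameUtils.isExtension("/data/readme", "txt"));        // false
    }
}
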
Example 1   Project: openapi-generator   File: CodegenConfigurator.java
private static DynamicSettings readDynamicSettings(String configFile, Module... modules) {
    ObjectMapper mapper;

    if (FilenameUtils.isExtension(configFile.toLowerCase(Locale.ROOT), new String[]{"yml", "yaml"})) {
        mapper = Yaml.mapper().copy();
    } else {
        mapper = Json.mapper().copy();
    }

    if (modules != null && modules.length > 0) {
        mapper.registerModules(modules);
    }

    mapper.registerModule(new GuavaModule());

    try {
        return mapper.readValue(new File(configFile), DynamicSettings.class);
    } catch (IOException ex) {
        LOGGER.error(ex.getMessage());
        throw new RuntimeException("Unable to deserialize config file: " + configFile);
    }
}
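A note on the toLowerCase(Locale.ROOT) call above: FilenameUtils.isExtension compares extensions case-sensitively, so lowercasing the name first is the common way to also match files named config.YAML or config.Yml. Locale.ROOT keeps that lowercasing independent of the default locale (avoiding, for example, the Turkish dotless-i mapping). The geowave, xds-ide, email-mime-parser, and opoopress examples below follow the same pattern.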
 
Example 2   Project: butterfly   File: ExtensionRegistry.java
/**
 * Copy all entries that are a JAR file or a directory
 */
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings (value="DMI_COLLECTION_OF_URLS", justification="All of type file")
private void copyValidClasspathEntries(Collection<URL> source, Set<URL> destination) {
    String fileName;
    boolean isJarFile;
    boolean isDirectory;

    for (URL url : source) {
        if(destination.contains(url)) {
            continue;
        }

        fileName = url.getFile();
        isJarFile = FilenameUtils.isExtension(fileName, "jar");
        isDirectory = new File(fileName).isDirectory();

        if (isJarFile || isDirectory) {
            destination.add(url);
        } else if (logger.isDebugEnabled()) {
            logger.debug("Ignored classpath entry: " + fileName);
        }
    }
}
 
Example 3   Project: geowave   File: KMeansRunner.java
private void initContext() {
  if (session == null) {
    String jar = "";
    try {
      jar =
          KMeansRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
      if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) {
        jar = "";
      }
    } catch (final URISyntaxException e) {
      LOGGER.error("Unable to set jar location in spark configuration", e);
    }

    session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);

    jsc = JavaSparkContext.fromSparkContext(session.sparkContext());
  }
}
 
Example 4   Project: xds-ide   File: SDKsPreferencePage.java
private void exportSdk() {
    Sdk sdk = getSelectedSdk();
    if (sdk != null) {
        String fname = SWTFactory.browseFile(this.getShell(), true, Messages.SDKsPreferencePage_SaveSdkAs, new String[]{"*.ini"}, null);  //$NON-NLS-1$
        if (fname != null) {
            if (!FilenameUtils.isExtension(fname.toLowerCase(), "ini")) {  //$NON-NLS-1$
                fname = fname + ".ini";  //$NON-NLS-1$
            }
            if (new File(fname).exists()) {
                if (!SWTFactory.YesNoQuestion(getShell(), Messages.SDKsPreferencePage_AskOverwriteTitle, String.format(Messages.SDKsPreferencePage_AskOverwriteQuestion, fname))) {
                    return;
                }
            }
            SdkIniFileWriter.exportSdk(sdk, fname);
        }
    }
}
 
Example 5   Project: geowave   File: RasterTileResizeSparkRunner.java
private void initContext() {
  if (session == null) {
    String jar = "";
    try {
      jar =
          RasterTileResizeSparkRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
      if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) {
        jar = "";
      }
    } catch (final URISyntaxException e) {
      LOGGER.error("Unable to set jar location in spark configuration", e);
    }

    session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);

    jsc = JavaSparkContext.fromSparkContext(session.sparkContext());
  }
}
 
Example 6   Project: geowave   File: SqlQueryRunner.java
private void initContext() {
  if (session == null) {
    String jar = "";
    try {
      jar =
          SqlQueryRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
      if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) {
        jar = "";
      }
    } catch (final URISyntaxException e) {
      LOGGER.error("Unable to set jar location in spark configuration", e);
    }

    session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
  }
}
 
Example 7   Project: sailfish-core   File: ReportFilter.java
@Override
public boolean accept(File file) {

    return !file.isDirectory() && (
            // checks that zip file has no json report file near it to avoid duplicating reports
            (FilenameUtils.isExtension(file.getName(), ZipReport.ZIP_EXTENSION) && !file.getParentFile().toPath().resolve(ROOT_JSON_REPORT_FILE).toFile().exists())
            || (file.toPath().endsWith(ROOT_JSON_REPORT_FILE))
    );
}
 
Example 8   Project: sailfish-core   File: DefaultTestScriptStorage.java
private TestScriptDescription getReport(File reportFile, Path relativeReportPath) {
    String reportRootPath = reportFile.toString();

    try {
        if (FilenameUtils.isExtension(reportRootPath, ZipReport.ZIP_EXTENSION)) {
            try (ZipFile zipFile = new ZipFile(reportFile)) {
                String entryToExtract = FilenameUtils.removeExtension(reportFile.getName()) + "/" + ROOT_JSON_REPORT_ZIP_ENTRY;
                logger.debug("Extract entry {}", entryToExtract);
                ZipEntry zipJson = zipFile.getEntry(entryToExtract);
                try (InputStream stream = zipFile.getInputStream(zipJson)) {
                    return convertToTestScriptDescription(FilenameUtils.removeExtension(relativeReportPath.toString()), jsonObjectMapper.readValue(stream, ReportRoot.class));
                }
            }
        }
        else if (FilenameUtils.isExtension(reportRootPath, JSON_EXTENSION)) {
            try (InputStream stream = new FileInputStream(reportFile)) {
                return convertToTestScriptDescription(relativeReportPath.getParent().getParent().toString(), jsonObjectMapper.readValue(stream, ReportRoot.class));
            }
        }
        else {
            throw new IllegalArgumentException(String.format("Unknown file extension '%s'", FilenameUtils.getExtension(reportRootPath)));
        }
    } catch (Exception e) {
        logger.error(String.format("Unable to parse report '%s'", reportRootPath), e);
        return null;
    }
}
 
Example 9   Project: email-mime-parser   File: AppConfig.java
public boolean isImageFormat(String fileName) {
	if (fileName != null && !fileName.isEmpty()) {
		String[] arrImageFormat = appConfig.getStringArray("appSettings.imageFileFormats");
		return FilenameUtils.isExtension(fileName.toLowerCase(), arrImageFormat);
	}
	return false;
}
 
Example 10   Project: TranskribusCore   File: ExtensionFileFilter.java
@Override
public boolean accept(File pathname) {
	if(!pathname.isFile() || (excludeStartingDot && pathname.getName().startsWith("."))){
		return false;
	}
	if(caseSensitive) {
		return FilenameUtils.isExtension(pathname.getName(), extension);
	} else {
		final String ext = FilenameUtils.getExtension(pathname.getName());
		return extension.equalsIgnoreCase(ext);
	}
}
 
Example 11   Project: owl2neo4j   File: Owl2Neo4J.java
public static List<String> fileList (String directory, String fileExt) {
    List<String> fileNames = new ArrayList<>();
    try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(Paths.get(directory))) {
        for (Path path : directoryStream) {
            if (FilenameUtils.isExtension(path.getFileName().toString(), fileExt)) {
                fileNames.add(path.toString());
            }
        }
    } catch (IOException ex) {}
    return fileNames;
}
 
Example 12   Project: opoopress   File: AbstractWikiTextConverter.java
@Override
public boolean matches(Source src) {
    String name = src.getOrigin().getName().toLowerCase();
    String[] extensions = getInputFileExtensions();
    if (extensions.length == 1) {
        return FilenameUtils.isExtension(name, extensions[0]);
    }

    return FilenameUtils.isExtension(name, extensions);
}
 
Example 13   Project: celerio   File: OutputResultFactory.java
public OutputResult getOutputResult(String userBaseDirectory, String outputDirectory) {
    if (FilenameUtils.isExtension(outputDirectory, new String[]{"zip", "jar"})) {
        return new ZipOutputResult(outputDirectory);
    } else {
        return new FolderOutputResult(newUserSource(userBaseDirectory), newGeneratedSource(outputDirectory), fileTracker);
    }
}
 
Example 14
public static Schema getNewestSchemaFromSource(Path sourceDir, FileSystem fs) throws IOException {
  FileStatus[] files = fs.listStatus(sourceDir);
  Arrays.sort(files, new LastModifiedDescComparator());
  for (FileStatus status : files) {
    if (status.isDirectory()) {
      Schema schema = getNewestSchemaFromSource(status.getPath(), fs);
      if (schema != null)
        return schema;
    } else if (FilenameUtils.isExtension(status.getPath().getName(), AVRO)) {
      return AvroUtils.getSchemaFromDataFile(status.getPath(), fs);
    }
  }
  return null;
}
 
Example 15   Project: opoopress   File: TxtmarkMarkdownConverter.java
@Override
public boolean matches(Source src) {
    String name = src.getOrigin().getName().toLowerCase();
    if (FilenameUtils.isExtension(name, new String[]{"markdown", "md"})) {
        return true;
    }
    if ("post".equals(src.getMeta().get("layout")) && FilenameUtils.isExtension(name, "txt")) {
        return true;
    }
    return false;
}
 
Example 16   Project: opoopress   File: SiteConfigImpl.java
@Override
public boolean accept(File file) {
    String name = file.getName();
    return FilenameUtils.getBaseName(name).startsWith("config")
            && FilenameUtils.isExtension(name, ACCEPTABLE_EXTENSIONS)
            && file.canRead();
}
 
Example 17   Project: spoofax   File: PackSdfLegacy.java
@Override public OutputPersisted<File> build(Input input) throws IOException {
    require(input.inputFile);

    if(input.origin != null) {
        requireBuild(input.origin);
    }
    
    final Arguments args = new Arguments();
    args.addAll(input.extraArgs);
    for(File path : input.includePaths) {
        require(path, FileExistsStamper.instance);
        if(!path.exists()) {
            continue;
        }
        if(FilenameUtils.isExtension(path.getName(), "def")) {
            args.addFile("-Idef", path);
        } else {
            /*
             * HACK: for full incremental soundness, a require on the directory is needed here, since new files can
             * be added to the path, which influence pack-sdf. However, since the Spoofax build generates new files
             * into some of these directories, that would cause the requirement to always be inconsistent, always
             * triggering a rebuild. This is why we omit the requirement.
             * 
             * seba: This could be solved using a customary stamper that only tracks files matching some naming
             * convention.
             */
            args.addFile("-I", path);
        }
    }

    // @formatter:off
    final Arguments arguments = new Arguments()
        .addFile("-i", input.inputFile)
        .addFile("-o", input.outputFile)
        .addAll(args)
        ;
    
    final ExecutionResult result = new StrategoExecutor()
        .withToolsContext()
        .withStrategy(main_pack_sdf_0_0.instance)
        .withTracker(newResourceTracker(Pattern.quote("  including ") + ".*"))
        .withName("pack-sdf")
        .executeCLI(arguments)
        ;
    // @formatter:on 

    provide(input.outputFile);
    for(File required : extractRequiredPaths(result.errLog)) {
        require(required);
    }

    setState(State.finished(result.success));
    return OutputPersisted.of(input.outputFile);
}
 
Example 18   Project: sailfish-core   File: ZipReport.java
@Override
public boolean accept(File pathname) {
    return !FilenameUtils.isExtension(pathname.getName(), ZIP_EXTENSION);
}
 
Example 19   Project: riiablo   File: MPQViewer.java
private void readMPQs() {
  if (fileTreeNodes == null) {
    fileTreeNodes = new PatriciaTrie<>();
    fileTreeCofNodes = new PatriciaTrie<>();
  } else {
    fileTreeNodes.clear();
    fileTreeCofNodes.clear();
  }

  BufferedReader reader = null;
  try {
    //if (options_useExternalList.isChecked()) {
      reader = Gdx.files.internal(ASSETS + "(listfile)").reader(4096);
    //} else {
    //  try {
    //    reader = new BufferedReader(new InputStreamReader((new ByteArrayInputStream(mpq.readBytes("(listfile)")))));
    //  } catch (Throwable t) {
    //    reader = Gdx.files.internal(ASSETS + "(listfile)").reader(4096);
    //  }
    //}

    Node<Node, Object, Actor> root = new BaseNode(new VisLabel("root"));
    final boolean checkExisting = options_checkExisting.isChecked();

    String fileName;
    while ((fileName = reader.readLine()) != null) {
      if (checkExisting && !Riiablo.mpqs.contains(fileName)) {
        continue;
      }

      String path = FilenameUtils.getPathNoEndSeparator(fileName).toLowerCase();
      treeify(fileTreeNodes, root, path);

      final MPQFileHandle handle = (MPQFileHandle) Riiablo.mpqs.resolve(fileName);
      VisLabel label = new VisLabel(FilenameUtils.getName(fileName));
      final Node node = new BaseNode(label);
      node.setValue(handle);
      label.addListener(new ClickListener(Input.Buttons.RIGHT) {
        @Override
        public void clicked(InputEvent event, float x, float y) {
          showPopmenu(node, handle);
        }
      });

      String key = fileName.toLowerCase();
      fileTreeNodes.put(key, node);
      if (FilenameUtils.isExtension(key, "cof")) {
        key = FilenameUtils.getBaseName(key);
        fileTreeCofNodes.put(key, node);
      }
      if (path.isEmpty()) {
        root.add(node);
      } else {
        fileTreeNodes.get(path + "\\").add(node);
      }
    }

    sort(root);
    fileTree.clearChildren();
    for (Node child : root.getChildren()) {
      fileTree.add(child);
    }

    fileTree.layout();
    fileTreeFilter.clearText();
  } catch (IOException e) {
    throw new GdxRuntimeException("Failed to read list file.", e);
  } finally {
    StreamUtils.closeQuietly(reader);
  }
}
 
Example 20   Project: incubator-gobblin   File: MRCompactorJobRunner.java
@Override
public void run() {
  Configuration conf = HadoopUtils.getConfFromState(this.dataset.jobProps());

  // Turn on mapreduce output compression by default
  if (conf.get("mapreduce.output.fileoutputformat.compress") == null && conf.get("mapred.output.compress") == null) {
    conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
  }

  // Disable delegation token cancellation by default
  if (conf.get("mapreduce.job.complete.cancel.delegation.tokens") == null) {
    conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  }

  try {
    DateTime compactionTimestamp = getCompactionTimestamp();
    LOG.info("MR Compaction Job Timestamp " + compactionTimestamp.getMillis());
    if (this.dataset.jobProps().getPropAsBoolean(MRCompactor.COMPACTION_JOB_LATE_DATA_MOVEMENT_TASK, false)) {
      List<Path> newLateFilePaths = Lists.newArrayList();
      for (String filePathString : this.dataset.jobProps()
          .getPropAsList(MRCompactor.COMPACTION_JOB_LATE_DATA_FILES)) {
        if (FilenameUtils.isExtension(filePathString, getApplicableFileExtensions())) {
          newLateFilePaths.add(new Path(filePathString));
        }
      }

      Path lateDataOutputPath = this.outputDeduplicated ? this.dataset.outputLatePath() : this.dataset.outputPath();
      LOG.info(String.format("Copying %d late data files to %s", newLateFilePaths.size(), lateDataOutputPath));
      if (this.outputDeduplicated) {
        if (!this.fs.exists(lateDataOutputPath)) {
          if (!this.fs.mkdirs(lateDataOutputPath)) {
            throw new RuntimeException(
                String.format("Failed to create late data output directory: %s.", lateDataOutputPath.toString()));
          }
        }
      }
      this.copyDataFiles(lateDataOutputPath, newLateFilePaths);
      if (this.outputDeduplicated) {
        dataset.checkIfNeedToRecompact (datasetHelper);
      }
      this.status = Status.COMMITTED;
    } else {
      if (this.fs.exists(this.dataset.outputPath()) && !canOverwriteOutputDir()) {
        LOG.warn(String.format("Output paths %s exists. Will not compact %s.", this.dataset.outputPath(),
            this.dataset.inputPaths()));
        this.status = Status.COMMITTED;
        return;
      }
      addJars(conf);
      Job job = Job.getInstance(conf);
      this.configureJob(job);
      this.submitAndWait(job);
      if (shouldPublishData(compactionTimestamp)) {
        // remove all invalid empty files due to speculative task execution
        List<Path> goodPaths = CompactionJobConfigurator.getGoodFiles(job, this.dataset.outputTmpPath(), this.tmpFs,
            ImmutableList.of("avro"));

        if (!this.recompactAllData && this.recompactFromDestPaths) {
          // append new files without deleting output directory
          addGoodFilesToOutputPath(goodPaths);
          // clean up late data from outputLateDirectory, which has been set to inputPath
          deleteFilesByPaths(this.dataset.inputPaths());
        } else {
          moveTmpPathToOutputPath();
          if (this.recompactFromDestPaths) {
            deleteFilesByPaths(this.dataset.additionalInputPaths());
          }
        }
        submitSlaEvent(job);
        LOG.info("Successfully published data for input folder " + this.dataset.inputPaths());
        this.status = Status.COMMITTED;
      } else {
        LOG.info("Data not published for input folder " + this.dataset.inputPaths() + " due to incompleteness");
        this.status = Status.ABORTED;
        return;
      }
    }
    if (renameSourceDir) {
      MRCompactor.renameSourceDirAsCompactionComplete (this.fs, this.dataset);
    } else {
      this.markOutputDirAsCompleted(compactionTimestamp);
    }
    this.submitRecordsCountsEvent();
  } catch (Throwable t) {
    throw Throwables.propagate(t);
  }
}