Source code examples for org.apache.commons.io.LineIterator

The following examples show how to use the org.apache.commons.io.LineIterator API; follow the project links to view the full source code on GitHub.
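Before the project examples, a minimal usage sketch may help orient the reader. This is a hedged sketch, not taken from any of the projects below: it assumes commons-io 2.6 or later (where LineIterator implements Closeable, so try-with-resources can close it), and the file name input.txt is a hypothetical placeholder.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;

public class LineIteratorSketch {
    public static void main(String[] args) throws IOException {
        File file = new File("input.txt"); // hypothetical path
        // FileUtils.lineIterator streams the file line by line instead of loading it all into memory.
        try (LineIterator it = FileUtils.lineIterator(file, StandardCharsets.UTF_8.name())) {
            while (it.hasNext()) {
                String line = it.nextLine();
                System.out.println(line); // process each line here
            }
        } // on commons-io versions before 2.6, close with it.close() or LineIterator.closeQuietly(it) instead
    }
}

Most of the examples below follow the same hasNext()/nextLine() loop, differing mainly in how the iterator is obtained (FileUtils.lineIterator, IOUtils.lineIterator, or the LineIterator constructor) and how it is closed.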

Example 1  Project: MergeProcessor   File: SvnMergeTask.java
/**
 * Returns the package name by parsing the content of the class.
 * 
 * @param javaClassPath the path of the class to parse
 * @return the package name if it could be parsed from the content
 */
private static Optional<String> getPackageNameFromClassContent(final Path javaClassPath) {
	try {
		final LineIterator lineIterator = FileUtils.lineIterator(javaClassPath.toFile());
		while (lineIterator.hasNext()) {
			final String line = lineIterator.next();
			if (line.startsWith("package ")) {
				return Optional.of(line.substring(8, line.indexOf(';')));
			}
		}
		LogUtil.getLogger().warning("No Package could be parsed from the Java file content: " + javaClassPath);
	} catch (IOException e) {
		LogUtil.getLogger().log(Level.SEVERE,
				"An error occurred during parsing the Java file content: " + javaClassPath, e);
	}
	return Optional.empty();
}
 
Example 2  Project: common_gui_tools   File: TextFileSplit.java
private String loopSplitFile(LineIterator lineIterator, int splitLength, String splitFileName, String targetEncoding) throws IOException {
    File splitFile = new File(splitResultDirectory + "/" + splitFileName);
    int lineCnt = 0;
    List<String> lines;
    List<String> writeLines = new ArrayList<String>();
    while ((lines = iteratorLine(lineIterator, 100, splitLength, lineCnt)).size() != 0) {
        lineCnt += lines.size();
        writeLines.addAll(lines);
        if (currentSplitType.equals(GuiUtils.SPLIT_TYPE_SIZE) && splitFile.getTotalSpace() >= splitLength * 1024 * 1024) {
            break;
        }
    }
    if (writeLines.size() != 0) {
        org.apache.commons.io.FileUtils.writeLines(splitFile, targetEncoding, writeLines, currentLineSeparator);
        return splitFile.getAbsolutePath();
    } else {
        return null;
    }
}
 
Example 3  Project: systemsgenetics   File: DeconvolutionTest.java
@Test
public void writeDeconvolutionResultPredictedExpressionTest() throws Exception {
	String[] args = {"-o",outputDir+"deconvolutionPredictedExpression","-c",counts,
					 "-e",expression, "-g", genotypes,
					 "-sn", geneSnpList, "-oe"};
	commandLineOptions.parseCommandLine(args);
	Deconvolution deconvolution = new Deconvolution(commandLineOptions);
	deconvolution.readInputData();
	List<DeconvolutionResult> deconvolutionResults = deconvolution.runDeconPerGeneSnpPair();
	deconvolution.writeDeconvolutionResults(deconvolutionResults);

	LineIterator predictedResults = FileUtils.lineIterator(new File(outputDir+"deconvolutionPredictedExpression/predictedExpressionLevels.txt"), "UTF-8");
	LineIterator predictedExpected = FileUtils.lineIterator(new File("src/test/resources/expected_results/expectedPredictedExpressionLevels.txt"), "UTF-8");
	//test if header is same
	assertEquals("File header the same",predictedExpected.next(),predictedResults.next());
	while (predictedResults.hasNext() && predictedExpected.hasNext()){
		ArrayList<String> deconResultsStringVector = new ArrayList<String>(Arrays.asList(predictedResults.next().split("\t")));
		ArrayList<String> deconExpectedStringVector = new ArrayList<String>(Arrays.asList(predictedExpected.next().split("\t")));
		assertEquals("Predicted expression same as expected", deconExpectedStringVector, deconResultsStringVector);
		assertEquals("QTL name the same", deconExpectedStringVector.remove(0), deconResultsStringVector.remove(0));
	}
}
 
Example 4  Project: deeplearning4j   File: SharedFlatMapPaths.java
@Override
public Iterator<R> call(Iterator<String> dataSetIterator) throws Exception {
    //Under some limited circumstances, we might have an empty partition. In this case, we should return immediately
    if(!dataSetIterator.hasNext()){
        return Collections.emptyIterator();
    }
    // here we'll be converting our Strings coming out of the iterator to DataSets
    // PathSparkDataSetIterator does that for us
    //For better fault tolerance, we'll pull all paths to a local file. This way, if the Iterator<String> is backed
    // by a remote source that later goes down, we won't fail (as long as the source is still available)
    File f = SharedFlatMapPaths.toTempFile(dataSetIterator);

    LineIterator lineIter = new LineIterator(new FileReader(f));    //Buffered reader added automatically
    try {
        // iterator should be silently attached to VirtualDataSetIterator, and used appropriately
        SharedTrainingWrapper.getInstance(worker.getInstanceId()).attachDS(new PathSparkDataSetIterator(lineIter, loader, hadoopConfig));

        // first callee will become master, others will obey and die
        SharedTrainingResult result = SharedTrainingWrapper.getInstance(worker.getInstanceId()).run(worker);

        return Collections.singletonList((R) result).iterator();
    } finally {
        lineIter.close();
        f.delete();
    }
}
 
Example 5  Project: systemsgenetics   File: DeconvolutionTest.java
@Test
public void deconvolutionResultRoundDosageTest() throws Exception {
	String[] args = {"-o",outputDir+"deconvolutionRoundedDosage","-c",counts,
					 "-e",expression, "-g", genotypes,
					 "-sn", geneSnpList, "-r"};
	commandLineOptions.parseCommandLine(args);
	Deconvolution deconvolution = new Deconvolution(commandLineOptions);
	deconvolution.readInputData();
	List<DeconvolutionResult> deconvolutionResults = deconvolution.runDeconPerGeneSnpPair();
	deconvolution.writeDeconvolutionResults(deconvolutionResults);

	LineIterator deconResults = FileUtils.lineIterator(new File(outputDir+"deconvolutionRoundedDosage/deconvolutionResults.csv"), "UTF-8");
	LineIterator deconExpected = FileUtils.lineIterator(new File("src/test/resources/expected_results/deconRoundDosageExpected.txt"), "UTF-8");
	//test if header is same
	assertEquals("File header the same",deconExpected.next(),deconResults.next());
	while (deconResults.hasNext() && deconExpected.hasNext()){
		ArrayList<String> deconResultsStringVector = new ArrayList<String>(Arrays.asList(deconResults.next().split("\t")));
		ArrayList<String> deconExpectedStringVector = new ArrayList<String>(Arrays.asList(deconExpected.next().split("\t")));
		assertEquals("Deconresult same as expected", deconExpectedStringVector, deconResultsStringVector);
		assertEquals("QTL name the same", deconExpectedStringVector.remove(0), deconResultsStringVector.remove(0));
	}
}
 
Example 6  Project: metadata-qa-marc   File: CodeFileReader.java
public static List<Code> fileToCodeList(String fileName) {
    List<Code> codes = new ArrayList<>();
    try {
        LineIterator it = getLineIterator(fileName);
        while (it.hasNext()) {
            String line = it.nextLine();
            if (line.equals("") || line.startsWith("#"))
                continue;
            String[] parts = line.split(";", 2);
            codes.add(new Code(parts[0], parts[1]));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }

    return codes;
}
 
Example 7  Project: deeplearning4j   File: ParagraphVectorsTest.java
public static SentenceIterator getIterator(boolean isIntegration, File file, int linesForUnitTest) throws IOException {
    if(isIntegration){
        return new BasicLineIterator(file);
    } else {
        List<String> lines = new ArrayList<>();
        try(InputStream is = new BufferedInputStream(new FileInputStream(file))){
            LineIterator lineIter = IOUtils.lineIterator(is, StandardCharsets.UTF_8);
            try{
                for( int i=0; i<linesForUnitTest && lineIter.hasNext(); i++ ){
                    lines.add(lineIter.next());
                }
            } finally {
                lineIter.close();
            }
        }

        return new CollectionSentenceIterator(lines);
    }
}
 
Example 8  Project: sonar-ruby-plugin   File: ClassCountParser.java
public static int countClasses(File file) {
    int numClasses = 0;
    LineIterator iterator = null;
    try {
        iterator = FileUtils.lineIterator(file);

        while (iterator.hasNext()) {
            String line = iterator.nextLine();
            if (StringUtils.contains(line.trim(), "class ")) {
                numClasses++;
            }
        }
    } catch (IOException e) {
        LOG.error("Error determining class count for file " + file, e);
    } finally {
        LineIterator.closeQuietly(iterator);
    }

    return numClasses;
}
 
public InMemoryArchiveIdentificationService addMappingsFrom(File file)
{
    try (FileInputStream inputStream = new FileInputStream(file))
    {
        LineIterator iterator = IOUtils.lineIterator(inputStream, "UTF-8");
        int lineNumber = 0;
        while (iterator.hasNext())
        {
            lineNumber++;
            String line = iterator.next();
            if (line.startsWith("#") || line.trim().isEmpty())
                continue;
            String[] parts = StringUtils.split(line, ' ');
            if (parts.length < 2)
                throw new IllegalArgumentException("Expected 'SHA1 GROUP_ID:ARTIFACT_ID:[PACKAGING:[COORDINATE:]]VERSION', but was: [" + line
                            + "] in [" + file + "] at line [" + lineNumber + "]");

            addMapping(parts[0], parts[1]);
        }
    }
    catch (IOException e)
    {
        throw new WindupException("Failed to load SHA1 to " + Coordinate.class.getSimpleName() + " definitions from [" + file + "]", e);
    }
    return this;
}
 
Example 10  Project: cloudml   File: DockerConnector.java
public void BuildImageFromDockerFile(String path, String tag){
    File baseDir = new File(path);

    InputStream response = dockerClient.buildImageCmd(baseDir)
            .withNoCache()
            .withTag(tag)
            .exec();

    StringWriter logwriter = new StringWriter();

    try {
        LineIterator itr = IOUtils.lineIterator(response, "UTF-8");
        while (itr.hasNext()) {
            String line = itr.next();
            logwriter.write(line);
            journal.log(Level.INFO,line);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeQuietly(response);
    }
}
 
Example 11  Project: windup   File: SkippedArchives.java
/**
 * Load the given configuration file.
 */
public static void load(File file)
{
    try(FileInputStream inputStream = new FileInputStream(file))
    {
        LineIterator it = IOUtils.lineIterator(inputStream, "UTF-8");
        while (it.hasNext())
        {
            String line = it.next();
            if (!line.startsWith("#") && !line.trim().isEmpty())
            {
                add(line);
            }
        }
    }
    catch (Exception e)
    {
        throw new WindupException("Failed loading archive ignore patterns from [" + file.toString() + "]", e);
    }
}
 
Example 12  Project: sawmill   File: SawmillBenchmark.java
private Iterator<Doc> extractDocs(File file) {
    List<Doc> docs = new ArrayList<>();
    try {
        LineIterator lineIterator = FileUtils.lineIterator(file, "UTF-8");
        while (lineIterator.hasNext()) {
            String line = lineIterator.next();
            if (!line.isEmpty()) {
                docs.add(new Doc(JsonUtils.fromJsonString(Map.class, line)));
            }
        }

    } catch (Exception e) {
        throw new RuntimeException("failed to extract docs from file [" + file + "]", e);
    }

    return Iterables.cycle(docs).iterator();
}
 
Example 13  Project: dkpro-c4corpus   File: StatisticsTableCreator.java
public static Table<String, String, Long> loadTable(InputStream stream)
        throws IOException
{
    Table<String, String, Long> result = TreeBasedTable.create();

    LineIterator lineIterator = IOUtils.lineIterator(stream, "utf-8");
    while (lineIterator.hasNext()) {
        String line = lineIterator.next();

        System.out.println(line);

        String[] split = line.split("\t");
        String language = split[0];
        String license = split[1];
        Long documents = Long.valueOf(split[2]);
        Long tokens = Long.valueOf(split[3]);

        result.put(language, "docs " + license, documents);
        result.put(language, "tokens " + license, tokens);
    }

    return result;
}
 
Example 14  Project: dkpro-c4corpus   File: TopNWordsCorrelation.java
public static LinkedHashMap<String, Integer> loadCorpusToRankedVocabulary(InputStream corpus)
        throws IOException
{
    LinkedHashMap<String, Integer> result = new LinkedHashMap<>();

    LineIterator lineIterator = IOUtils.lineIterator(corpus, "utf-8");
    int counter = 0;
    while (lineIterator.hasNext()) {
        String line = lineIterator.next();

        String word = line.split("\\s+")[0];

        result.put(word, counter);
        counter++;
    }

    return result;
}
 
Example 15  Project: deeplearning4j   File: Word2VecTests.java
public static List<String> firstNLines(File f, int n){
    List<String> lines = new ArrayList<>();
    try(InputStream is = new BufferedInputStream(new FileInputStream(f))){
        LineIterator lineIter = IOUtils.lineIterator(is, StandardCharsets.UTF_8);
        try{
            for( int i=0; i<n && lineIter.hasNext(); i++ ){
                lines.add(lineIter.next());
            }
        } finally {
            lineIter.close();
        }
        return lines;
    } catch (IOException e){
        throw new RuntimeException(e);
    }
}
 
Example 16  Project: entity-fishing   File: WikipediaDomainMap.java
/**
 * Import the GRISP general domains
 */
private void importDomains() throws IOException {
    domain2id = new HashMap<String, Integer>();
    id2domain = new HashMap<Integer, String>();

    LineIterator domainIterator = FileUtils.lineIterator(new File(grispDomains));
    int n = 0;
    while (domainIterator.hasNext()) {
        String line = domainIterator.next();
        final String domain = line.replace('\t', ' ').trim();
        domain2id.put(domain, new Integer(n));
        id2domain.put(new Integer(n), domain);
        n++;
    }
    LineIterator.closeQuietly(domainIterator);
}
 
Example 17  Project: modernmt   File: ChineseCharacterConverter.java
private static Map<Integer, Integer> loadDictionary(String filename) {
    HashMap<Integer, Integer> result = new HashMap<>();

    InputStream stream = null;
    LineIterator iterator = null;

    try {
        stream = ChineseCharacterConverter.class.getResourceAsStream(filename);
        iterator = IOUtils.lineIterator(stream, "UTF-8");
        while (iterator.hasNext()) {
            String line = iterator.nextLine();
            String[] keyValues = line.split("\t", 2);
            Integer key = keyValues[0].codePointAt(0);
            Integer value = keyValues[1].codePointAt(0);
            result.put(key, value);
        }

        return result;
    } catch (IOException e) {
        throw new Error(e);
    } finally {
        IOUtils.closeQuietly(stream);
        if (iterator != null)
            iterator.close();
    }
}
 
Example 18  Project: modernmt   File: StatisticalChineseAnnotator.java
private static Dictionary load(String filename) throws IOException {
    InputStream stream = null;

    try {
        HashSet<String> words = new HashSet<>(21000);
        int maxLength = 0;

        stream = StatisticalChineseAnnotator.class.getResourceAsStream(filename);

        LineIterator lines = IOUtils.lineIterator(stream, Charset.forName("UTF-8"));
        while (lines.hasNext()) {
            String line = lines.nextLine();

            words.add(line);
            maxLength = Math.max(maxLength, line.length());
        }

        return new Dictionary(maxLength, words);
    } finally {
        IOUtils.closeQuietly(stream);
    }
}
 
Example 19  Project: modernmt   File: EmbeddedCassandra.java
private void waitForStartupCompleted() throws IOException {
    for (int i = 0; i < 100; i++) {
        if (!this.process.isAlive())
            throw new IOException("Unable to start Cassandra process, more details here: " + this.logFile.getAbsolutePath());

        LineIterator lines = FileUtils.lineIterator(this.logFile, UTF8Charset.get().name());

        while (lines.hasNext()) {
            String line = lines.next();

            if (line.contains("Starting listening for CQL clients"))
                return;
        }

        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            throw new IOException("Unexpected interruption", e);
        }
    }

    throw new IOException("Cassandra process startup timeout, more details here: " + this.logFile.getAbsolutePath());
}
 
Example 20  Project: deeplearning4j   File: SharedFlatMapPathsMDS.java
@Override
public Iterator<R> call(Iterator<String> dataSetIterator) throws Exception {
    //Under some limited circumstances, we might have an empty partition. In this case, we should return immediately
    if(!dataSetIterator.hasNext()){
        return Collections.emptyIterator();
    }
    // here we'll be converting our Strings coming out of the iterator to DataSets
    // PathSparkDataSetIterator does that for us
    //For better fault tolerance, we'll pull all paths to a local file. This way, if the Iterator<String> is backed
    // by a remote source that later goes down, we won't fail (as long as the source is still available)
    File f = SharedFlatMapPaths.toTempFile(dataSetIterator);

    LineIterator lineIter = new LineIterator(new FileReader(f));    //Buffered reader added automatically
    try {
        // iterator should be silently attached to VirtualDataSetIterator, and used appropriately
        SharedTrainingWrapper.getInstance(worker.getInstanceId()).attachMDS(new PathSparkMultiDataSetIterator(lineIter, loader, hadoopConfig));

        // first callee will become master, others will obey and die
        SharedTrainingResult result = SharedTrainingWrapper.getInstance(worker.getInstanceId()).run(worker);

        return Collections.singletonList((R) result).iterator();
    } finally {
        lineIter.close();
        f.delete();
    }
}
 
Example 21  Project: webanno   File: MiraAutomationServiceImpl.java
/**
 * Check if a TAB-Sep training file is in correct format before importing
 */
private boolean isTabSepFileFormatCorrect(File aFile)
{
    try {
        LineIterator it = new LineIterator(new FileReader(aFile));
        while (it.hasNext()) {
            String line = it.next();
            if (line.trim().length() == 0) {
                continue;
            }
            if (line.split("\t").length != 2) {
                return false;
            }
        }
    }
    catch (Exception e) {
        return false;
    }
    return true;
}
 
Example 22  Project: hiped2   File: Main.java
public static void createInputFile(Configuration conf, Path file, Path targetFile,
                                   String startNode)
    throws IOException {
  FileSystem fs = file.getFileSystem(conf);

  OutputStream os = fs.create(targetFile);
  LineIterator iter = org.apache.commons.io.IOUtils
      .lineIterator(fs.open(file), "UTF8");
  while (iter.hasNext()) {
    String line = iter.nextLine();

    String[] parts = StringUtils.split(line);
    int distance = Node.INFINITE;
    if (startNode.equals(parts[0])) {
      distance = 0;
    }
    IOUtils.write(parts[0] + '\t' + String.valueOf(distance) + "\t\t",
        os);
    IOUtils.write(StringUtils.join(parts, '\t', 1, parts.length), os);
    IOUtils.write("\n", os);
  }

  os.close();
}
 
Example 23  Project: deeplearning4j   File: TestStrumpf.java
@Test
public void testResolvingActual() throws Exception {
    File f = Resources.asFile("data/irisSvmLight.txt");
    assertTrue(f.exists());

    //System.out.println(f.getAbsolutePath());
    int count = 0;
    try(Reader r = new BufferedReader(new FileReader(f))){
        LineIterator iter = IOUtils.lineIterator(r);
        while(iter.hasNext()){
            String line = iter.next();
            //System.out.println("LINE " + i + ": " + line);
            count++;
        }
    }

    assertEquals(12, count);        //Iris normally has 150 examples; this is subset with 12
}
 
Example 24  Project: api-mining   File: FrequentSequenceMiner.java
/** Read in frequent sequences (sorted by support) */
public static SortedMap<Sequence, Integer> readFrequentSequences(final File output) throws IOException {
	final HashMap<Sequence, Integer> sequences = new HashMap<>();

	final LineIterator it = FileUtils.lineIterator(output);
	while (it.hasNext()) {
		final String line = it.nextLine();
		if (!line.trim().isEmpty()) {
			final String[] splitLine = line.split("#SUP:");
			final String[] items = splitLine[0].trim().split("-1");
			final Sequence seq = new Sequence();
			for (final String item : items)
				seq.add(Integer.parseInt(item.trim()));
			final int supp = Integer.parseInt(splitLine[1].trim());
			sequences.put(seq, supp);
		}
	}
	// Sort sequences by support
	final Ordering<Sequence> comparator = Ordering.natural().reverse().onResultOf(Functions.forMap(sequences))
			.compound(Ordering.usingToString());
	return ImmutableSortedMap.copyOf(sequences, comparator);
}
 
Example 25  Project: logstash   File: ResponseCollector.java
public static String collectResponse(InputStream response) {
    StringWriter logwriter = new StringWriter();

    try {
        LineIterator itr = IOUtils.lineIterator(response, "UTF-8");

        while (itr.hasNext()) {
            String line = (String) itr.next();
            logwriter.write(line + (itr.hasNext() ? "\n" : ""));
        }
        response.close();

        return logwriter.toString();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(response);
    }
}
 
Example 26  Project: CratesPlus   File: CratesPlus.java
public String uploadFile(String fileName) {
    File file = new File(getDataFolder(), fileName);
    if (!file.exists())
        return null;
    LineIterator it;
    String lines = "";
    try {
        it = FileUtils.lineIterator(file, "UTF-8");
        try {
            while (it.hasNext()) {
                String line = it.nextLine();
                lines += line + "\n";
            }
        } finally {
            it.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return MCDebug.paste(fileName, lines);
}
 
/** Read in SQS sequences (sorted by worth) */
public static LinkedHashMap<Sequence, Double> readSQSSequences(final File output) throws IOException {
	final LinkedHashMap<Sequence, Double> sequences = new LinkedHashMap<>();

	final LineIterator it = FileUtils.lineIterator(output);
	while (it.hasNext()) {
		final String line = it.nextLine();
		if (!line.trim().isEmpty()) {
			final String[] splitLine = line.split("  ");
			final String[] items = splitLine[0].split(" ");
			final Sequence seq = new Sequence();
			for (final String item : items)
				seq.add(Integer.parseInt(item));
			final double worth = Double.parseDouble(splitLine[1].split(" ")[1]);
			sequences.put(seq, worth);
		}
	}

	return sequences;
}
 
/** Read in GoKrimp sequences (sorted by compression benefit) */
public static LinkedHashMap<Sequence, Double> readGoKrimpSequences(final File output) throws IOException {
	final LinkedHashMap<Sequence, Double> sequences = new LinkedHashMap<>();

	final LineIterator it = FileUtils.lineIterator(output);
	while (it.hasNext()) {
		final String line = it.nextLine();
		if (!line.trim().isEmpty() && line.charAt(0) == '[') {
			final String[] splitLine = line.split(" ");
			final double worth = Double.parseDouble(splitLine[splitLine.length - 1]);
			final Sequence seq = new Sequence();
			for (int i = 1; i < splitLine.length - 2; i++)
				seq.add(Integer.parseInt(splitLine[i]));
			sequences.put(seq, worth);
		}
	}

	return sequences;
}
 
/**
 * Read in GOKRIMP sequences (sorted by compression benefit)
 *
 * @deprecated gives slightly different results to reference implementation
 */
@Deprecated
public static LinkedHashMap<Sequence, Double> readGoKrimpSequencesSPMF(final File output) throws IOException {
	final LinkedHashMap<Sequence, Double> sequences = new LinkedHashMap<>();

	final LineIterator it = FileUtils.lineIterator(output);
	while (it.hasNext()) {
		final String line = it.nextLine();
		if (!line.trim().isEmpty()) {
			final String[] splitLine = line.split("#SUP:");
			final String[] items = splitLine[0].trim().split(" ");
			final Sequence seq = new Sequence();
			for (final String item : items)
				seq.add(Integer.parseInt(item.trim()));
			final double compressionBenefit = Double.parseDouble(splitLine[1].trim());
			sequences.put(seq, compressionBenefit);
		}
	}

	return sequences;
}
 
Example 30  Project: sequence-mining   File: FrequentSequenceMining.java
/** Read in frequent sequences (sorted by support) */
public static SortedMap<Sequence, Integer> readFrequentSequences(final File output) throws IOException {
	final HashMap<Sequence, Integer> sequences = new HashMap<>();

	final LineIterator it = FileUtils.lineIterator(output);
	while (it.hasNext()) {
		final String line = it.nextLine();
		if (!line.trim().isEmpty()) {
			final String[] splitLine = line.split("#SUP:");
			final String[] items = splitLine[0].trim().split("-1");
			final Sequence seq = new Sequence();
			for (final String item : items)
				seq.add(Integer.parseInt(item.trim()));
			final int supp = Integer.parseInt(splitLine[1].trim());
			sequences.put(seq, supp);
		}
	}
	// Sort sequences by support
	final Ordering<Sequence> comparator = Ordering.natural().reverse().onResultOf(Functions.forMap(sequences))
			.compound(Ordering.usingToString());
	return ImmutableSortedMap.copyOf(sequences, comparator);
}
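
Several of the examples above obtain the iterator from an InputStream via IOUtils.lineIterator rather than from a File. A minimal, self-contained sketch of that variant follows; the classpath resource /data.txt is a hypothetical placeholder, not something used by the projects above.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;

public class StreamLineIteratorSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical classpath resource; substitute any InputStream source.
        try (InputStream in = StreamLineIteratorSketch.class.getResourceAsStream("/data.txt")) {
            LineIterator it = IOUtils.lineIterator(in, StandardCharsets.UTF_8);
            while (it.hasNext()) {
                System.out.println(it.nextLine());
            }
        } // closing the stream also releases the iterator's underlying reader
    }
}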
 