com.google.common.collect.LinkedHashMultiset#create() source code example demos

Listed below is example code for com.google.common.collect.LinkedHashMultiset#create(); follow each project link to view the full source on GitHub.
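Before the project examples, here is a minimal self-contained sketch of the three create() overloads provided by Guava (no-argument, with an expected number of distinct elements, and copying an existing Iterable). The class name and sample values below are purely illustrative.

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;
import java.util.Arrays;

public class LinkedHashMultisetCreateDemo {
    public static void main(String[] args) {
        // Empty multiset; elements keep the order in which they were first added.
        Multiset<String> empty = LinkedHashMultiset.create();
        empty.add("a");
        empty.add("b");
        empty.add("a");
        System.out.println(empty); // [a x 2, b]

        // Hint the expected number of distinct elements to size the backing map.
        Multiset<String> sized = LinkedHashMultiset.create(2);
        sized.add("x", 3); // add three occurrences at once
        System.out.println(sized.count("x")); // 3

        // Copy the contents of an existing Iterable.
        Multiset<String> copied =
                LinkedHashMultiset.create(Arrays.asList("foo", "bar", "foo"));
        System.out.println(copied.elementSet()); // [foo, bar]
    }
}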

Example 1  Project: businessworks   File: WeakKeySet.java
public void add(Key<?> key, State state, Object source) {
  if (backingMap == null) {
    backingMap = Maps.newHashMap();
  }
  // If the source is a Class, it was a JIT binding, which we don't
  // want to retain.
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Multiset<Object> sources = backingMap.get(key);
  if (sources == null) {
    sources = LinkedHashMultiset.create();
    backingMap.put(key, sources);
  }
  Object convertedSource = Errors.convert(source);
  sources.add(convertedSource);

  // Avoid all the extra work if we can.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> keyAndSources = evictionCache.getIfPresent(state);
    if (keyAndSources == null) {
      evictionCache.put(state, keyAndSources = Sets.newHashSet());
    }
    keyAndSources.add(new KeyAndSource(key, convertedSource));
  }
}
 
Example 2  Project: xtext-extras   File: TypeConformanceComputer.java
protected LightweightTypeReference doGetCommonSuperType(List<LightweightTypeReference> types) {
	LightweightTypeReference firstType = types.get(0);
	final List<LightweightTypeReference> tail = types.subList(1, types.size());
	// mapping from rawtype to resolved parameterized types
	// used to determine the correct type arguments
	Multimap<JvmType, LightweightTypeReference> all = LinkedHashMultimap.create();
	// cumulated rawtype to max distance (used for sorting)
	Multiset<JvmType> cumulatedDistance = LinkedHashMultiset.create();
	
	initializeDistance(firstType, all, cumulatedDistance);
	cumulateDistance(tail, all, cumulatedDistance);
	
	List<Entry<JvmType>> candidates = Lists.newArrayList(cumulatedDistance.entrySet());
	if (candidates.size() == 1) { // only one super type -> should be java.lang.Object
		JvmType firstRawType = candidates.get(0).getElement();
		return getFirstForRawType(all, firstRawType);
	}
	inplaceSortByDistanceAndName(candidates);
	List<LightweightTypeReference> referencesWithSameDistance = getMostSpecialCandidates(types, all, candidates);
	return wrapInCompoundTypeIfNecessary(referencesWithSameDistance);
}
 
public static void main(String[] args) {
    // Split the text into separate words
    String INPUT_TEXT = "Hello World! Hello All! Hi World!";
    // Create Multiset
    Multiset<String> multiset = LinkedHashMultiset.create(Arrays.asList(INPUT_TEXT.split(" ")));

    // Print each word with its count
    System.out.println(multiset); // prints [Hello x 2, World! x 2, All!, Hi] - in predictable first-insertion order
    // Print all unique words
    System.out.println(multiset.elementSet());    // print [Hello, World!, All!, Hi] - in predictable iteration order

    // Print the number of occurrences of particular words
    System.out.println("Hello = " + multiset.count("Hello"));    // print 2
    System.out.println("World = " + multiset.count("World!"));    // print 2
    System.out.println("All = " + multiset.count("All!"));    // print 1
    System.out.println("Hi = " + multiset.count("Hi"));    // print 1
    System.out.println("Empty = " + multiset.count("Empty"));    // print 0

    // Print the total number of words
    System.out.println(multiset.size());    //print 6

    // Print the number of unique words
    System.out.println(multiset.elementSet().size());    //print 4
}
 
Example 5  Project: batfish   File: TracerouteAnswerer.java
public static Multiset<Row> flowTracesToRows(
    SortedMap<Flow, List<Trace>> flowTraces, int maxTraces) {
  Multiset<Row> rows = LinkedHashMultiset.create();
  for (Map.Entry<Flow, List<Trace>> flowTrace : flowTraces.entrySet()) {
    List<Trace> traces = flowTrace.getValue();
    List<Trace> prunedTraces = TracePruner.prune(traces, maxTraces);
    rows.add(
        Row.of(
            COL_FLOW,
            flowTrace.getKey(),
            COL_TRACES,
            prunedTraces,
            COL_TRACE_COUNT,
            traces.size()));
  }
  return rows;
}
 
Example 6  Project: batfish   File: TracerouteAnswerer.java
public static Multiset<Row> diffFlowTracesToRows(
    Map<Flow, List<Trace>> baseFlowTraces,
    Map<Flow, List<Trace>> deltaFlowTraces,
    int maxTraces) {
  Multiset<Row> rows = LinkedHashMultiset.create();
  checkArgument(
      baseFlowTraces.keySet().equals(deltaFlowTraces.keySet()),
      "Base and delta flow traces should have same flows");
  for (Flow flow : baseFlowTraces.keySet()) {
    rows.add(
        Row.of(
            COL_FLOW,
            flow,
            TableDiff.baseColumnName(COL_TRACES),
            TracePruner.prune(baseFlowTraces.get(flow), maxTraces),
            TableDiff.baseColumnName(COL_TRACE_COUNT),
            baseFlowTraces.get(flow).size(),
            TableDiff.deltaColumnName(COL_TRACES),
            TracePruner.prune(deltaFlowTraces.get(flow), maxTraces),
            TableDiff.deltaColumnName(COL_TRACE_COUNT),
            deltaFlowTraces.get(flow).size()));
  }
  return rows;
}
 
Example 7  Project: gwt-jackson   File: MultisetGwtTest.java
public void testSerialization() {
    BeanWithMultisetTypes bean = new BeanWithMultisetTypes();

    List<String> list = Arrays.asList( "foo", "abc", null, "abc" );
    List<String> listWithNonNull = Arrays.asList( "foo", "abc", "bar", "abc" );

    bean.multiset = LinkedHashMultiset.create( list );
    bean.hashMultiset = HashMultiset.create( Arrays.asList( "abc", "abc" ) );
    bean.linkedHashMultiset = LinkedHashMultiset.create( list );
    bean.sortedMultiset = TreeMultiset.create( listWithNonNull );
    bean.treeMultiset = TreeMultiset.create( listWithNonNull );
    bean.immutableMultiset = ImmutableMultiset.copyOf( listWithNonNull );
    bean.enumMultiset = EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) );

    String expected = "{" +
            "\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"hashMultiset\":[\"abc\",\"abc\"]," +
            "\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"sortedMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
            "\"treeMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
            "\"immutableMultiset\":[\"foo\",\"abc\",\"abc\",\"bar\"]," +
            "\"enumMultiset\":[\"A\",\"A\",\"B\",\"D\"]" +
            "}";

    assertEquals( expected, BeanWithMultisetTypesMapper.INSTANCE.write( bean ) );
}
 
Example 8  Project: xtext-extras   File: TypeConformanceComputer.java
/**
 * Keeps the cumulated distance for all the common raw super types of the given references.
 * Interfaces that are more directly implemented will get a lower total count than more general
 * interfaces.
 */
protected void cumulateDistance(final List<LightweightTypeReference> references, Multimap<JvmType, LightweightTypeReference> all,
		Multiset<JvmType> cumulatedDistance) {
	for(LightweightTypeReference other: references) {
		Multiset<JvmType> otherDistance = LinkedHashMultiset.create();
		initializeDistance(other, all, otherDistance);
		cumulatedDistance.retainAll(otherDistance);
		for(Multiset.Entry<JvmType> typeToDistance: otherDistance.entrySet()) {
			if (cumulatedDistance.contains(typeToDistance.getElement()))
				cumulatedDistance.add(typeToDistance.getElement(), typeToDistance.getCount());
		}
	}
}
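
As a hypothetical standalone illustration of the pattern used above (retainAll() keeps only the super types already seen, then add(element, count) accumulates the new distances), the following sketch uses plain strings in place of JvmType; all names and distance values are made up.

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;

public class CumulatedDistanceDemo {
    public static void main(String[] args) {
        // Distances of the common super types seen for the first type.
        Multiset<String> cumulated = LinkedHashMultiset.create();
        cumulated.add("Object", 2);
        cumulated.add("Comparable", 1);
        cumulated.add("Serializable", 1);

        // Distances computed for another type in the list.
        Multiset<String> other = LinkedHashMultiset.create();
        other.add("Object", 3);
        other.add("Comparable", 2);
        // note: no "Serializable" here

        // Keep only the super types common to both, then add the new distances.
        cumulated.retainAll(other);
        for (Multiset.Entry<String> entry : other.entrySet()) {
            if (cumulated.contains(entry.getElement())) {
                cumulated.add(entry.getElement(), entry.getCount());
            }
        }
        System.out.println(cumulated); // [Object x 5, Comparable x 3]
    }
}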
 
Example 9  Project: registry   File: Schema.java
private static Multiset<Field> parseArray(List<Object> array) throws ParserException {
    Multiset<Field> members = LinkedHashMultiset.create();
    for(Object member: array) {
        members.add(parseField(null, member));
    }
    return members;
}
 
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
  UndefinedReferencesQuestion question = (UndefinedReferencesQuestion) _question;

  // Find all the filenames that produced the queried nodes. This might have false positives if
  // a file produced multiple nodes, but that was already mis-handled before. Need to rewrite
  // this question as a TableAnswerElement.
  Set<String> includeNodes =
      question.getNodeSpecifier().resolve(_batfish.specifierContext(snapshot));
  Multimap<String, String> hostnameFilenameMap =
      _batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
  Set<String> includeFiles =
      hostnameFilenameMap.entries().stream()
          .filter(e -> includeNodes.contains(e.getKey()))
          .map(Entry::getValue)
          .collect(Collectors.toSet());

  Multiset<Row> rows = LinkedHashMultiset.create();
  SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
      undefinedReferences =
          _batfish
              .loadConvertConfigurationAnswerElementOrReparse(snapshot)
              .getUndefinedReferences();
  undefinedReferences.entrySet().stream()
      .filter(e -> includeFiles.contains(e.getKey()))
      .forEach(e -> rows.addAll(processEntryToRows(e)));

  TableAnswerElement table = new TableAnswerElement(createMetadata());
  table.postProcessAnswer(_question, rows);
  return table;
}
 
Example 11  Project: batfish   File: UnusedStructuresQuestionPlugin.java
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
  UnusedStructuresQuestion question = (UnusedStructuresQuestion) _question;

  // Find all the filenames that produced the queried nodes. This might have false positives if
  // a file produced multiple nodes, but that was already mis-handled before. Need to rewrite
  // this question as a TableAnswerElement.
  Set<String> includeNodes =
      question.getNodeSpecifier().resolve(_batfish.specifierContext(snapshot));
  Multimap<String, String> hostnameFilenameMap =
      _batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
  Set<String> includeFiles =
      hostnameFilenameMap.entries().stream()
          .filter(e -> includeNodes.contains(e.getKey()))
          .map(Entry::getValue)
          .collect(Collectors.toSet());

  Multiset<Row> rows = LinkedHashMultiset.create();
  SortedMap<String, SortedMap<String, SortedMap<String, DefinedStructureInfo>>>
      definedStructures =
          _batfish
              .loadConvertConfigurationAnswerElementOrReparse(snapshot)
              .getDefinedStructures();
  definedStructures.entrySet().stream()
      .filter(e -> includeFiles.contains(e.getKey()))
      .forEach(e -> rows.addAll(processEntryToRows(e)));

  TableAnswerElement table = new TableAnswerElement(createMetadata(question));
  table.postProcessAnswer(_question, rows);
  return table;
}
 
@Override
protected LinkedHashMultiset<Object> createMultiset() {
    return LinkedHashMultiset.create();
}
 
Example 13  Project: batfish   File: ReferencedStructuresAnswerer.java
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
  ReferencedStructuresQuestion question = (ReferencedStructuresQuestion) _question;
  Set<String> includeNodes = question.getNodes().getMatchingNodes(_batfish, snapshot);
  Multimap<String, String> hostnameFilenameMap =
      _batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
  Set<String> includeFiles =
      hostnameFilenameMap.entries().stream()
          .filter(e -> includeNodes.contains(e.getKey()))
          .map(Entry::getValue)
          .collect(Collectors.toSet());

  Pattern includeStructureNames = Pattern.compile(question.getNames(), Pattern.CASE_INSENSITIVE);
  Pattern includeStructureTypes = Pattern.compile(question.getTypes(), Pattern.CASE_INSENSITIVE);

  Multiset<Row> rows = LinkedHashMultiset.create();
  _batfish
      .loadConvertConfigurationAnswerElementOrReparse(snapshot)
      .getReferencedStructures()
      .forEach(
          (filename, value) -> {
            if (!includeFiles.contains(filename)) {
              return;
            }
            List<Row> rows1 = new ArrayList<>();
            value.forEach(
                (structType, byName) -> {
                  if (!includeStructureTypes.matcher(structType).matches()) {
                    return;
                  }
                  byName.forEach(
                      (name, byContext) -> {
                        if (!includeStructureNames.matcher(name).matches()) {
                          return;
                        }
                        byContext.forEach(
                            (context, lineNums) -> {
                              rows1.add(
                                  Row.of(
                                      COL_STRUCTURE_TYPE,
                                      structType,
                                      COL_STRUCTURE_NAME,
                                      name,
                                      COL_CONTEXT,
                                      context,
                                      COL_SOURCE_LINES,
                                      new FileLines(filename, lineNums)));
                            });
                      });
                });
            rows.addAll(rows1);
          });

  TableAnswerElement table = new TableAnswerElement(createMetadata());
  table.postProcessAnswer(_question, rows);
  return table;
}
 
@Override
protected LinkedHashMultiset<T> newCollection() {
    return LinkedHashMultiset.create();
}
 
Example 15  Project: gwt-jackson   File: MultisetJsonDeserializer.java
@Override
protected Multiset<T> newCollection() {
    return LinkedHashMultiset.create();
}
 