下面列出了如何使用 com.google.common.collect.LinkedHashMultiset API 的实例代码及写法;也可以点击链接到 GitHub 查看源代码。
/**
 * Records {@code source} as one origin of a binding for {@code key}, and — when
 * {@code state} has a parent — registers the pair in the eviction cache so it can
 * be cleaned up later.
 */
public void add(Key<?> key, State state, Object source) {
  if (backingMap == null) {
    backingMap = Maps.newHashMap();
  }
  // A Class source means this was a JIT binding, which we don't want to retain;
  // likewise drop the UNKNOWN_SOURCE sentinel.
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Multiset<Object> sourcesForKey = backingMap.get(key);
  if (sourcesForKey == null) {
    sourcesForKey = LinkedHashMultiset.create();
    backingMap.put(key, sourcesForKey);
  }
  Object convertedSource = Errors.convert(source);
  sourcesForKey.add(convertedSource);
  // Skip the eviction bookkeeping entirely when there is no parent state.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> tracked = evictionCache.getIfPresent(state);
    if (tracked == null) {
      tracked = Sets.newHashSet();
      evictionCache.put(state, tracked);
    }
    tracked.add(new KeyAndSource(key, convertedSource));
  }
}
/**
 * Computes the common super type of the given list of type references
 * (assumes {@code types} is non-empty; the first element seeds the search).
 * Collects, per raw super type, the parameterized occurrences (to later resolve
 * type arguments) and a cumulated distance used to rank candidates.
 */
protected LightweightTypeReference doGetCommonSuperType(List<LightweightTypeReference> types) {
LightweightTypeReference firstType = types.get(0);
final List<LightweightTypeReference> tail = types.subList(1, types.size());
// mapping from rawtype to resolved parameterized types
// used to determine the correct type arguments
Multimap<JvmType, LightweightTypeReference> all = LinkedHashMultimap.create();
// cumulated rawtype to max distance (used for sorting)
Multiset<JvmType> cumulatedDistance = LinkedHashMultiset.create();
// seed with the first type, then fold in distances from the remaining types;
// both helpers mutate 'all' and the distance multiset in place
initializeDistance(firstType, all, cumulatedDistance);
cumulateDistance(tail, all, cumulatedDistance);
List<Entry<JvmType>> candidates = Lists.newArrayList(cumulatedDistance.entrySet());
if (candidates.size() == 1) { // only one super type -> should be java.lang.Object
JvmType firstRawType = candidates.get(0).getElement();
return getFirstForRawType(all, firstRawType);
}
// order candidates using the helper's distance/name comparison before selection
inplaceSortByDistanceAndName(candidates);
List<LightweightTypeReference> referencesWithSameDistance = getMostSpecialCandidates(types, all, candidates);
return wrapInCompoundTypeIfNecessary(referencesWithSameDistance);
}
public static void main(String[] args) {
  // Sample sentence, split into whitespace-separated tokens.
  final String text = "Hello World! Hello All! Hi World!";
  // LinkedHashMultiset counts duplicates while keeping first-insertion order.
  Multiset<String> wordCounts = LinkedHashMultiset.create(Arrays.asList(text.split(" ")));
  // Words with their occurrence counts.
  System.out.println(wordCounts); // print [Hello x 2, World! x 2, All!, Hi]- in predictable iteration order
  // Unique words only.
  System.out.println(wordCounts.elementSet()); // print [Hello, World!, All!, Hi] - in predictable iteration order
  // Per-word occurrence counts; absent elements report zero.
  System.out.println("Hello = " + wordCounts.count("Hello")); // print 2
  System.out.println("World = " + wordCounts.count("World!")); // print 2
  System.out.println("All = " + wordCounts.count("All!")); // print 1
  System.out.println("Hi = " + wordCounts.count("Hi")); // print 1
  System.out.println("Empty = " + wordCounts.count("Empty")); // print 0
  // Total number of words (including duplicates).
  System.out.println(wordCounts.size()); //print 6
  // Number of distinct words.
  System.out.println(wordCounts.elementSet().size()); //print 4
}
public static void main(String[] args) {
// Split the text into individual words
String INPUT_TEXT = "Hello World! Hello All! Hi World!";
// Create the Multiset
Multiset<String> multiset = LinkedHashMultiset.create(Arrays.asList(INPUT_TEXT.split(" ")));
// Print the words together with their occurrence counts
System.out.println(multiset); // prints [Hello x 2, World! x 2, All!, Hi] - in order of first insertion
// Print all unique words
System.out.println(multiset.elementSet()); // prints [Hello, World!, All!, Hi] - in order of first insertion
// Print the occurrence count of each word
System.out.println("Hello = " + multiset.count("Hello")); // prints 2
System.out.println("World = " + multiset.count("World!")); // prints 2
System.out.println("All = " + multiset.count("All!")); // prints 1
System.out.println("Hi = " + multiset.count("Hi")); // prints 1
System.out.println("Empty = " + multiset.count("Empty")); // prints 0
// Print the total number of words in the text
System.out.println(multiset.size()); //prints 6
// Print the number of unique words
System.out.println(multiset.elementSet().size()); //prints 4
}
/**
 * Converts each (flow, traces) entry into one {@link Row}, pruning the traces to at
 * most {@code maxTraces} while still reporting the original (unpruned) trace count.
 */
public static Multiset<Row> flowTracesToRows(
    SortedMap<Flow, List<Trace>> flowTraces, int maxTraces) {
  Multiset<Row> result = LinkedHashMultiset.create();
  flowTraces.forEach(
      (flow, traces) ->
          result.add(
              Row.of(
                  COL_FLOW,
                  flow,
                  COL_TRACES,
                  TracePruner.prune(traces, maxTraces),
                  COL_TRACE_COUNT,
                  traces.size())));
  return result;
}
/**
 * Builds one differential {@link Row} per flow, pairing pruned base and delta traces
 * (plus their unpruned counts) under TableDiff column names.
 *
 * @throws IllegalArgumentException if the two maps do not cover the same set of flows
 */
public static Multiset<Row> diffFlowTracesToRows(
    Map<Flow, List<Trace>> baseFlowTraces,
    Map<Flow, List<Trace>> deltaFlowTraces,
    int maxTraces) {
  // Validate before doing any work; the keySet equality also guarantees the
  // delta lookups below never return null.
  checkArgument(
      baseFlowTraces.keySet().equals(deltaFlowTraces.keySet()),
      "Base and delta flow traces should have same flows");
  Multiset<Row> rows = LinkedHashMultiset.create();
  for (Flow flow : baseFlowTraces.keySet()) {
    // Look up each side once instead of repeating four map accesses per flow.
    List<Trace> baseTraces = baseFlowTraces.get(flow);
    List<Trace> deltaTraces = deltaFlowTraces.get(flow);
    rows.add(
        Row.of(
            COL_FLOW,
            flow,
            TableDiff.baseColumnName(COL_TRACES),
            TracePruner.prune(baseTraces, maxTraces),
            TableDiff.baseColumnName(COL_TRACE_COUNT),
            baseTraces.size(),
            TableDiff.deltaColumnName(COL_TRACES),
            TracePruner.prune(deltaTraces, maxTraces),
            TableDiff.deltaColumnName(COL_TRACE_COUNT),
            deltaTraces.size()));
  }
  return rows;
}
/**
 * Serializes a bean holding every supported Multiset flavor and compares the JSON
 * output against an exact expected literal.
 * NOTE(review): the expected orderings assume LinkedHashMultiset preserves insertion
 * order, TreeMultiset sorts naturally, and EnumMultiset orders by enum declaration —
 * consistent with the Guava types used, but verify against the mapper's behavior.
 */
public void testSerialization() {
BeanWithMultisetTypes bean = new BeanWithMultisetTypes();
// "abc" occurs twice; the null element exercises null support in serialization
List<String> list = Arrays.asList( "foo", "abc", null, "abc" );
// sorted/tree/immutable variants cannot hold null, so use this list for them
List<String> listWithNonNull = Arrays.asList( "foo", "abc", "bar", "abc" );
bean.multiset = LinkedHashMultiset.create( list );
bean.hashMultiset = HashMultiset.create( Arrays.asList( "abc", "abc" ) );
bean.linkedHashMultiset = LinkedHashMultiset.create( list );
bean.sortedMultiset = TreeMultiset.create( listWithNonNull );
bean.treeMultiset = TreeMultiset.create( listWithNonNull );
bean.immutableMultiset = ImmutableMultiset.copyOf( listWithNonNull );
bean.enumMultiset = EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) );
// duplicates are emitted as repeated array elements, not as counts
String expected = "{" +
"\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
"\"hashMultiset\":[\"abc\",\"abc\"]," +
"\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
"\"sortedMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
"\"treeMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
"\"immutableMultiset\":[\"foo\",\"abc\",\"abc\",\"bar\"]," +
"\"enumMultiset\":[\"A\",\"A\",\"B\",\"D\"]" +
"}";
assertEquals( expected, BeanWithMultisetTypesMapper.INSTANCE.write( bean ) );
}
/**
 * Deserializes JSON containing every supported Multiset flavor and checks each
 * resulting collection against an expected Multiset built directly with Guava.
 * Note the input arrays include null entries even for the null-rejecting types;
 * presumably the deserializer drops nulls for those fields — TODO confirm.
 */
public void testDeserialization() {
String input = "{" +
"\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
"\"hashMultiset\":[\"abc\",\"abc\"]," +
"\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
"\"sortedMultiset\":[\"foo\",\"abc\",\"bar\",\"abc\",null]," +
"\"treeMultiset\":[\"bar\",\"abc\",\"abc\",\"foo\",null]," +
"\"immutableMultiset\":[\"foo\",\"abc\",\"abc\",\"bar\",null]," +
"\"enumMultiset\":[\"B\",\"A\",\"A\",\"D\",null]" +
"}";
BeanWithMultisetTypes result = BeanWithMultisetTypesMapper.INSTANCE.read( input );
assertNotNull( result );
// Multiset equality ignores encounter order, so element order here is not significant
List<String> expectedList = Arrays.asList( "foo", "abc", null, "abc" );
List<String> expectedListWithNonNull = Arrays.asList( "foo", "abc", "bar", "abc" );
assertEquals( LinkedHashMultiset.create( expectedList ), result.multiset );
assertEquals( HashMultiset.create( Arrays.asList( "abc", "abc" ) ), result.hashMultiset );
assertEquals( LinkedHashMultiset.create( expectedList ), result.linkedHashMultiset );
assertEquals( TreeMultiset.create( expectedListWithNonNull ), result.sortedMultiset );
assertEquals( TreeMultiset.create( expectedListWithNonNull ), result.treeMultiset );
assertEquals( ImmutableMultiset.copyOf( expectedListWithNonNull ), result.immutableMultiset );
assertEquals( EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) ), result.enumMultiset );
}
/**
 * Keeps the cumulated distance for all the common raw super types of the given references.
 * Interfaces that are more directly implemented will get a lower total count than more general
 * interfaces.
 *
 * @param references the remaining type references to fold into the distances
 * @param all receives, per raw type, the parameterized occurrences (mutated in place)
 * @param cumulatedDistance running per-raw-type distance totals (mutated in place)
 */
protected void cumulateDistance(final List<LightweightTypeReference> references, Multimap<JvmType, LightweightTypeReference> all,
    Multiset<JvmType> cumulatedDistance) {
  for (LightweightTypeReference other : references) {
    Multiset<JvmType> otherDistance = LinkedHashMultiset.create();
    initializeDistance(other, all, otherDistance);
    // Drop raw types not shared by 'other': only super types common to every
    // reference seen so far survive.
    cumulatedDistance.retainAll(otherDistance);
    for (Multiset.Entry<JvmType> typeToDistance : otherDistance.entrySet()) {
      // Always brace the conditional — the original unbraced form invites
      // dangling-statement mistakes on later edits.
      if (cumulatedDistance.contains(typeToDistance.getElement())) {
        cumulatedDistance.add(typeToDistance.getElement(), typeToDistance.getCount());
      }
    }
  }
}
/**
 * Parses every element of {@code array} into a {@link Field}, preserving both
 * encounter order and duplicates.
 *
 * @throws ParserException if any element fails to parse
 */
private static Multiset<Field> parseArray(List<Object> array) throws ParserException {
  Multiset<Field> fields = LinkedHashMultiset.create();
  for (Object element : array) {
    // No enclosing parent for array members, hence the null first argument.
    fields.add(parseField(null, element));
  }
  return fields;
}
/**
 * Answers the undefined-references question: emits one row per undefined structure
 * reference found in configuration files that produced the queried nodes.
 */
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
UndefinedReferencesQuestion question = (UndefinedReferencesQuestion) _question;
// Find all the filenames that produced the queried nodes. This might have false positives if
// a file produced multiple nodes, but that was already mis-handled before. Need to rewrite
// this question as a TableAnswerElement.
Set<String> includeNodes =
question.getNodeSpecifier().resolve(_batfish.specifierContext(snapshot));
Multimap<String, String> hostnameFilenameMap =
_batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
// Keep only files mapped from at least one included node.
Set<String> includeFiles =
hostnameFilenameMap.entries().stream()
.filter(e -> includeNodes.contains(e.getKey()))
.map(Entry::getValue)
.collect(Collectors.toSet());
Multiset<Row> rows = LinkedHashMultiset.create();
// Outermost key is the filename (filtered below); inner nesting is whatever
// getUndefinedReferences defines — processEntryToRows handles the details.
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
undefinedReferences =
_batfish
.loadConvertConfigurationAnswerElementOrReparse(snapshot)
.getUndefinedReferences();
undefinedReferences.entrySet().stream()
.filter(e -> includeFiles.contains(e.getKey()))
.forEach(e -> rows.addAll(processEntryToRows(e)));
TableAnswerElement table = new TableAnswerElement(createMetadata());
table.postProcessAnswer(_question, rows);
return table;
}
/**
 * Answers the unused-structures question: emits rows for defined structures in
 * files that produced the queried nodes (processEntryToRows decides which are unused).
 */
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
UnusedStructuresQuestion question = (UnusedStructuresQuestion) _question;
// Find all the filenames that produced the queried nodes. This might have false positives if
// a file produced multiple nodes, but that was already mis-handled before. Need to rewrite
// this question as a TableAnswerElement.
Set<String> includeNodes =
question.getNodeSpecifier().resolve(_batfish.specifierContext(snapshot));
Multimap<String, String> hostnameFilenameMap =
_batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
// Keep only files mapped from at least one included node.
Set<String> includeFiles =
hostnameFilenameMap.entries().stream()
.filter(e -> includeNodes.contains(e.getKey()))
.map(Entry::getValue)
.collect(Collectors.toSet());
Multiset<Row> rows = LinkedHashMultiset.create();
// Outermost key is the filename (filtered below); inner levels per getDefinedStructures.
SortedMap<String, SortedMap<String, SortedMap<String, DefinedStructureInfo>>>
definedStructures =
_batfish
.loadConvertConfigurationAnswerElementOrReparse(snapshot)
.getDefinedStructures();
definedStructures.entrySet().stream()
.filter(e -> includeFiles.contains(e.getKey()))
.forEach(e -> rows.addAll(processEntryToRows(e)));
TableAnswerElement table = new TableAnswerElement(createMetadata(question));
table.postProcessAnswer(_question, rows);
return table;
}
/** Supplies a fresh, empty multiset for deserialization. */
@Override
protected LinkedHashMultiset<Object> createMultiset() {
  // Explicit type witness documents the element type of the factory call.
  return LinkedHashMultiset.<Object>create();
}
/**
 * Returns a new deserializer instance configured with the resolved value/type
 * deserializers, null-value provider, and unwrap-single setting; the container
 * type is carried over from this instance.
 */
@Override
public GuavaCollectionDeserializer<LinkedHashMultiset<Object>> withResolved(JsonDeserializer<?> valueDeser, TypeDeserializer typeDeser,
NullValueProvider nuller, Boolean unwrapSingle) {
return new LinkedHashMultisetDeserializer(_containerType,
valueDeser, typeDeser, nuller, unwrapSingle);
}
/**
 * Emits the builder's backing-field declaration: a private final
 * LinkedHashMultiset of the element type, initialized via its create() factory.
 */
@Override
public void addBuilderFieldDeclaration(SourceBuilder code) {
// %1$s = collection class, %2$s = element type, %3$s = field name
code.addLine("private final %1$s<%2$s> %3$s = %1$s.create();",
LinkedHashMultiset.class, elementType, property.getField());
}
/**
 * Answers the referenced-structures question: for files that produced the queried
 * nodes, emits one row per (structure type, name, context) reference whose type and
 * name match the question's regex filters.
 */
@Override
public TableAnswerElement answer(NetworkSnapshot snapshot) {
ReferencedStructuresQuestion question = (ReferencedStructuresQuestion) _question;
Set<String> includeNodes = question.getNodes().getMatchingNodes(_batfish, snapshot);
// Map hostnames back to the files that produced them, then keep only those files.
Multimap<String, String> hostnameFilenameMap =
_batfish.loadParseVendorConfigurationAnswerElement(snapshot).getFileMap();
Set<String> includeFiles =
hostnameFilenameMap.entries().stream()
.filter(e -> includeNodes.contains(e.getKey()))
.map(Entry::getValue)
.collect(Collectors.toSet());
// Question-supplied regexes select which structure names/types to report.
Pattern includeStructureNames = Pattern.compile(question.getNames(), Pattern.CASE_INSENSITIVE);
Pattern includeStructureTypes = Pattern.compile(question.getTypes(), Pattern.CASE_INSENSITIVE);
Multiset<Row> rows = LinkedHashMultiset.create();
// Nested iteration: filename -> structure type -> structure name -> context -> lines.
_batfish
.loadConvertConfigurationAnswerElementOrReparse(snapshot)
.getReferencedStructures()
.forEach(
(filename, value) -> {
if (!includeFiles.contains(filename)) {
return;
}
// Accumulate this file's rows locally, then add them all at once.
List<Row> rows1 = new ArrayList<>();
value.forEach(
(structType, byName) -> {
if (!includeStructureTypes.matcher(structType).matches()) {
return;
}
byName.forEach(
(name, byContext) -> {
if (!includeStructureNames.matcher(name).matches()) {
return;
}
byContext.forEach(
(context, lineNums) -> {
rows1.add(
Row.of(
COL_STRUCTURE_TYPE,
structType,
COL_STRUCTURE_NAME,
name,
COL_CONTEXT,
context,
COL_SOURCE_LINES,
new FileLines(filename, lineNums)));
});
});
});
rows.addAll(rows1);
});
TableAnswerElement table = new TableAnswerElement(createMetadata());
table.postProcessAnswer(_question, rows);
return table;
}
/** Supplies a fresh, empty LinkedHashMultiset as the backing collection. */
@Override
protected LinkedHashMultiset<T> newCollection() {
return LinkedHashMultiset.create();
}
/**
 * Supplies a fresh, empty collection; declared as Multiset but concretely backed
 * by a LinkedHashMultiset.
 */
@Override
protected Multiset<T> newCollection() {
return LinkedHashMultiset.create();
}
/**
 * Registers the JSON (de)serializer pairs for the supported Guava container types.
 * Each {@code type(...)} call maps one Guava class to its serializer and, where
 * deserialization is supported, its deserializer.
 */
@Override
protected void configure() {
type( Optional.class ).serializer( OptionalJsonSerializer.class ).deserializer( OptionalJsonDeserializer.class );
// FluentIterable is serialize-only: no deserializer registered
type( FluentIterable.class ).serializer( IterableJsonSerializer.class );
// Immutable Collections
type( ImmutableCollection.class ).serializer( CollectionJsonSerializer.class )
.deserializer( ImmutableCollectionJsonDeserializer.class );
type( ImmutableList.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableListJsonDeserializer.class );
type( ImmutableSet.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableSetJsonDeserializer.class );
type( ImmutableSortedSet.class ).serializer( CollectionJsonSerializer.class )
.deserializer( ImmutableSortedSetJsonDeserializer.class );
// Immutable Map
type( ImmutableMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableMapJsonDeserializer.class );
type( ImmutableSortedMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableSortedMapJsonDeserializer.class );
// BiMap
type( BiMap.class ).serializer( MapJsonSerializer.class ).deserializer( BiMapJsonDeserializer.class );
type( ImmutableBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableBiMapJsonDeserializer.class );
type( HashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( HashBiMapJsonDeserializer.class );
type( EnumBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumBiMapJsonDeserializer.class );
type( EnumHashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumHashBiMapJsonDeserializer.class );
// Multiset
type( Multiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( MultisetJsonDeserializer.class );
type( HashMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( HashMultisetJsonDeserializer.class );
type( LinkedHashMultiset.class ).serializer( CollectionJsonSerializer.class )
.deserializer( LinkedHashMultisetJsonDeserializer.class );
type( SortedMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( SortedMultisetJsonDeserializer.class );
type( TreeMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( TreeMultisetJsonDeserializer.class );
type( ImmutableMultiset.class ).serializer( CollectionJsonSerializer.class )
.deserializer( ImmutableMultisetJsonDeserializer.class );
type( EnumMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( EnumMultisetJsonDeserializer.class );
// Multimap
type( Multimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( MultimapJsonDeserializer.class );
type( ImmutableMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ImmutableMultimapJsonDeserializer.class );
type( ImmutableSetMultimap.class ).serializer( MultimapJsonSerializer.class )
.deserializer( ImmutableSetMultimapJsonDeserializer.class );
type( ImmutableListMultimap.class ).serializer( MultimapJsonSerializer.class )
.deserializer( ImmutableListMultimapJsonDeserializer.class );
type( SetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SetMultimapJsonDeserializer.class );
type( HashMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( HashMultimapJsonDeserializer.class );
type( LinkedHashMultimap.class ).serializer( MultimapJsonSerializer.class )
.deserializer( LinkedHashMultimapJsonDeserializer.class );
type( SortedSetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SortedSetMultimapJsonDeserializer.class );
type( TreeMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( TreeMultimapJsonDeserializer.class );
type( ListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ListMultimapJsonDeserializer.class );
type( ArrayListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ArrayListMultimapJsonDeserializer.class );
type( LinkedListMultimap.class ).serializer( MultimapJsonSerializer.class )
.deserializer( LinkedListMultimapJsonDeserializer.class );
}