com.google.common.collect.Multiset#isEmpty() source code examples

Listed below are example usages of com.google.common.collect.Multiset#isEmpty(); follow the links to view the full source on GitHub.
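Before the project examples, here is a minimal self-contained sketch (not taken from any project below) of the basic isEmpty() contract on a Guava HashMultiset: a multiset is empty exactly when its total number of occurrences is zero.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetIsEmptyDemo {
  public static void main(String[] args) {
    Multiset<String> labels = HashMultiset.create();
    System.out.println(labels.isEmpty()); // true: no occurrences yet

    labels.add("foo", 2);                 // add two occurrences of "foo"
    System.out.println(labels.isEmpty()); // false

    labels.remove("foo", 2);              // remove both occurrences
    System.out.println(labels.isEmpty()); // true again: the count is back to zero
  }
}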

Example 1  Project: nomulus   File: BaseDomainLabelList.java
/**
 * Turns the list CSV data into a map of labels to parsed data of type R.
 *
 * @param lines the CSV file, line by line
 */
public ImmutableMap<String, R> parse(Iterable<String> lines) {
  Map<String, R> labelsToEntries = new HashMap<>();
  Multiset<String> duplicateLabels = HashMultiset.create();
  for (String line : lines) {
    R entry = createFromLine(line);
    if (entry == null) {
      continue;
    }
    String label = entry.getLabel();
    // Check if the label was already processed for this list (which is an error), and if so,
    // accumulate it so that a list of all duplicates can be thrown.
    if (labelsToEntries.containsKey(label)) {
      duplicateLabels.add(label, duplicateLabels.contains(label) ? 1 : 2);
    } else {
      labelsToEntries.put(label, entry);
    }
  }
  if (!duplicateLabels.isEmpty()) {
    throw new IllegalStateException(
        String.format(
            "List '%s' cannot contain duplicate labels. Dupes (with counts) were: %s",
            name, duplicateLabels));
  }
  return ImmutableMap.copyOf(labelsToEntries);
}
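The add(label, duplicateLabels.contains(label) ? 1 : 2) idiom above is easy to misread: when a duplicate is first detected, 2 is added so the count covers both the original occurrence and the duplicate, and each further duplicate adds 1. A small standalone sketch with hypothetical labels, showing that the resulting counts match the input:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import java.util.HashSet;
import java.util.Set;

public class DuplicateCountDemo {
  public static void main(String[] args) {
    // Hypothetical input: "foo" appears three times, "bar" once.
    String[] labels = {"foo", "bar", "foo", "foo"};
    Set<String> seen = new HashSet<>();
    Multiset<String> duplicateLabels = HashMultiset.create();
    for (String label : labels) {
      if (!seen.add(label)) {
        // First duplicate adds 2 (original + this one); later duplicates add 1.
        duplicateLabels.add(label, duplicateLabels.contains(label) ? 1 : 2);
      }
    }
    System.out.println(duplicateLabels.isEmpty());    // false: there were duplicates
    System.out.println(duplicateLabels.count("foo")); // 3, matching the three occurrences of "foo"
  }
}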
 
Example 2  Project: attic-aurora   File: PendingTaskProcessor.java
/**
 * Creates an execution sequence for pending task groups by interleaving batches of the requested
 * size of their occurrences. For example: {G1, G1, G1, G2, G2} with a batch size of 2 tasks per
 * group will be converted into {G1, G1, G2, G2, G1}.
 *
 * @param groups Multiset of task groups.
 * @param batchSize The batch size of tasks from each group to sequence together.
 * @return A task group execution sequence.
 */
@VisibleForTesting
static List<TaskGroupKey> getPreemptionSequence(
    Multiset<TaskGroupKey> groups,
    int batchSize) {

  Preconditions.checkArgument(batchSize > 0, "batchSize should be positive.");

  Multiset<TaskGroupKey> mutableGroups = HashMultiset.create(groups);
  List<TaskGroupKey> instructions = Lists.newLinkedList();
  Set<TaskGroupKey> keys = ImmutableSet.copyOf(groups.elementSet());
  while (!mutableGroups.isEmpty()) {
    for (TaskGroupKey key : keys) {
      if (mutableGroups.contains(key)) {
        // remove(key, n) returns the count *before* removal, so the number of
        // occurrences actually removed is capped at batchSize.
        int elementCount = mutableGroups.remove(key, batchSize);
        int removedCount = Math.min(elementCount, batchSize);
        instructions.addAll(Collections.nCopies(removedCount, key));
      }
    }
  }

  return instructions;
}
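To make the interleaving concrete, here is a standalone sketch of the same round-robin idea using plain String keys instead of TaskGroupKey; the Aurora scheduler types are not needed for the mechanics, and the group names and counts are made up.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multiset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

public class InterleaveDemo {
  public static void main(String[] args) {
    // Hypothetical groups: G1 x 3, G2 x 2, as in the javadoc example above.
    Multiset<String> groups = HashMultiset.create();
    groups.add("G1", 3);
    groups.add("G2", 2);

    int batchSize = 2;
    Multiset<String> remaining = HashMultiset.create(groups);
    Set<String> keys = ImmutableSet.copyOf(groups.elementSet());
    List<String> sequence = new ArrayList<>();
    while (!remaining.isEmpty()) {
      for (String key : keys) {
        if (remaining.contains(key)) {
          // remove(key, n) returns the count before removal; cap it at batchSize.
          int before = remaining.remove(key, batchSize);
          sequence.addAll(Collections.nCopies(Math.min(before, batchSize), key));
        }
      }
    }
    System.out.println(sequence); // e.g. [G1, G1, G2, G2, G1] (element iteration order may vary)
  }
}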
 
Example 3  Project: businessworks   File: WeakKeySet.java
/**
 * There may be multiple child injectors blacklisting a certain key, so only remove the source
 * that's relevant.
 */
private void cleanUpForCollectedState(Set<KeyAndSource> keysAndSources) {
  synchronized (lock) {
    for (KeyAndSource keyAndSource : keysAndSources) {
      Multiset<Object> set = backingMap.get(keyAndSource.key);
      if (set != null) {
        set.remove(keyAndSource.source);
        if (set.isEmpty()) {
          backingMap.remove(keyAndSource.key);
        }
      }
    }
  }
}
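Outside of Guice, the same pattern, decrement a per-key Multiset and drop the map entry once isEmpty() reports the last occurrence is gone, looks like the sketch below; sourcesByKey and the string values are hypothetical stand-ins for backingMap and the real key/source objects.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import java.util.HashMap;
import java.util.Map;

public class MultisetCleanupDemo {
  public static void main(String[] args) {
    Map<String, Multiset<String>> sourcesByKey = new HashMap<>();
    sourcesByKey.computeIfAbsent("keyA", k -> HashMultiset.create()).add("sourceX");
    sourcesByKey.computeIfAbsent("keyA", k -> HashMultiset.create()).add("sourceX");

    // Remove one occurrence; only drop the key when no occurrences remain.
    Multiset<String> sources = sourcesByKey.get("keyA");
    sources.remove("sourceX");
    if (sources.isEmpty()) {
      sourcesByKey.remove("keyA");
    }
    System.out.println(sourcesByKey); // {keyA=[sourceX]}: one occurrence is still left
  }
}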
 
Example 4  Project: gef   File: ObservableMultisetWrapper.java
@Override
public void clear() {
	Multiset<E> previousContents = delegateCopy();
	super.clear();
	if (!previousContents.isEmpty()) {
		List<ElementarySubChange<E>> elementaryChanges = new ArrayList<>();
		for (E e : previousContents.elementSet()) {
			elementaryChanges.add(new ElementarySubChange<>(e,
					previousContents.count(e), 0));
		}
		helper.fireValueChangedEvent(
				new MultisetListenerHelper.AtomicChange<>(this,
						previousContents, elementaryChanges));
	}
}
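ElementarySubChange and MultisetListenerHelper are gef-specific, but the essential move, snapshot the contents, clear, and report one removal per distinct element with its former count only if the snapshot was non-empty, can be sketched in plain Java with a printout standing in for the change event.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class ClearChangeDemo {
  public static void main(String[] args) {
    Multiset<String> multiset = HashMultiset.create();
    multiset.add("a", 2);
    multiset.add("b", 1);

    // Snapshot before clearing, mirroring delegateCopy() above.
    Multiset<String> previousContents = HashMultiset.create(multiset);
    multiset.clear();

    // Only report a change if something was actually removed.
    if (!previousContents.isEmpty()) {
      for (String e : previousContents.elementSet()) {
        System.out.println("removed " + previousContents.count(e) + " x " + e);
      }
    }
  }
}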
 
Example 5
/** Turns the list CSV data into a map of labels to {@link ReservedEntry}. */
static ImmutableMap<String, ReservedEntry> parseToReservationsByLabels(Iterable<String> lines) {
  Map<String, ReservedEntry> labelsToEntries = Maps.newHashMap();
  Multiset<String> duplicateLabels = HashMultiset.create();
  for (String originalLine : lines) {
    List<String> lineAndComment = splitOnComment(originalLine);
    if (lineAndComment.isEmpty()) {
      continue;
    }
    String line = lineAndComment.get(0);
    String comment = lineAndComment.get(1);
    List<String> parts = Splitter.on(',').trimResults().splitToList(line);
    checkArgument(
        parts.size() == 2 || parts.size() == 3,
        "Could not parse line in reserved list: %s",
        originalLine);
    String label = parts.get(0);
    checkArgument(
        label.equals(canonicalizeDomainName(label)),
        "Label '%s' must be in puny-coded, lower-case form",
        label);
    ReservationType reservationType = ReservationType.valueOf(parts.get(1));
    ReservedEntry reservedEntry = ReservedEntry.create(reservationType, comment);
    // Check if the label was already processed for this list (which is an error), and if so,
    // accumulate it so that a list of all duplicates can be thrown.
    if (labelsToEntries.containsKey(label)) {
      duplicateLabels.add(label, duplicateLabels.contains(label) ? 1 : 2);
    } else {
      labelsToEntries.put(label, reservedEntry);
    }
  }
  if (!duplicateLabels.isEmpty()) {
    throw new IllegalStateException(
        String.format(
            "Reserved list cannot contain duplicate labels. Dupes (with counts) were: %s",
            duplicateLabels));
  }
  return ImmutableMap.copyOf(labelsToEntries);
}
 
Example 6  Project: sqoop-on-spark   File: HdfsAsserts.java
/**
 * Verify that mapreduce output (across all files) is as expected.
 *
 * @param directory Mapreduce output directory
 * @param lines Expected lines
 * @throws IOException
 */
public static void assertMapreduceOutput(FileSystem fs, String directory, String... lines) throws IOException {
  // Expected lines as a multiset, so repeated expected lines are counted correctly.
  Multiset<String> setLines = HashMultiset.create(Arrays.asList(lines));
  // Lines present in the output files that were not expected.
  List<String> notFound = new LinkedList<String>();

  Path[] files = HdfsUtils.getOutputMapreduceFiles(fs, directory);
  for(Path file : files) {
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(file)));

    String line;
    while ((line = br.readLine()) != null) {
      if (!setLines.remove(line)) {
        notFound.add(line);
      }
    }
    br.close();
  }

  if(!setLines.isEmpty() || !notFound.isEmpty()) {
    LOG.error("Output does not match expectations.");
    LOG.error("Expected lines that weren't present in the files:");
    LOG.error("\t'" + StringUtils.join(setLines, "'\n\t'") + "'");
    LOG.error("Extra lines in files that weren't expected:");
    LOG.error("\t'" + StringUtils.join(notFound, "'\n\t'") + "'");
    fail("Output does not match expectations.");
  }
}
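The verification idiom itself does not depend on HDFS: seed a Multiset with the expected lines, remove() each actual line as it is read, and afterwards isEmpty() tells you whether every expected line was consumed, while a separate list collects the extras. A minimal in-memory sketch with made-up lines:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class OutputCheckDemo {
  public static void main(String[] args) {
    Multiset<String> expected = HashMultiset.create(Arrays.asList("a", "b", "b"));
    List<String> actual = Arrays.asList("b", "a", "c");

    List<String> unexpected = new LinkedList<>();
    for (String line : actual) {
      // remove() returns false if the line was not (or is no longer) expected.
      if (!expected.remove(line)) {
        unexpected.add(line);
      }
    }
    // expected still holds the missing "b"; unexpected holds the extra "c".
    System.out.println("missing: " + expected + ", extra: " + unexpected);
  }
}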
 
Example 7  Project: imhotep   File: FlamdexCompare.java
static boolean unorderedEquals(List<FlamdexDocument> l1, List<FlamdexDocument> l2) {
    if (l1.size() != l2.size()) return false;

    Multiset<FlamdexDocumentWrapper> s1 = HashMultiset.create(Lists.transform(l1, new Function<FlamdexDocument, FlamdexDocumentWrapper>() {
        @Override
        public FlamdexDocumentWrapper apply(FlamdexDocument input) {
            return new FlamdexDocumentWrapper(input);
        }
    }));
    for (final FlamdexDocument doc : l2) {
        final FlamdexDocumentWrapper w = new FlamdexDocumentWrapper(doc);
        if (!s1.remove(w)) return false;
    }
    return s1.isEmpty();
}
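The same multiset-based bag comparison can be shown without the Flamdex wrapper types: copy one list into a Multiset, remove each element of the other, and the lists are equal ignoring order exactly when every remove() succeeds and the multiset ends up isEmpty(). A small sketch over strings:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import java.util.Arrays;
import java.util.List;

public class UnorderedEqualsDemo {
  static boolean unorderedEquals(List<String> l1, List<String> l2) {
    if (l1.size() != l2.size()) {
      return false;
    }
    Multiset<String> s1 = HashMultiset.create(l1);
    for (String s : l2) {
      // A failed remove() means l2 has an element l1 does not (or has more copies of it).
      if (!s1.remove(s)) {
        return false;
      }
    }
    return s1.isEmpty();
  }

  public static void main(String[] args) {
    System.out.println(unorderedEquals(Arrays.asList("x", "y", "y"), Arrays.asList("y", "x", "y"))); // true
    System.out.println(unorderedEquals(Arrays.asList("x", "y"), Arrays.asList("x", "x")));           // false
  }
}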
 
Example 8  Project: gef   File: MultisetExpression.java
@Override
public boolean isEmpty() {
	final Multiset<E> multiset = get();
	return (multiset == null) ? EMPTY_MULTISET.isEmpty()
			: multiset.isEmpty();
}