java.util.HashMap#compute() source code examples

Listed below are code examples that use java.util.HashMap#compute(), collected from open-source projects on GitHub.
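Before the project examples, here is a minimal, self-contained sketch of the counting idiom that most of the examples below rely on (this sketch is illustrative and not taken from any of the listed projects). The remapping function passed to compute receives the current value, which is null when the key is absent, and whatever it returns is stored as the new value.

import java.util.HashMap;
import java.util.Map;

public class ComputeCountDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        for (String word : new String[] {"a", "b", "a", "c", "a"}) {
            // v is null on the first occurrence of word, so the count starts at 1
            counts.compute(word, (k, v) -> v == null ? 1 : v + 1);
        }
        // prints something like {a=3, b=1, c=1} (HashMap iteration order is unspecified)
        System.out.println(counts);
    }
}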

Example 1  Project: macrobase   File: ExactCountTest.java
@Test
public void testCount() {
    ExactCount ec = new ExactCount();
    HashMap<Integer, Integer> truth = new HashMap<>();

    List<Datum> dws = new ArrayList<>();
    for (int i = 0; i < 100; ++i) {
        for (int j = 0; j < i; ++j) {
            dws.add(new Datum(Lists.newArrayList(i), new ArrayRealVector()));
            truth.compute(i, (k, v) -> v == null ? 1 : v + 1);
        }
    }

    ec.count(dws);

    for (Map.Entry<Integer, Double> cnt : ec.getCounts().entrySet()) {
        assertEquals(truth.get(cnt.getKey()), cnt.getValue(), 1e-10);
    }
}
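
As a side note (not part of the macrobase test above), the counting line inside the inner loop could equally be written with Map#merge, which folds the null check into the method:

// drop-in replacement for truth.compute(i, (k, v) -> v == null ? 1 : v + 1)
truth.merge(i, 1, Integer::sum);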
 
Example 2
@Override
public IExpr evaluate(final IAST ast, EvalEngine engine) {
	IExpr arg1 = ast.arg1();
	if (arg1.isList()) {
		IAST list = (IAST) arg1;
		try {
			HashMap<IExpr, MutableInt> map = new HashMap<IExpr, MutableInt>();
			for (int i = 1; i < list.size(); i++) {
				IExpr key = list.get(i);
				map.compute(key, (k, v) -> (v == null) ? new MutableInt(1) : v.increment());
			}
			IAssociation assoc = new ASTAssociation(map.size(), false);
			for (Map.Entry<IExpr, AssociationFunctions.MutableInt> elem : map.entrySet()) {
				assoc.appendRule(F.Rule(elem.getKey(), F.ZZ(elem.getValue().value())));
			}
			return assoc;
		} catch (ValidateException ve) {
			return engine.printMessage(ast.topHead(), ve);
		}
	}
	return F.NIL;
}
 
Example 3  Project: hadoop-ozone   File: InnerNodeImpl.java
/**
 * Get a map from an ancestor node to its excluded node count.
 *
 * @param nodes a collection of leaf nodes to exclude
 * @param genToExclude  the ancestor generation to exclude
 * @param genToReturn  the ancestor generation to return the count map
 * @return the map.
 * example:
 *
 *                *  --- root
 *              /    \
 *             *      *   -- genToReturn =2
 *            / \    / \
 *          *   *   *   *  -- genToExclude = 1
 *         /\  /\  /\  /\
 *       *  * * * * * * *  -- nodes
 */
private Map<Node, Integer> getAncestorCountMap(Collection<Node> nodes,
    int genToExclude, int genToReturn) {
  Preconditions.checkState(genToExclude >= 0);
  Preconditions.checkState(genToReturn >= 0);

  if (nodes == null || nodes.size() == 0) {
    return Collections.emptyMap();
  }
  // with the recursive call, genToReturn can be smaller than genToExclude
  if (genToReturn < genToExclude) {
    genToExclude = genToReturn;
  }
  // ancestorToExclude to ancestorToReturn map
  HashMap<Node, Node> ancestorMap = new HashMap<>();
  for (Node node: nodes) {
    Node ancestorToExclude = node.getAncestor(genToExclude);
    Node ancestorToReturn = node.getAncestor(genToReturn);
    if (ancestorToExclude == null || ancestorToReturn == null) {
      LOG.warn("Ancestor not found, node: {}"
          + ", generation to exclude: {}"
          + ", generation to return: {}", node.getNetworkFullPath(),
              genToExclude, genToReturn);
      continue;
    }
    ancestorMap.put(ancestorToExclude, ancestorToReturn);
  }
  // ancestorToReturn to exclude node count map
  HashMap<Node, Integer> countMap = new HashMap<>();
  for (Map.Entry<Node, Node> entry : ancestorMap.entrySet()) {
    countMap.compute(entry.getValue(),
        (key, n) -> (n == null ? 0 : n) + entry.getKey().getNumOfLeaves());
  }

  return countMap;
}
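
The final loop above is the same accumulate-by-key pattern, except that each call adds getNumOfLeaves() instead of adding 1. A standalone sketch of summing into a map with compute (the key and amounts below are made up for illustration):

Map<String, Integer> leafTotals = new HashMap<>();
// each call adds the given amount to the running total for the key, starting from 0
leafTotals.compute("rack-1", (k, total) -> (total == null ? 0 : total) + 4);
leafTotals.compute("rack-1", (k, total) -> (total == null ? 0 : total) + 2);
// leafTotals.get("rack-1") is now 6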
 
Example 4  Project: besu   File: BlockData.java
public NonceProvider getNonceProvider(final WorldState worldState) {
  final HashMap<Address, Long> currentNonceValues = new HashMap<>();
  return (Address address) ->
      currentNonceValues.compute(
          address,
          (addr, currentValue) -> {
            if (currentValue == null) {
              return Optional.ofNullable(worldState.get(address))
                  .map(Account::getNonce)
                  .orElse(0L);
            }
            return currentValue + 1;
          });
}
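
Note that Map#compute returns the value now associated with the key, which is why the lambda above can be used directly as a NonceProvider: the first call for an address seeds the nonce from the world state, and each later call stores and returns the incremented value. A small illustration of that return value (the map and key below are hypothetical, unrelated to besu's types):

Map<String, Long> nonces = new HashMap<>();
long first = nonces.compute("0xabc", (k, v) -> v == null ? 0L : v + 1);  // 0
long second = nonces.compute("0xabc", (k, v) -> v == null ? 0L : v + 1); // 1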
 
Example 5
@Override
public Pongo measure(Project project) {
	DocumentationReadabilityHistoricMetric documentationHistoricMetric = new DocumentationReadabilityHistoricMetric();
	
	DocumentationReadabilityTransMetric analyzedDocumentationReadability = ((DocumentationReadabilityTransMetricProvider)uses.get(0)).adapt(context.getProjectDB(project));
	
	HashMap<String, Double> readabilityScoreSum = new HashMap<String, Double>();
	HashMap<String, Integer> documentationEntryCounter = new HashMap<String, Integer>();
	
	for(DocumentationEntryReadability documentationEntry : analyzedDocumentationReadability.getDocumentationEntriesReadability())
	{
		DocumentationEntryHistoricReadability documentationEntryHistoric =  new DocumentationEntryHistoricReadability();
		documentationEntryHistoric.setDocumentationId(documentationEntry.getDocumentationId());
		documentationEntryHistoric.setEntryId(documentationEntry.getEntryId());
		documentationEntryHistoric.setReadability(documentationEntry.getReadability());
		readabilityScoreSum.compute(documentationEntry.getDocumentationId(), (k,v)-> v==null ? documentationEntry.getReadability() : v+documentationEntry.getReadability());
		documentationEntryCounter.compute(documentationEntry.getDocumentationId(), (k,v)-> v==null ? 1 : v+1);
		
		documentationHistoricMetric.getDocumentationEntriesReadability().add(documentationEntryHistoric);
	}
	
	if(documentationEntryCounter.size()>0)
	{
		Double readabilityScoreAvg;
		for(String documentationId : documentationEntryCounter.keySet())
		{
			DocumentationHistoricReadability documentationHistoric = new DocumentationHistoricReadability();
			documentationHistoric.setDocumentationId(documentationId);
			documentationHistoric.setNumberOfDocumentationEntries(documentationEntryCounter.get(documentationId));
			readabilityScoreAvg=readabilityScoreSum.get(documentationId)/(double) documentationEntryCounter.get(documentationId);
			documentationHistoric.setAverageDocumentationReadability(readabilityScoreAvg);
			
			documentationHistoricMetric.getDocumentationReadability().add(documentationHistoric);
		}
	}
	
	
	return documentationHistoricMetric;
}
 
Example 6  Project: lucene-solr   File: SloppyPhraseMatcher.java
/** find repeating terms and assign them ordinal values */
private LinkedHashMap<Term,Integer> repeatingTerms() {
  LinkedHashMap<Term,Integer> tord = new LinkedHashMap<>();
  HashMap<Term,Integer> tcnt = new HashMap<>();
  for (PhrasePositions pp : phrasePositions) {
    for (Term t : pp.terms) {
      Integer cnt = tcnt.compute(t, (key, old) -> old == null ? 1 : 1 + old);
      if (cnt==2) {
        tord.put(t,tord.size());
      }
    }
  }
  return tord;
}
 
Example 7
@Override
public Pongo measure(Project project) {
	DocumentationSentimentHistoricMetric documentationHistoricMetric = new DocumentationSentimentHistoricMetric();
	
	DocumentationSentimentTransMetric analyzedDocumentationSentiment = ((DocumentationSentimentTransMetricProvider)uses.get(0)).adapt(context.getProjectDB(project));
	
	HashMap<String, Double> sentimentSum = new HashMap<String, Double>();
	HashMap<String, Integer> documentationEntryCounter = new HashMap<String, Integer>();
	
	for(DocumentationEntrySentiment documentationEntry : analyzedDocumentationSentiment.getDocumentationEntriesSentiment())
	{
		DocumentationEntryHistoricSentiment documentationEntryHistoric =  new DocumentationEntryHistoricSentiment();
		documentationEntryHistoric.setDocumentationId(documentationEntry.getDocumentationId());
		documentationEntryHistoric.setEntryId(documentationEntry.getEntryId());
		documentationEntryHistoric.setPolarity(documentationEntry.getPolarity());
		sentimentSum.compute(documentationEntry.getDocumentationId(), (k,v)-> {
			if(v==null)
				v=0.0;
			switch(documentationEntry.getPolarity())
			{
				case "__label__positive":
					v+=1.0;
					break;
				case "__label__negative":
					v-=1.0;
					break; 
			}
			return v;
		});
		documentationEntryCounter.compute(documentationEntry.getDocumentationId(), (k,v)-> v==null ? 1 : v+1);
		
		documentationHistoricMetric.getDocumentationEntriesSentiment().add(documentationEntryHistoric);
	}
	
	if(documentationEntryCounter.size()>0)
	{
		Double sentimentAvg;
		for(String documentationId : documentationEntryCounter.keySet())
		{
			DocumentationHistoricSentiment documentationHistoric = new DocumentationHistoricSentiment();
			documentationHistoric.setDocumentationId(documentationId);
			documentationHistoric.setNumberOfDocumentationEntries(documentationEntryCounter.get(documentationId));
			sentimentAvg=sentimentSum.get(documentationId)/(double) documentationEntryCounter.get(documentationId);
			documentationHistoric.setAverageDocumentationSentiment(sentimentAvg);
			
			documentationHistoricMetric.getDocumentationSentiment().add(documentationHistoric);
		}
	}
	
	
	return documentationHistoricMetric;
}