java.util.TreeMap#entrySet ( )源码实例Demo

下面列出了java.util.TreeMap#entrySet ( ) 实例代码,或者点击链接到github查看源代码,也可以在右侧发表评论。

源代码1 项目: audiveris   文件: HiLoPeakFinder.java
/**
 * Report the XYSeries for peaks found.
 *
 * @param x1 lower x bound for plot
 * @param x2 upper x bound for plot
 * @return XY peaks ready to plot
 */
public XYSeries getPeakSeries (int x1,
                               int x2)
{
    final XYSeries peakSeries = new XYSeries("Peak", false); // No autosort

    for (Range peak : getPeaks()) {
        // Only plot peaks whose [min..max] range intersects the [x1..x2] window
        if ((peak != null) && (peak.min <= x2) && (peak.max >= x1)) {
            final TreeMap<Integer, Double> thresholds = replay(peak);

            // Guard against an empty replay: lastKey() on an empty TreeMap
            // would throw NoSuchElementException
            if (!thresholds.isEmpty()) {
                for (Map.Entry<Integer, Double> entry : thresholds.entrySet()) {
                    peakSeries.add(entry.getKey(), entry.getValue());
                }

                peakSeries.add(thresholds.lastKey(), null); // No line between peaks
            }
        }
    }

    return peakSeries;
}
 
源代码2 项目: j2objc   文件: MeasureUnitTest.java
/**
 * Emits the Java source of a backward-compatibility test method for the
 * given ICU version, listing every MeasureUnit constant of non-hidden types.
 *
 * @param version ICU version string, e.g. "53.1" (dots become underscores in the method name)
 */
static void generateBackwardCompatibilityTest(String version) {
    Map<String, MeasureUnit> seen = new HashMap<String, MeasureUnit>();
    System.out.println();
    System.out.printf("    public void TestCompatible%s() {\n", version.replace(".", "_"));
    System.out.println("        MeasureUnit[] units = {");
    int total = 0;
    for (Map.Entry<String, List<MeasureUnit>> typeEntry : getAllUnits().entrySet()) {
        // skip unit types excluded from the public API
        if (isTypeHidden(typeEntry.getKey())) {
            continue;
        }
        for (MeasureUnit unit : typeEntry.getValue()) {
            String javaName = toJAVAName(unit);
            checkForDup(seen, javaName, unit);
            System.out.printf("                MeasureUnit.%s,\n", javaName);
            total++;
        }
    }
    System.out.println("        };");
    System.out.printf("        assertEquals(\"\",  %d, units.length);\n", total);
    System.out.println("    }");
}
 
源代码3 项目: SkyEye   文件: TraceDto.java
/**
 * Maps one HBase row into a TraceDto: the row key is the trace id and every
 * cell of the trace column family is one span (qualifier = span id, value =
 * span detail JSON). Spans are re-keyed into a TreeMap so they come out
 * sorted by span id.
 */
@Override
public TraceDto mapRow(Result res, int rowNum) throws Exception {

    String traceId = new String(res.getRow());
    NavigableMap<byte[], byte[]> data = res.getFamilyMap(Constants.TABLE_TRACE_COLUMN_FAMILY.getBytes());

    TreeMap<String, JSONObject> spanMap = new TreeMap<>();
    for (Map.Entry<byte[], byte[]> cell : data.entrySet()) {
        spanMap.put(new String(cell.getKey()), JSON.parseObject(new String(cell.getValue())));
    }

    TraceDto rtn = new TraceDto();
    rtn.setTraceId(traceId).setSpans(spanMap.entrySet());
    return rtn;
}
 
源代码4 项目: mzmine3   文件: KovatsIndexExtractionDialog.java
/**
 * GNPS GC MS formatted table (comma-separated)
 *
 * @param values map of Kovats index to retention time (minutes)
 * @return CSV text with header Compound_Name,Carbon_Number,RT (RT exported in seconds)
 */
private String getCsvTable(TreeMap<KovatsIndex, Double> values) {
  StringBuilder s = new StringBuilder();
  String nl = "\n";
  // header for GNPS
  // alkane name, num carbon(int), rt (seconds)
  s.append("Compound_Name,Carbon_Number,RT" + nl);
  // Fixed: force '.' as decimal separator. The default-locale DecimalFormat
  // could emit ',' (e.g. German locale) and corrupt the comma-separated table.
  DecimalFormat f = new DecimalFormat("0.##",
      java.text.DecimalFormatSymbols.getInstance(java.util.Locale.ROOT));

  for (Entry<KovatsIndex, Double> e : values.entrySet()) {
    s.append(e.getKey().getAlkaneName());
    s.append(",");
    s.append(String.valueOf(e.getKey().getNumCarbon()));
    s.append(",");
    // export rt in seconds for GNPS GC
    s.append(f.format(e.getValue() * 60.0));
    s.append(nl);
  }
  return s.toString();
}
 
源代码5 项目: jadira   文件: TreeMapImplementor.java
/**
 * Deep-clones a TreeMap: keys and values are cloned through the parent
 * clone driver, and the copy reuses the source map's comparator so the
 * ordering is preserved.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public <T> T clone(T obj, CloneDriver parentContext, IdentityHashMap<Object, Object> referencesToReuse, long stackDepth) {

	stackDepth++;

	final TreeMap<Object, Object> original = (TreeMap) obj;
	final TreeMap copy = new TreeMap(original.comparator());

	for (final Map.Entry entry : original.entrySet()) {
		copy.put(
				parentContext.clone(entry.getKey(), parentContext, referencesToReuse, stackDepth),
				parentContext.clone(entry.getValue(), parentContext, referencesToReuse, stackDepth));
	}
	return (T) copy;
}
 
源代码6 项目: netcdf-java   文件: NcMLWriter.java
/**
 * Builds the NcML {@code enumTypedef} element for the given enum typedef,
 * with one nested {@code enum} child per key/value pair.
 */
public Element makeEnumTypedefElement(EnumTypedef etd) {
  Element typeElem = new Element("enumTypedef", namespace);
  typeElem.setAttribute("name", etd.getShortName());
  typeElem.setAttribute("type", etd.getBaseType().toString());

  // Use a TreeMap so that the key-value pairs are emitted in a consistent order.
  TreeMap<Integer, String> sorted = new TreeMap<>(etd.getMap());
  for (Map.Entry<Integer, String> kv : sorted.entrySet()) {
    Element enumElem = new Element("enum", namespace);
    enumElem.setAttribute("key", Integer.toString(kv.getKey()));
    enumElem.addContent(kv.getValue());
    typeElem.addContent(enumElem);
  }

  return typeElem;
}
 
源代码7 项目: j2objc   文件: MeasureUnitTest.java
/**
 * Emits C++ header declarations (static factory methods with doc comments)
 * for every non-currency MeasureUnit.
 *
 * @param thisVersion ICU version to report when a unit has no recorded version
 */
static void generateCXXHConstants(String thisVersion) {
    Map<String, MeasureUnit> seen = new HashMap<String, MeasureUnit>();
    System.out.println();
    for (Map.Entry<String, List<MeasureUnit>> typeEntry : getAllUnits().entrySet()) {
        String type = typeEntry.getKey();
        // currency units are handled separately
        if (type.equals("currency")) {
            continue;
        }
        for (MeasureUnit unit : typeEntry.getValue()) {
            String code = unit.getSubtype();
            String name = toCamelCase(unit);
            String javaName = toJAVAName(unit);
            checkForDup(seen, name, unit);
            boolean draft = isDraft(javaName);
            // draft units get wrapped in the U_HIDE_DRAFT_API guard
            if (draft) {
                System.out.println("#ifndef U_HIDE_DRAFT_API");
            }
            System.out.println("    /**");
            System.out.println("     * Returns unit of " + type + ": " + code + ".");
            System.out.println("     * Caller owns returned value and must free it.");
            System.out.println("     * @param status ICU error code.");
            if (draft) {
                System.out.println("     * @draft ICU " + getVersion(javaName, thisVersion));
            } else {
                System.out.println("     * @stable ICU " + getVersion(javaName, thisVersion));
            }
            System.out.println("     */");
            System.out.printf("    static MeasureUnit *create%s(UErrorCode &status);\n\n", name);
            if (draft) {
                System.out.println("#endif /* U_HIDE_DRAFT_API */");
            }
        }
    }
}
 
源代码8 项目: twitter-kit-android   文件: OAuth1aParameters.java
/**
 * Percent-encodes each key and value twice (as required for the OAuth
 * signature base string) and joins the pairs with encoded '=' (%3D) and
 * encoded '&amp;' (%26).
 *
 * @param params sorted OAuth parameters
 * @return the doubly-encoded, joined query string
 */
private String getEncodedQueryParams(TreeMap<String, String> params) {
    final StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (Map.Entry<String, String> param : params.entrySet()) {
        if (!first) {
            sb.append("%26"); // encoded '&' separator between pairs
        }
        first = false;
        sb.append(UrlUtils.percentEncode(UrlUtils.percentEncode(param.getKey())))
                .append("%3D") // encoded '='
                .append(UrlUtils.percentEncode(UrlUtils.percentEncode(param.getValue())));
    }
    return sb.toString();
}
 
源代码9 项目: Skript   文件: VariablesMap.java
/**
 * Recursively removes from {@code hashMap} every variable stored under
 * {@code parent}, descending into nested TreeMaps (list variables).
 */
@SuppressWarnings("unchecked")
void deleteFromHashMap(final String parent, final TreeMap<String, Object> current) {
	for (final Entry<String, Object> entry : current.entrySet()) {
		final String name = entry.getKey();
		if (name == null)
			continue;
		final String fullName = parent + Variable.SEPARATOR + name;
		hashMap.remove(fullName);
		final Object value = entry.getValue();
		if (value instanceof TreeMap) {
			deleteFromHashMap(fullName, (TreeMap<String, Object>) value);
		}
	}
}
 
源代码10 项目: hbase   文件: TestSimpleLoadBalancer.java
/**
 * Test the load balancing algorithm.
 *
 * Invariant is that all servers should be hosting either floor(average) or
 * ceiling(average) at both table level and cluster level
 */
@Test
public void testBalanceClusterOverall() throws Exception {
  Map<TableName, Map<ServerName, List<RegionInfo>>> clusterLoad = new TreeMap<>();
  for (int[] mockCluster : clusterStateMocks) {
    // Build a mock cluster from the spec (30 passed as the region-count parameter)
    Map<ServerName, List<RegionInfo>> clusterServers = mockClusterServers(mockCluster, 30);
    List<ServerAndLoad> clusterList = convertToList(clusterServers);
    clusterLoad.put(TableName.valueOf(name.getMethodName()), clusterServers);
    // Split the mock servers into per-table server maps
    HashMap<TableName, TreeMap<ServerName, List<RegionInfo>>> result =
        mockClusterServersWithTables(clusterServers);
    loadBalancer.setClusterLoad(clusterLoad);
    List<RegionPlan> clusterplans = new ArrayList<>();
    // NOTE(review): regionAmountList is never populated or read in this test — appears unused
    List<Pair<TableName, Integer>> regionAmountList = new ArrayList<>();
    for (Map.Entry<TableName, TreeMap<ServerName, List<RegionInfo>>> mapEntry : result
        .entrySet()) {
      TableName tableName = mapEntry.getKey();
      TreeMap<ServerName, List<RegionInfo>> servers = mapEntry.getValue();
      List<ServerAndLoad> list = convertToList(servers);
      LOG.info("Mock Cluster : " + printMock(list) + " " + printStats(list));
      // Balance each table individually and accumulate the per-table plans
      List<RegionPlan> partialplans = loadBalancer.balanceTable(tableName, servers);
      if(partialplans != null) clusterplans.addAll(partialplans);
      List<ServerAndLoad> balancedClusterPerTable = reconcile(list, partialplans, servers);
      LOG.info("Mock Balance : " + printMock(balancedClusterPerTable));
      // Table-level invariant: each server hosts floor(avg) or ceil(avg) regions
      assertClusterAsBalanced(balancedClusterPerTable);
      // Return mock regions and servers to their pools for the next iteration
      for (Map.Entry<ServerName, List<RegionInfo>> entry : servers.entrySet()) {
        returnRegions(entry.getValue());
        returnServer(entry.getKey());
      }
    }
    // Cluster-level invariant across all tables combined
    List<ServerAndLoad> balancedCluster = reconcile(clusterList, clusterplans, clusterServers);
    assertTrue(assertClusterOverallAsBalanced(balancedCluster, result.keySet().size()));
  }
}
 
源代码11 项目: raft-java   文件: Snapshot.java
/**
 * Closes the underlying random-access file of every snapshot data file in
 * the map. A failed close is logged and does not abort the remaining closes.
 */
public void closeSnapshotDataFiles(TreeMap<String, SnapshotDataFile> snapshotDataFileMap) {
    for (SnapshotDataFile dataFile : snapshotDataFileMap.values()) {
        try {
            dataFile.randomAccessFile.close();
        } catch (IOException ex) {
            LOG.warn("close snapshot files exception:", ex);
        }
    }
}
 
源代码12 项目: Concurnas   文件: Cloner.java
/**
 * Deep-copies a TreeMap, cloning each key and value through the cloner and
 * reusing the source comparator so ordering is preserved.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Object clone(final Object t, final Cloner cloner, final CopyTracker tracker) throws IllegalAccessException
{
	final TreeMap<Object, Object> source = (TreeMap) t;
	final TreeMap copy = new TreeMap(source.comparator());
	for (final Map.Entry entry : source.entrySet())
	{
		copy.put(cloner.clone(tracker, entry.getKey(), null),
				cloner.clone(tracker, entry.getValue(), null));
	}
	return copy;
}
 
/**
 * Find best match in a TreeMap with bag-of-words approach.
 *
 * Scores each key by word inclusion in both directions; an exact string
 * match short-circuits the search with certainty 1.0.
 *
 * @param search - string to search
 * @param map - TreeMap containing all sentences to match
 * @return - value(index 0), certainty(index 1) and key (index 2) of best match key
 */
public static ArrayList<Object> find_best_match_bw(String search, TreeMap<String, String> map){
	ArrayList<Object> result = new ArrayList<Object>();
	double first_match_score = 0.0;
	double best_match_score = 0.0;
	String best_match_key = "";
	String best_match_value = "";
	//run through all sentences
	for(Entry<String, String> entry : map.entrySet()) {
		String key = entry.getKey();
		// forward inclusion: how much of 'search' is contained in 'key'
		double first_score = StringCompare.wordInclusion(search, key);
		if (first_score >= first_match_score){
			// combined score: inclusion in both directions
			double this_score = first_score * StringCompare.wordInclusion(key, search);
			if (this_score > best_match_score){
				first_match_score = first_score;
				best_match_score = this_score;
				best_match_key = key;
				best_match_value = entry.getValue();
			}
			//same words but same sentence?
			else if (this_score == 1.0){
				// Fixed: plain equality instead of key.matches(Pattern.quote(search)),
				// which compiled a fresh regex per comparison only to test a literal match.
				if (key.equals(search)){
					best_match_score = 1.0;
					best_match_key = key;
					best_match_value = entry.getValue();
					break;
				}
			}
		}
	}
	//decide later if you want to keep this result by checking the certainty
	result.add(best_match_value);
	result.add(best_match_score);
	result.add(best_match_key);

	return result;
}
 
源代码14 项目: doctorkafka   文件: KafkaCluster.java
/**
 * Renders the broker map sorted by broker id, one "   id : broker" line per entry.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  for (Map.Entry<Integer, KafkaBroker> broker : new TreeMap<>(brokers).entrySet()) {
    sb.append("   ").append(broker.getKey()).append(" : ");
    sb.append(broker.getValue()).append("\n");
  }
  return sb.toString();
}
 
/**
 * Writes the value of the named RuntimeMXBean attribute into {@code store}.
 * A readable runtime attribute reaching the final branch means the handler
 * for it is missing, which is treated as a bug.
 *
 * @param name attribute name being read
 * @param store model node that receives the value
 */
static void storeResult(final String name, final ModelNode store) {

        if (PlatformMBeanConstants.OBJECT_NAME.getName().equals(name)) {
            store.set(ManagementFactory.RUNTIME_MXBEAN_NAME);
        } else if (ModelDescriptionConstants.NAME.equals(name)) {
           String runtimeName;
           try {
              runtimeName = ManagementFactory.getRuntimeMXBean().getName();
           } catch (ArrayIndexOutOfBoundsException e) {
              // Workaround for OSX issue
              String localAddr;
              try {
                 localAddr = InetAddress.getByName(null).toString();
              } catch (UnknownHostException uhe) {
                 localAddr = "localhost";
              }
              // Synthesize a "<number>@<host>"-style name when the MXBean name fails
              runtimeName = new Random().nextInt() + "@" + localAddr;
           }
           store.set(runtimeName);
        } else if (PlatformMBeanConstants.VM_NAME.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getVmName());
        } else if (PlatformMBeanConstants.VM_VENDOR.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getVmVendor());
        } else if (PlatformMBeanConstants.VM_VERSION.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getVmVersion());
        } else if (PlatformMBeanConstants.SPEC_NAME.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getSpecName());
        } else if (PlatformMBeanConstants.SPEC_VENDOR.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getSpecVendor());
        } else if (PlatformMBeanConstants.SPEC_VERSION.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getSpecVersion());
        } else if (PlatformMBeanConstants.MANAGEMENT_SPEC_VERSION.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getManagementSpecVersion());
        } else if (PlatformMBeanConstants.CLASS_PATH.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getClassPath());
        } else if (PlatformMBeanConstants.LIBRARY_PATH.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getLibraryPath());
        } else if (PlatformMBeanConstants.BOOT_CLASS_PATH_SUPPORTED.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().isBootClassPathSupported());
        } else if (PlatformMBeanConstants.BOOT_CLASS_PATH.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getBootClassPath());
        } else if (PlatformMBeanConstants.INPUT_ARGUMENTS.equals(name)) {
            // JVM input arguments are a list: reset, then append each argument
            store.setEmptyList();
            for (String arg : ManagementFactory.getRuntimeMXBean().getInputArguments()) {
                store.add(arg);
            }
        } else if (PlatformMBeanConstants.UPTIME.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getUptime());
        } else if (PlatformMBeanConstants.START_TIME.equals(name)) {
            store.set(ManagementFactory.getRuntimeMXBean().getStartTime());
        } else if (PlatformMBeanConstants.SYSTEM_PROPERTIES.equals(name)) {
            store.setEmptyObject();
            // TreeMap gives deterministic (sorted) property order in the output
            final TreeMap<String, String> sorted = new TreeMap<>(ManagementFactory.getRuntimeMXBean().getSystemProperties());
            for (Map.Entry<String, String> prop : sorted.entrySet()) {
                final ModelNode propNode = store.get(prop.getKey());
                if (prop.getValue() != null) {
                    propNode.set(prop.getValue());
                }
            }
        } else if (RuntimeResourceDefinition.RUNTIME_READ_ATTRIBUTES.contains(name)
                || RuntimeResourceDefinition.RUNTIME_METRICS.contains(name)) {
            // Bug: every readable runtime attribute should have been handled above
            throw PlatformMBeanLogger.ROOT_LOGGER.badReadAttributeImpl(name);
        }

    }
 
/**
 * ETCD DB migration: for every resource whose flags include DISKLESS, sets
 * the layer-specific diskless flag — DRBD_DISKLESS for DRBD layer entries,
 * NVME_INITIATOR for NVMe layer entries — on the resource's flags key.
 */
@Override
public void migrate(EtcdTransaction tx)
{
    // Layer-resource rows; each column arrives as a separate etcd entry
    TreeMap<String, String> allRscLayer = tx.get("LINSTOR/LAYER_RESOURCE_IDS", true);

    // key: <nodeName, rscName>, value: <etcdKey, rscFlag>
    HashMap<Pair<String, String>, Pair<String, Long>> rscMap = new HashMap<>();
    {
        TreeMap<String, String> allRsc = tx.get("LINSTOR/RESOURCES", true);
        for (Entry<String, String> rsc : allRsc.entrySet())
        {
            String key = rsc.getKey();
            // Only the flags column of each resource row is relevant here
            if (key.endsWith(RSC_FLAGS))
            {
                String nodeName;
                String rscName;
                {
                    // Split the composed primary key into node and resource name
                    String composedKey = extractPrimaryKey(key);
                    String[] split = EtcdUtils.splitPks(composedKey, false);
                    nodeName = split[0];
                    rscName = split[1];
                }
                long flags = Long.parseLong(rsc.getValue());

                rscMap.put(new Pair<>(nodeName, rscName), new Pair<>(key, flags));
            }
        }
    }

    // Re-assemble the three columns (node name, rsc name, layer kind) of each
    // layer resource id from its per-column etcd entries
    HashMap<Long, LayerRscHolder> rscDataMap = new HashMap<>();
    for (Entry<String, String> rscLayer : allRscLayer.entrySet())
    {
        String etcdKey = rscLayer.getKey();
        long layerRscId = Long.parseLong(extractPrimaryKey(etcdKey));

        LayerRscHolder layerRscHolder = rscDataMap.get(layerRscId);
        {
            if (layerRscHolder == null) {
                layerRscHolder = new LayerRscHolder();
                rscDataMap.put(layerRscId, layerRscHolder);
            }
        }
        String columnName = getColumnName(etcdKey);
        switch (columnName)
        {
            case NODE_NAME:
                layerRscHolder.nodeName = rscLayer.getValue();
                break;
            case RSC_NAME:
                layerRscHolder.rscName = rscLayer.getValue();
                break;
            case LAYER_RESOURCE_KIND:
                layerRscHolder.kind = rscLayer.getValue();
                break;
        }
        // Once all three columns are known, update the resource's flags if needed
        if (layerRscHolder.isComplete())
        {
            Pair<String, Long> rscKeyAndFlag = rscMap.get(
                new Pair<>(
                    layerRscHolder.nodeName,
                    layerRscHolder.rscName
                )
            );
            long flag = rscKeyAndFlag.b;
            if ((flag & FLAG_DISKLESS) == FLAG_DISKLESS)
            {
                boolean update = false;
                if (layerRscHolder.kind.equals(KIND_DRBD))
                {
                    flag |= FLAG_DRBD_DISKLESS;
                    update = true;
                }
                else
                if (layerRscHolder.kind.equals(KIND_NVME))
                {
                    flag |= FLAG_NVME_INITIATOR;
                    update = true;
                }
                if (update)
                {
                    // Write the augmented flags back under the original etcd key
                    tx.put(rscKeyAndFlag.a, Long.toString(flag));
                }
            }
        }
    }
}
 
/**
 * Loads execution data produced by this execution's dependencies, dedups it
 * per property name (the TreeMap keeps one entry per property), attaches the
 * result to the execution and persists every retained entry.
 *
 * @param testCaseExecution execution whose dependency data is loaded
 * @throws CerberusException on data-access failure
 */
@Override
public void loadTestCaseExecutionDataFromDependencies(final TestCaseExecution testCaseExecution) throws CerberusException {

    // We get the full list of ExecutionData from dependencies.
    List<TestCaseExecutionData> testCaseExecutionData = testCaseExecutionDataDao.readTestCaseExecutionDataFromDependencies(testCaseExecution);

    // We then dedup it per property name.
    TreeMap<String, TestCaseExecutionData> newExeDataMap = new TreeMap<>();
    for (TestCaseExecutionData data : testCaseExecutionData) {
        // Re-stamp the entry: note the source execution id in the message, then
        // take over this execution's id
        data.setPropertyResultMessage(new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_RETRIEVE_BY_DEPENDENCY).resolveDescription("EXEID", String.valueOf(data.getId())));
        data.setId(testCaseExecution.getId());
        if (!StringUtil.isNullOrEmpty(data.getJsonResult())) {
            // Rebuild the raw data-lib rows from the stored JSON array
            try {
                JSONArray array = new JSONArray(data.getJsonResult());
                List<HashMap<String, String>> libRawData = new ArrayList<>();
                for (int i = 0; i < array.length(); i++) {
                    HashMap<String, String> hashJson = new HashMap<>();
                    JSONObject obj = array.getJSONObject(i);
                    Iterator<?> nameItr = obj.keys();
                    while (nameItr.hasNext()) {
                        String name = (String) nameItr.next();
                        hashJson.put(name, obj.getString(name));
                    }
                    libRawData.add(hashJson);
                }
                data.setDataLibRawData(libRawData);
            } catch (JSONException ex) {
                LOG.warn("Exception when converting JSON Object '" + data.getJsonResult() + "' from database", ex);
                data.setDataLibRawData(null);
            }
        }

        newExeDataMap.put(data.getProperty(), data);
    }

    // And finally set the dedup result to execution object and also record all results to database.
    testCaseExecution.setTestCaseExecutionDataMap(newExeDataMap);
    // Fixed: iterate values directly — the previous entrySet loop extracted a
    // key local that was never used
    for (TestCaseExecutionData value : newExeDataMap.values()) {
        testCaseExecutionDataDao.create(value);
    }

}
 
源代码18 项目: jelectrum   文件: BlockChainCache.java
/**
 * Updates the height-to-block-hash cache for the given new chain head.
 * Walks backwards from the head until it reaches a block already cached at
 * its height (or the genesis block), counting re-orged heights along the way.
 */
public void update(Jelectrum jelly, StoredBlock new_head)
    throws org.bitcoinj.store.BlockStoreException
{
  last_head = new_head.getHeader().getHash();
  //event_log.log("BlockChainCache: chain update, new head: " + new_head.getHeader().getHash() + " - " + new_head.getHeight());

  Sha256Hash genesis_hash = jelly.getNetworkParameters().getGenesisBlock().getHash();

  StoredBlock cur = new_head;

  // Heights whose cached hash must change, collected before any writes
  TreeMap<Integer, Sha256Hash> to_write = new TreeMap<>();

  int reorg=0;

  while(true)
  {
    int height = cur.getHeight();
    Sha256Hash curr_hash = cur.getHeader().getHash();

    Sha256Hash exist_hash = getBlockHashAtHeight(height);
    // A different hash already cached at this height means this block replaces
    // one from an abandoned fork
    if ((exist_hash != null) && (!exist_hash.equals(curr_hash)))
    {
      reorg++;
    }

    // Reached an already-cached block: everything below it is unchanged
    if (curr_hash.equals(exist_hash)) break;

    to_write.put(height, curr_hash);
    if (curr_hash.equals(genesis_hash)) break;

    cur = cur.getPrev(store);

  }
  if (to_write.size() > 1)
  {
    event_log.log("BlockChainCache: adding " + to_write.size() + " to height map");
  }

  /**
   * Write them out in ascending height order to make sure this is recoverable
   * if interrupted in the middle
   */
  for(Map.Entry<Integer, Sha256Hash> me : to_write.entrySet())
  {
    height_map.put(me.getKey(), me.getValue());
  }
  if (reorg > 0)
  {
    event_log.alarm("BlockChainCache: re-org of " + reorg + " blocks found");
  }

}
 
源代码19 项目: lams   文件: AssessmentServiceImpl.java
/**
 * Builds a histogram of user grades: bucket label -> number of users whose
 * grade falls in that bucket.
 *
 * When the grade span is at most 10, every integer grade gets its own
 * bucket; otherwise the span is split into roughly {@code numBuckets}
 * equal-width buckets.
 *
 * @param userDtos users whose grades are tallied
 * @param minGrade lowest grade of the session
 * @param maxGrade highest grade of the session
 * @param numBuckets desired bucket count (null defaults to 10)
 * @return insertion-ordered map of bucket label to user count
 */
private LinkedHashMap<String, Integer> getMarksSummaryForSession(List<AssessmentUserDTO> userDtos, float minGrade,
	float maxGrade, Integer numBuckets) {

    LinkedHashMap<String, Integer> summary = new LinkedHashMap<>();
    TreeMap<Integer, Integer> inProgress = new TreeMap<>();

    if (numBuckets == null) {
	numBuckets = 10;
    }

    int bucketSize = 1;
    int intMinGrade = (int) Math.floor(minGrade);
    float gradeDifference = maxGrade - minGrade;
    if (gradeDifference <= 10) {
	// Small span: one bucket per integer grade
	for (int i = intMinGrade; i <= (int) Math.ceil(maxGrade); i++) {
	    inProgress.put(i, 0);
	}
    } else {
	int intGradeDifference = (int) Math.ceil(gradeDifference);
	// Fixed: cast to double before dividing — the previous integer division
	// made Math.ceil a no-op, producing a too-small bucket size and thus
	// more buckets than requested.
	bucketSize = (int) Math.ceil((double) intGradeDifference / numBuckets);
	for (int i = intMinGrade; i <= maxGrade; i = i + bucketSize) {
	    inProgress.put(i, 0);
	}
    }

    // Tally each user's grade into its bucket
    for (AssessmentUserDTO userDto : userDtos) {
	float grade = userDto.getGrade();
	int bucketStart = intMinGrade;
	int bucketStop = bucketStart + bucketSize;
	boolean looking = true;
	while (bucketStart <= maxGrade && looking) {
	    if (grade >= bucketStart && grade < bucketStop) {
		inProgress.put(bucketStart, inProgress.get(bucketStart) + 1);
		looking = false;
	    } else {
		bucketStart = bucketStop;
		bucketStop = bucketStart + bucketSize;
	    }
	}
    }

    // Convert bucket start values into human-readable labels
    for (Map.Entry<Integer, Integer> entry : inProgress.entrySet()) {
	String key;
	if (bucketSize == 1) {
	    key = entry.getKey().toString();
	} else {
	    if (maxGrade >= entry.getKey() && maxGrade <= entry.getKey() + bucketSize - 1) {
		// Last bucket: cap the label at the real maximum grade
		if ((int) maxGrade == entry.getKey()) {
		    key = NumberUtil.formatLocalisedNumber(maxGrade, (Locale) null, 2);
		} else {
		    key = new StringBuilder().append(entry.getKey()).append(" - ")
			    .append(NumberUtil.formatLocalisedNumber(maxGrade, (Locale) null, 2)).toString();
		}
	    } else {
		key = new StringBuilder().append(entry.getKey()).append(" - ")
			.append(entry.getKey() + bucketSize - .01).toString();
	    }
	}
	summary.put(key, entry.getValue());
    }

    return summary;
}
 
源代码20 项目: medialibrary   文件: TagClustering.java
/**
 * Groups every media item of the base set by tag. Each distinct tag becomes
 * one cluster (sorted by tag name via the TreeMap); items without any tags
 * are collected into a trailing cluster named by {@code mUntaggedString}.
 */
@Override
public void run(MediaSet baseSet) {
    final TreeMap<String, ArrayList<Path>> byTag =
            new TreeMap<String, ArrayList<Path>>();
    final ArrayList<Path> untagged = new ArrayList<Path>();

    baseSet.enumerateTotalMediaItems(new MediaSet.ItemConsumer() {
        @Override
        public void consume(int index, MediaItem item) {
            Path path = item.getPath();

            String[] tags = item.getTags();
            if (tags == null || tags.length == 0) {
                untagged.add(path);
                return;
            }
            // add the item's path to the bucket of every tag it carries
            for (String tag : tags) {
                ArrayList<Path> bucket = byTag.get(tag);
                if (bucket == null) {
                    bucket = new ArrayList<Path>();
                    byTag.put(tag, bucket);
                }
                bucket.add(path);
            }
        }
    });

    int tagCount = byTag.size();
    mClusters = new ArrayList<ArrayList<Path>>();
    // reserve one extra name slot only when an untagged cluster is needed
    mNames = new String[tagCount + ((untagged.size() > 0) ? 1 : 0)];
    int pos = 0;
    for (Map.Entry<String, ArrayList<Path>> tagEntry : byTag.entrySet()) {
        mNames[pos++] = tagEntry.getKey();
        mClusters.add(tagEntry.getValue());
    }
    if (untagged.size() > 0) {
        mNames[pos++] = mUntaggedString;
        mClusters.add(untagged);
    }
}