Listed below are example usages of java.util.LinkedHashMap#size(). You can follow the links to view the source code on GitHub, or leave a comment on the right.
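As a quick orientation before the project snippets, size() reports the number of key-value mappings, and LinkedHashMap additionally keeps a predictable iteration order. A minimal standalone sketch, not taken from any of the projects below:

import java.util.LinkedHashMap;

public class SizeDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3); // re-putting an existing key overwrites; size() is unchanged
        System.out.println(map.size()); // prints 2
        map.remove("b");
        System.out.println(map.size()); // prints 1
    }
}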
/**
 * Writes {@link LinkedHashMap}.
 *
 * @param map Map.
 * @param loadFactorFieldOff Load factor field offset.
 * @param accessOrderFieldOff Access order field offset.
 * @param set Whether writing underlying map from {@link LinkedHashSet}.
 * @throws IOException In case of error.
 */
@SuppressWarnings("TypeMayBeWeakened")
void writeLinkedHashMap(LinkedHashMap<?, ?> map, long loadFactorFieldOff, long accessOrderFieldOff, boolean set)
        throws IOException {
    int size = map.size();
    writeInt(size);
    writeFloat(getFloat(map, loadFactorFieldOff));
    if (accessOrderFieldOff >= 0)
        writeBoolean(getBoolean(map, accessOrderFieldOff));
    else
        writeBoolean(false);
    for (Map.Entry<?, ?> e : map.entrySet()) {
        writeObject0(e.getKey());
        if (!set)
            writeObject0(e.getValue());
    }
}
public ArrayList<LinearConstraint> getFullPolytope(RCTuple conf) {
    // Includes both atomic contact and voxel boundary constraints
    // DEBUG!!! I think full polytope already has voxel constr??
    LinkedHashMap<DegreeOfFreedom, double[]> DOFBounds = calcDOFBounds(conf);
    ArrayList<DegreeOfFreedom> DOFs = new ArrayList<>(DOFBounds.keySet());
    // Much like VoxelVDWListChecker.voxelPolygon
    ArrayList<LinearConstraint> polytope = getFullStericPolytope(conf, DOFs);
    if (polytope == null)
        return null;
    int dof = 0;
    for (DegreeOfFreedom curDOF : DOFs) {
        double[] unitVec = new double[DOFBounds.size()];
        unitVec[dof] = 1;
        double[] bounds = DOFBounds.get(curDOF);
        polytope.add(new LinearConstraint(unitVec, Relationship.GEQ, bounds[0]));
        polytope.add(new LinearConstraint(unitVec, Relationship.LEQ, bounds[1]));
        dof++;
    }
    return polytope;
}
public void putProfile(String name, LinkedHashMap<String, String> commands) {
    try {
        JSONObject items = new JSONObject();
        items.put("name", name);
        JSONArray commandArray = new JSONArray();
        // Snapshot keys and values once; LinkedHashMap keeps them in matching
        // insertion order, so paths[i] and cmds[i] belong to the same entry.
        Object[] paths = commands.keySet().toArray();
        Object[] cmds = commands.values().toArray();
        for (int i = 0; i < commands.size(); i++) {
            JSONObject item = new JSONObject();
            item.put("path", paths[i]);
            item.put("command", cmds[i]);
            commandArray.put(item);
        }
        items.put("commands", commandArray);
        items.put("id", UUID.randomUUID());
        putItem(items);
    } catch (JSONException e) {
        e.printStackTrace();
    }
}
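For reference, a hypothetical call and the shape of the JSON it builds (the key order in the comment is illustrative; org.json's JSONObject does not guarantee field ordering):

LinkedHashMap<String, String> commands = new LinkedHashMap<>();
commands.put("/usr/bin", "ls -la");
putProfile("default", commands);
// putItem receives an object shaped like:
// {"name":"default","commands":[{"path":"/usr/bin","command":"ls -la"}],"id":"<random UUID>"}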
/**
 * Converts a name=value parameter map to a query string (a=1,b=2 => a=1&b=2).
 *
 * @param map The name=value pairs to join.
 * @return The joined string, or null if the map is null or empty.
 */
public static String linkedHashMapToString(LinkedHashMap<String, String> map) {
    if (map != null && map.size() > 0) {
        // StringBuilder avoids allocating a new String on every concatenation.
        StringBuilder result = new StringBuilder();
        for (Map.Entry<String, String> entry : map.entrySet()) {
            if (result.length() > 0) {
                result.append('&');
            }
            result.append(String.format("%s=%s", entry.getKey(), entry.getValue()));
        }
        return result.toString();
    }
    return null;
}
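A hypothetical call matching the example in the Javadoc; because LinkedHashMap preserves insertion order, the output is deterministic:

LinkedHashMap<String, String> params = new LinkedHashMap<>();
params.put("a", "1");
params.put("b", "2");
System.out.println(linkedHashMapToString(params)); // prints "a=1&b=2"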
private static NodeIdentifierWithPredicates reorderPredicates(final List<QName> keys,
        final NodeIdentifierWithPredicates arg) {
    if (Iterables.elementsEqual(keys, arg.keySet())) {
        // Iteration order matches key order, reuse the identifier
        return arg;
    }
    // We care about iteration order here!
    final LinkedHashMap<QName, Object> map = Maps.newLinkedHashMapWithExpectedSize(arg.size());
    for (QName qname : keys) {
        final Object value = arg.getValue(qname);
        if (value != null) {
            map.put(qname, value);
        }
    }
    if (map.size() < arg.size()) {
        // Okay, this should not happen, but let's handle that anyway
        LOG.debug("Extra predicates in {} while expecting {}", arg, keys);
        for (Entry<QName, Object> entry : arg.entrySet()) {
            map.putIfAbsent(entry.getKey(), entry.getValue());
        }
    }
    // This copy retains iteration order and since we have more than one argument, it should always be
    // an ImmutableOffsetMap -- which is guaranteed to be taken as-is
    final Map<QName, Object> copy = ImmutableOffsetMap.orderedCopyOf(map);
    verify(copy instanceof ImmutableOffsetMap);
    return NodeIdentifierWithPredicates.of(arg.getNodeType(), (ImmutableOffsetMap<QName, Object>) copy);
}
private static LinkedHashMap<String, Field> deepCopy(LinkedHashMap<String, Field> listMap) {
    LinkedHashMap<String, Field> copy = new LinkedHashMap<>(listMap.size());
    for (Map.Entry<String, Field> entry : listMap.entrySet()) {
        Field field = entry.getValue();
        Utils.checkNotNull(field, Utils.formatL("ListMap has null element at '{}' pos", entry.getKey()));
        copy.put(entry.getKey(), field.clone());
    }
    return copy;
}
/**
 * Creates a {@link PojoSerializerSnapshotData} from configuration of a {@link PojoSerializer}.
 *
 * <p>This factory method is meant to be used in regular write paths, i.e. when taking a snapshot
 * of the {@link PojoSerializer}. All registered and non-registered subclass classes are present.
 * Some POJO fields may be absent if the originating {@link PojoSerializer} was a restored one
 * with already missing fields and was never replaced by a new {@link PojoSerializer} (i.e.
 * because the serialized old data was never accessed).
 */
static <T> PojoSerializerSnapshotData<T> createFrom(
        Class<T> pojoClass,
        Field[] fields,
        TypeSerializer<?>[] fieldSerializers,
        LinkedHashMap<Class<?>, TypeSerializer<?>> registeredSubclassSerializers,
        Map<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializers) {
    final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots = new LinkedOptionalMap<>(fields.length);
    for (int i = 0; i < fields.length; i++) {
        Field field = fields[i];
        String fieldName = (field == null) ? getDummyNameForMissingField(i) : field.getName();
        fieldSerializerSnapshots.put(fieldName, field, TypeSerializerUtils.snapshotBackwardsCompatible(fieldSerializers[i]));
    }
    LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots = new LinkedHashMap<>(registeredSubclassSerializers.size());
    registeredSubclassSerializers.forEach((k, v) -> registeredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));
    Map<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots = new HashMap<>(nonRegisteredSubclassSerializers.size());
    nonRegisteredSubclassSerializers.forEach((k, v) -> nonRegisteredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));
    return new PojoSerializerSnapshotData<>(
            pojoClass,
            fieldSerializerSnapshots,
            optionalMapOf(registeredSubclassSerializerSnapshots, Class::getName),
            optionalMapOf(nonRegisteredSubclassSerializerSnapshots, Class::getName));
}
/** Evicts the eldest entries until the map is back under MAX_CACHE_SIZE. */
static void capCache(LinkedHashMap<String, ?> map) {
    // A LinkedHashMap's key iterator returns the eldest (first-inserted) keys first.
    Iterator<String> it = map.keySet().iterator();
    while (map.size() > MAX_CACHE_SIZE && it.hasNext()) {
        it.next();
        it.remove();
    }
}
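capCache works because a LinkedHashMap iterator hands back the eldest (first-inserted) keys first. An alternative, self-evicting design is to override removeEldestEntry so the map never exceeds the cap at all; this is a minimal sketch, not code from the source project:

// The map drops its eldest entry automatically whenever a put() pushes
// size() past MAX_CACHE_SIZE, making an explicit capCache() pass unnecessary.
LinkedHashMap<String, Object> cache = new LinkedHashMap<String, Object>() {
    @Override
    protected boolean removeEldestEntry(Map.Entry<String, Object> eldest) {
        return size() > MAX_CACHE_SIZE;
    }
};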
public void addSummary(JobConfiguration jobConfiguration, Object summary)
{
    LinkedHashMap<String, Object> summaries = summariesWithType.get(jobConfiguration.getJobType());
    if (summaries.size() >= MAX_POOL_TYPE_SIZE)
    {
        // Evict the eldest summary; insertion order puts it first in the key set.
        String key = (String) summaries.keySet().toArray()[0];
        summaries.remove(key);
    }
    summaries.put(jobConfiguration.getUid(), summary);
}
/**
 * For attribute valueLinkedMapStringBean serialization.
 */
public static byte[] serializeValueLinkedMapStringBean(LinkedHashMap<String, Bean64> value) {
    if (value == null) {
        return null;
    }
    KriptonJsonContext context = KriptonBinder.jsonBind();
    try (KriptonByteArrayOutputStream stream = new KriptonByteArrayOutputStream(); JacksonWrapperSerializer wrapper = context.createSerializer(stream)) {
        JsonGenerator jacksonSerializer = wrapper.jacksonGenerator;
        jacksonSerializer.writeStartObject();
        int fieldCount = 0;
        if (value != null) {
            fieldCount++;
            // write wrapper tag
            if (value.size() > 0) {
                jacksonSerializer.writeFieldName("element");
                jacksonSerializer.writeStartArray();
                for (Map.Entry<String, Bean64> item : value.entrySet()) {
                    jacksonSerializer.writeStartObject();
                    jacksonSerializer.writeStringField("key", item.getKey());
                    if (item.getValue() == null) {
                        jacksonSerializer.writeNullField("value");
                    } else {
                        jacksonSerializer.writeFieldName("value");
                        bean64BindMap.serializeOnJackson(item.getValue(), jacksonSerializer);
                    }
                    jacksonSerializer.writeEndObject();
                }
                jacksonSerializer.writeEndArray();
            } else {
                jacksonSerializer.writeNullField("element");
            }
        }
        jacksonSerializer.writeEndObject();
        jacksonSerializer.flush();
        return stream.toByteArray();
    } catch (Exception e) {
        e.printStackTrace();
        throw (new KriptonRuntimeException(e.getMessage()));
    }
}
public int flatNestedLinkedHashMap() {
    String notation = ".";
    ArrayList<String> list = Lists.newArrayList();
    list.add("AVX");
    Map<String, Object> map = Maps.newHashMap();
    map.put("name", "ABC");
    Map<String, Object> nestedParentMap = Maps.newHashMap();
    nestedParentMap.put("childString", "name");
    nestedParentMap.put("childList", list);
    nestedParentMap.put("childMap", map);
    LinkedHashMap<String, Object> value = CommonUtils.flatNestedLinkedHashMap(notation, nestedParentMap);
    CommonUtils.flatNestedMap(notation, nestedParentMap);
    return value.size();
}
/**
 * For attribute valueLinkedMapStringBean serialization.
 */
protected String serializeValueLinkedMapStringBean(LinkedHashMap<String, Bean64> value) {
    if (value == null) {
        return null;
    }
    KriptonJsonContext context = KriptonBinder.jsonBind();
    try (KriptonByteArrayOutputStream stream = new KriptonByteArrayOutputStream(); JacksonWrapperSerializer wrapper = context.createSerializer(stream)) {
        JsonGenerator jacksonSerializer = wrapper.jacksonGenerator;
        jacksonSerializer.writeStartObject();
        int fieldCount = 0;
        if (value != null) {
            fieldCount++;
            // write wrapper tag
            if (value.size() > 0) {
                jacksonSerializer.writeFieldName("valueLinkedMapStringBean");
                jacksonSerializer.writeStartArray();
                for (Map.Entry<String, Bean64> item : value.entrySet()) {
                    jacksonSerializer.writeStartObject();
                    jacksonSerializer.writeStringField("key", item.getKey());
                    if (item.getValue() == null) {
                        jacksonSerializer.writeNullField("value");
                    } else {
                        jacksonSerializer.writeFieldName("value");
                        bean64BindMap.serializeOnJackson(item.getValue(), jacksonSerializer);
                    }
                    jacksonSerializer.writeEndObject();
                }
                jacksonSerializer.writeEndArray();
            } else {
                jacksonSerializer.writeNullField("valueLinkedMapStringBean");
            }
        }
        jacksonSerializer.writeEndObject();
        jacksonSerializer.flush();
        return stream.toString();
    } catch (Exception e) {
        e.printStackTrace();
        throw (new KriptonRuntimeException(e.getMessage()));
    }
}
/**
 * Stores a map collection.
 *
 * @param key The storage key.
 * @param map The map to store.
 * @param <K> The map's key type.
 * @param <T> The map's value type.
 */
public static <K, T> void setMap(String key, LinkedHashMap<K, T> map) {
    // A single emptiness check suffices (size() < 1 is equivalent to isEmpty()).
    if (map == null || map.size() < 1) {
        return;
    }
    Gson gson = new Gson();
    String strJson = gson.toJson(map);
    editor.putString(key, strJson);
    editor.commit();
}
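A plausible companion getter, sketched under the assumption that a prefs object backs the editor above; the TypeToken is needed because generic type parameters are erased at runtime (this method is an illustration, not part of the original class):

public static LinkedHashMap<String, String> getMap(String key) {
    String strJson = prefs.getString(key, null); // 'prefs' is an assumed SharedPreferences field
    if (strJson == null) {
        return null;
    }
    // TypeToken captures the full generic type so Gson can rebuild the LinkedHashMap
    return new Gson().fromJson(strJson,
            new TypeToken<LinkedHashMap<String, String>>() {}.getType());
}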
private void trim(int scale) {
    LinkedHashMap<Point2i, Image> scaleEntry = pool.get(scale);
    if (scaleEntry.size() <= tileMap.getVisibleTiles() * poolSize) {
        return;
    }
    // Drop the eldest cached images until the pool is back within bounds.
    Iterator<Point2i> it = scaleEntry.keySet().iterator();
    while (it.hasNext() && scaleEntry.size() > tileMap.getVisibleTiles() * poolSize) {
        it.next();
        it.remove();
    }
}
@Override
public void onClick(DialogInterface dialog, int which) {
    if (which == 0) {
        BackupManager.makeBackup(getActivity());
    } else if (which == 1) {
        LinkedHashMap<File, String> filesMap = BackupManager.getAvailableBackups(getActivity());
        if (filesMap != null && filesMap.size() > 0) {
            new RestoreFragment(filesMap).show(getFragmentManager(), RestoreFragment.class.getName());
        } else {
            ToastUtils.show(getActivity(), R.string.message_no_backups);
        }
    }
}
/**
 * Redirects to another page.
 *
 * @param locationClass The target Activity class.
 * @param mReqParams Extra parameters as a key->value map.
 * @param _target When true, start the Activity in a new task; otherwise clear to the top.
 */
public void locationPage(Class<?> locationClass, LinkedHashMap<String, Object> mReqParams, boolean _target) {
    Intent intent = new Intent(this, locationClass);
    if (mReqParams != null && mReqParams.size() > 0) {
        Bundle bundle = new Bundle();
        for (String key : mReqParams.keySet()) {
            Object param = mReqParams.get(key);
            if (param instanceof Integer) {
                bundle.putInt(key, Integer.parseInt(param.toString()));
            } else if (param instanceof String) {
                bundle.putString(key, param.toString());
            } else if (param instanceof Double) {
                bundle.putDouble(key, (Double) param);
            } else if (param instanceof Float) {
                bundle.putFloat(key, (Float) param);
            } else if (param instanceof Long) {
                bundle.putLong(key, (Long) param);
            } else if (param instanceof Boolean) {
                bundle.putBoolean(key, (Boolean) param);
            } else if (param instanceof Date) {
                // Date is Serializable, so it can ride along in the Bundle.
                bundle.putSerializable(key, (Date) param);
            }
        }
        intent.putExtras(bundle);
    }
    if (_target) {
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    } else {
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    }
    intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION);
    this.startActivity(intent);
}
OptimizerResult(LinkedHashMap<Goal, ClusterModelStats> statsByGoalPriority,
                Set<String> violatedGoalNamesBeforeOptimization,
                Set<String> violatedGoalNamesAfterOptimization,
                Set<ExecutionProposal> proposals,
                BrokerStats brokerStatsBeforeOptimization,
                BrokerStats brokerStatsAfterOptimization,
                ModelGeneration modelGeneration,
                ClusterModelStats clusterModelStats,
                Map<Integer, String> capacityEstimationInfoByBrokerId,
                OptimizationOptions optimizationOptions,
                Map<String, Double> balancednessCostByGoal) {
    _clusterModelStatsComparatorByGoalName = new LinkedHashMap<>(statsByGoalPriority.size());
    _statsByGoalName = new LinkedHashMap<>(statsByGoalPriority.size());
    for (Map.Entry<Goal, ClusterModelStats> entry : statsByGoalPriority.entrySet()) {
        String goalName = entry.getKey().name();
        Goal.ClusterModelStatsComparator comparator = entry.getKey().clusterModelStatsComparator();
        _clusterModelStatsComparatorByGoalName.put(goalName, comparator);
        _statsByGoalName.put(goalName, entry.getValue());
    }
    _violatedGoalNamesBeforeOptimization = violatedGoalNamesBeforeOptimization;
    _violatedGoalNamesAfterOptimization = violatedGoalNamesAfterOptimization;
    _proposals = proposals;
    _brokerStatsBeforeOptimization = brokerStatsBeforeOptimization;
    _brokerStatsAfterOptimization = brokerStatsAfterOptimization;
    _modelGeneration = modelGeneration;
    _clusterModelStats = clusterModelStats;
    _capacityEstimationInfoByBrokerId = capacityEstimationInfoByBrokerId;
    _optimizationOptions = optimizationOptions;
    // Populate on-demand balancedness score before and after.
    _onDemandBalancednessScoreBefore = onDemandBalancednessScore(balancednessCostByGoal, _violatedGoalNamesBeforeOptimization);
    _onDemandBalancednessScoreAfter = onDemandBalancednessScore(balancednessCostByGoal, _violatedGoalNamesAfterOptimization);
}
/**
 * Port of the R call rowsum(rr, collapse) from residuals.coxph.S: sums the rows
 * of rr that belong to the same cluster.
 *
 * @param rr The per-sample residual matrix (one row per sample).
 * @param sets The cluster identifier for each row of rr.
 * @return A matrix with one row per cluster, holding the column sums of that cluster's rows.
 */
private static double[][] rowsum(double[][] rr, ArrayList<String> sets) throws Exception {
    LinkedHashMap<String, Double> sumMap = new LinkedHashMap<String, Double>();
    if (rr.length != sets.size()) {
        throw new Exception("Cluster values and samples are not of equal length: n=" + rr.length + " cluster length=" + sets.size());
    }
    double[][] sum = null;
    for (int j = 0; j < rr[0].length; j++) {
        for (int i = 0; i < sets.size(); i++) {
            String s = sets.get(i);
            Double v = sumMap.get(s); // get in order
            if (v == null) {
                v = 0.0;
            }
            v = v + rr[i][j];
            sumMap.put(s, v);
        }
        if (sum == null) {
            sum = new double[sumMap.size()][rr[0].length];
        }
        ArrayList<String> index = new ArrayList<String>(sumMap.keySet());
        // sorting does seem to make a difference in test cases at the .0000000001
        // ArrayList<Integer> in = new ArrayList<Integer>();
        // for (String s : index) {
        //     in.add(Integer.parseInt(s));
        // }
        // Collections.sort(index);
        for (int m = 0; m < index.size(); m++) {
            String key = index.get(m);
            sum[m][j] = sumMap.get(key);
        }
        sumMap.clear();
    }
    return sum;
}
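To make the semantics concrete, a worked example with hypothetical values: rows sharing a cluster label collapse into one row of column sums, in first-occurrence order:

double[][] rr = { {1, 2}, {3, 4}, {5, 6} };
ArrayList<String> sets = new ArrayList<>(Arrays.asList("A", "B", "A"));
// Rows 0 and 2 belong to cluster "A", row 1 to "B", so:
// rowsum(rr, sets) == { {6.0, 8.0}, {3.0, 4.0} }
double[][] collapsed = rowsum(rr, sets);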
private static DoubleMatrixDataset<String, String> generateRandomNormalizedPheno(LinkedHashMap<String, Integer> sampleHash, final int numberRandomPhenotypes) {
    LinkedHashMap<String, Integer> phenoHash = new LinkedHashMap<>();
    for (int i = 0; i < numberRandomPhenotypes; ++i) {
        phenoHash.put("RanPheno" + (i + 1), i);
    }
    DoubleMatrixDataset<String, String> phenoData = new DoubleMatrixDataset<>(sampleHash, phenoHash);
    DoubleMatrix2D phenoMatrix = phenoData.getMatrix();
    final int sampleCount = sampleHash.size();
    IntStream.range(0, numberRandomPhenotypes).parallel().forEach(pi -> {
        final ThreadLocalRandom rnd = ThreadLocalRandom.current();
        for (int s = 0; s < sampleCount; ++s) {
            phenoMatrix.setQuick(s, pi, rnd.nextGaussian());
        }
    });
    phenoData.normalizeColumns();
    return phenoData;
}
/**
 * Return a copy of the source list.
 *
 * @param src
 *            The source list.
 * @param variablesToKeep
 *            When non-<code>null</code>, only the bindings for the
 *            variables listed in this array will be copied.
 *
 * @return The copy.
 */
private LinkedHashMap<IVariable, IConstant> copy(
        final LinkedHashMap<IVariable, IConstant> src,
        final IVariable[] variablesToKeep) {
    final LinkedHashMap<IVariable, IConstant> dst = new LinkedHashMap<IVariable, IConstant>(
            variablesToKeep != null ? variablesToKeep.length : src.size());
    final Iterator<Map.Entry<IVariable, IConstant>> itr = src.entrySet().iterator();
    while (itr.hasNext()) {
        final Map.Entry<IVariable, IConstant> e = itr.next();
        boolean keep = true;
        if (variablesToKeep != null) {
            keep = false;
            for (IVariable<?> x : variablesToKeep) {
                if (x == e.getKey()) {
                    keep = true;
                    break;
                }
            }
        }
        if (keep)
            dst.put(e.getKey(), e.getValue());
    }
    return dst;
}