Listed below are example usages of java.util.Map#replaceAll(); follow the link to view the source code on GitHub, or post a comment in the panel on the right.
/**
 * Demonstrates the three replacement operations on java.util.Map:
 * replace(key, value), replace(key, oldValue, newValue) and replaceAll(BiFunction).
 */
public static void main(String[] args) {
    Map<Integer, Melon> mapOfMelon = new HashMap<>();
    mapOfMelon.put(1, new Melon("Apollo", 3000));
    mapOfMelon.put(2, new Melon("Jade Dew", 3500));
    mapOfMelon.put(3, new Melon("Cantaloupe", 1500));

    System.out.println("Replace by key\n--------------");
    System.out.println("Initial map: " + mapOfMelon);
    // replace(K, V) returns the previous value mapped to the key (or null if absent).
    Melon melon1 = mapOfMelon.replace(2, new Melon("Gac", 1000));
    System.out.println("Replaced melon from key 2: " + melon1);
    System.out.println("Resulted map: " + mapOfMelon);

    System.out.println("\nReplace by key and value\n--------------");
    // replace(K, V, V) only swaps when the current value equals the given old value
    // (relies on Melon implementing equals()).
    boolean melon2 = mapOfMelon.replace(1, new Melon("Apollo", 3000), new Melon("Bitter", 4300));
    // Fixed typo in the printed label: "Applo" -> "Apollo".
    System.out.println("Replaced melon from key 1 and value Apollo(3000g): " + melon2);
    System.out.println("Resulted map: " + mapOfMelon);

    System.out.println("\nReplace via BiFunction\n--------------");
    // Cap every melon's weight at 1000 grams.
    BiFunction<Integer, Melon, Melon> function
            = (k, v) -> v.getWeight() > 1000 ? new Melon(v.getType(), 1000) : v;
    mapOfMelon.replaceAll(function);
    System.out.println("Resulted map: " + mapOfMelon);
}
@Test(dataProvider = "Map<IntegerEnum,String> rw=true keys=all values=all")
public static void testReplaceAll(String description, Map<IntegerEnum, String> map) {
    // One slot per key ordinal; records which key the remapping function saw.
    IntegerEnum[] seenKeys = new IntegerEnum[map.size()];
    // Collects every replacement value produced by the remapping function.
    Set<String> replacements = new HashSet<>(map.size());
    map.replaceAll((key, value) -> {
        // A null key stands in for ordinal 0.
        int slot = (key == null) ? 0 : key.ordinal();
        assertNull(seenKeys[slot]);
        // Store KEYS[0] for slot 0 so the later array comparison against KEYS holds.
        seenKeys[slot] = (slot == 0) ? KEYS[0] : key;
        assertSame(value, map.get(key));
        String updated = value + " replaced";
        replacements.add(updated);
        return updated;
    });
    // Every key must have been visited exactly once, and the map's values must be
    // exactly the set of produced replacements.
    assertEquals(KEYS, seenKeys, description);
    assertEquals(map.values().size(), replacements.size(), description + replacements);
    assertTrue(replacements.containsAll(map.values()), description + " : " + replacements + " != " + map.values());
    assertTrue(map.values().containsAll(replacements), description + " : " + replacements + " != " + map.values());
}
// Replaces GraphQLTypeReference placeholders inside the covariant output type mappings
// with the concrete types found in resolvedTypes, dropping entries that could not be
// resolved at all.
void resolveTypeReferences(Map<String, GraphQLNamedType> resolvedTypes) {
for (Map<String, MappedType> covariantTypes : this.covariantOutputTypes.values()) {
// Unresolvable keys are collected here and removed after iteration to avoid
// ConcurrentModificationException on the entry set.
Set<String> toRemove = new HashSet<>();
for (Map.Entry<String, MappedType> entry : covariantTypes.entrySet()) {
if (entry.getValue().graphQLType instanceof GraphQLTypeReference) {
GraphQLOutputType resolvedType = (GraphQLNamedOutputType) resolvedTypes.get(entry.getKey());
if (resolvedType != null) {
// Swap the reference for the concrete type, keeping the Java-side type.
entry.setValue(new MappedType(entry.getValue().javaType, resolvedType));
} else {
log.warn("Type reference " + entry.getKey() + " could not be replaced correctly. " +
"This can occur when the schema generator is initialized with " +
"additional types not built by GraphQL SPQR. If this type implements " +
"Node, in some edge cases it may end up not exposed via the 'node' query.");
//the edge case is when the primary resolver returns an interface or a union and not the node type directly
toRemove.add(entry.getKey());
}
}
}
toRemove.forEach(covariantTypes::remove);
// Second pass: catch any remaining references (e.g. values replaced concurrently);
// NOTE(review): this may insert a null graphQLType if the lookup misses — confirm intended.
covariantTypes.replaceAll((typeName, mapped) -> mapped.graphQLType instanceof GraphQLTypeReference
? new MappedType(mapped.javaType, (GraphQLOutputType) resolvedTypes.get(typeName)) : mapped);
}
}
@Test(dataProvider = "Map<IntegerEnum,String> rw=true keys=all values=all")
public static void testReplaceAll(String description, Map<IntegerEnum, String> map) {
    // One slot per key ordinal; records which key the remapping function saw.
    IntegerEnum[] seenKeys = new IntegerEnum[map.size()];
    // Collects every replacement value produced by the remapping function.
    Set<String> replacements = new HashSet<>(map.size());
    map.replaceAll((key, value) -> {
        // A null key stands in for ordinal 0.
        int slot = (key == null) ? 0 : key.ordinal();
        assertNull(seenKeys[slot]);
        // Store KEYS[0] for slot 0 so the later array comparison against KEYS holds.
        seenKeys[slot] = (slot == 0) ? KEYS[0] : key;
        assertSame(value, map.get(key));
        String updated = value + " replaced";
        replacements.add(updated);
        return updated;
    });
    // Every key must have been visited exactly once, and the map's values must be
    // exactly the set of produced replacements.
    assertEquals(KEYS, seenKeys, description);
    assertEquals(map.values().size(), replacements.size(), description + replacements);
    assertTrue(replacements.containsAll(map.values()), description + " : " + replacements + " != " + map.values());
    assertTrue(map.values().containsAll(replacements), description + " : " + replacements + " != " + map.values());
}
@Test(dataProvider = "Map<IntegerEnum,String> rw=true keys=all values=all")
public static void testReplaceAll(String description, Map<IntegerEnum, String> map) {
    // One slot per key ordinal; records which key the remapping function saw.
    IntegerEnum[] seenKeys = new IntegerEnum[map.size()];
    // Collects every replacement value produced by the remapping function.
    Set<String> replacements = new HashSet<>(map.size());
    map.replaceAll((key, value) -> {
        // A null key stands in for ordinal 0.
        int slot = (key == null) ? 0 : key.ordinal();
        assertNull(seenKeys[slot]);
        // Store KEYS[0] for slot 0 so the later array comparison against KEYS holds.
        seenKeys[slot] = (slot == 0) ? KEYS[0] : key;
        assertSame(value, map.get(key));
        String updated = value + " replaced";
        replacements.add(updated);
        return updated;
    });
    // Every key must have been visited exactly once, and the map's values must be
    // exactly the set of produced replacements.
    assertEquals(KEYS, seenKeys, description);
    assertEquals(map.values().size(), replacements.size(), description + replacements);
    assertTrue(replacements.containsAll(map.values()), description + " : " + replacements + " != " + map.values());
    assertTrue(map.values().containsAll(replacements), description + " : " + replacements + " != " + map.values());
}
/**
 * Create an instance from the JSON bytes read from zookeeper. Generally this should
 * only be done by a ZkStateReader.
 *
 * @param bytes The bytes read via a getData request to zookeeper (possibly null)
 * @param zNodeVersion the version of the data in zookeeper that this instance corresponds to
 * @return A new immutable Aliases object
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public static Aliases fromJSON(byte[] bytes, int zNodeVersion) {
Map<String, Map> aliasMap;
if (bytes == null || bytes.length == 0) {
// No data at the znode yet: treat it as an empty alias map.
aliasMap = Collections.emptyMap();
} else {
aliasMap = (Map<String, Map>) Utils.fromJSON(bytes);
}
@SuppressWarnings({"rawtypes"})
Map colAliases = aliasMap.getOrDefault(COLLECTION, Collections.emptyMap());
colAliases = convertMapOfCommaDelimitedToMapOfList(colAliases); // also unmodifiable
Map<String, Map<String, String>> colMeta = aliasMap.getOrDefault(COLLECTION_METADATA, Collections.emptyMap());
// Wrap every per-collection metadata map so the resulting Aliases object is fully immutable.
colMeta.replaceAll((k, metaMap) -> Collections.unmodifiableMap(metaMap));
return new Aliases(colAliases, colMeta, zNodeVersion);
}
/**
 * Given two maps of type {@code <columnName, offsetValue>} - one for input and one to compare
 * the input with - compares them and updates the input map in place with either the minimum
 * or the maximum value for each column, based on the comparison type.
 *
 * <p>Note: this method is void; {@code inputMap} is the result, modified in place.
 *
 * @param tableContext the {@link TableContext} instance to look up offset column data from
 * @param inputMap input Offset Map to compare and modify
 * @param compareMap map to compare with
 * @param comparisonType Whether to update input Map with min or max of values between input Map and compared Map
 */
public static void updateOffsetMapwithMinMax(
TableContext tableContext,
Map<String, String> inputMap,
Map<String,String> compareMap,
OffsetComparisonType comparisonType
) {
inputMap.replaceAll(
(columnName, inputValue) -> {
String comparedValue = compareMap.get(columnName);
// <= 0 means inputValue is not greater than comparedValue per the column's ordering.
int greaterOrNot = TableContextUtil.compareOffsetValues(tableContext,
columnName,
inputValue,
comparedValue);
if (comparisonType == OffsetComparisonType.MAXIMUM)
return (greaterOrNot <= 0) ? comparedValue : inputValue;
// MINIMUM: keep the smaller of the two offset values.
return (greaterOrNot <= 0) ? inputValue : comparedValue;
});
}
@Test(dataProvider = "Map<IntegerEnum,String> rw=true keys=all values=all")
public static void testReplaceAll(String description, Map<IntegerEnum, String> map) {
    // One slot per key ordinal; records which key the remapping function saw.
    IntegerEnum[] seenKeys = new IntegerEnum[map.size()];
    // Collects every replacement value produced by the remapping function.
    Set<String> replacements = new HashSet<>(map.size());
    map.replaceAll((key, value) -> {
        // A null key stands in for ordinal 0.
        int slot = (key == null) ? 0 : key.ordinal();
        assertNull(seenKeys[slot]);
        // Store KEYS[0] for slot 0 so the later array comparison against KEYS holds.
        seenKeys[slot] = (slot == 0) ? KEYS[0] : key;
        assertSame(value, map.get(key));
        String updated = value + " replaced";
        replacements.add(updated);
        return updated;
    });
    // Every key must have been visited exactly once, and the map's values must be
    // exactly the set of produced replacements.
    assertEquals(KEYS, seenKeys, description);
    assertEquals(map.values().size(), replacements.size(), description + replacements);
    assertTrue(replacements.containsAll(map.values()), description + " : " + replacements + " != " + map.values());
    assertTrue(map.values().containsAll(replacements), description + " : " + replacements + " != " + map.values());
}
@Test(dataProvider = "Map<IntegerEnum,String> rw=true keys=all values=all")
public static void testReplaceAll(String description, Map<IntegerEnum, String> map) {
    // One slot per key ordinal; records which key the remapping function saw.
    IntegerEnum[] seenKeys = new IntegerEnum[map.size()];
    // Collects every replacement value produced by the remapping function.
    Set<String> replacements = new HashSet<>(map.size());
    map.replaceAll((key, value) -> {
        // A null key stands in for ordinal 0.
        int slot = (key == null) ? 0 : key.ordinal();
        assertNull(seenKeys[slot]);
        // Store KEYS[0] for slot 0 so the later array comparison against KEYS holds.
        seenKeys[slot] = (slot == 0) ? KEYS[0] : key;
        assertSame(value, map.get(key));
        String updated = value + " replaced";
        replacements.add(updated);
        return updated;
    });
    // Every key must have been visited exactly once, and the map's values must be
    // exactly the set of produced replacements.
    assertEquals(KEYS, seenKeys, description);
    assertEquals(map.values().size(), replacements.size(), description + replacements);
    assertTrue(replacements.containsAll(map.values()), description + " : " + replacements + " != " + map.values());
    assertTrue(map.values().containsAll(replacements), description + " : " + replacements + " != " + map.values());
}
/**
 * Collects, per segment id, the set of result features declared by the PMML output
 * fields, and returns them as an immutable map of immutable enum sets. Output fields
 * without a segment id are skipped.
 */
@Override
public Map<String, Set<ResultFeature>> load(Output output){
    Map<String, Set<ResultFeature>> result = new LinkedHashMap<>();
    for(org.dmg.pmml.OutputField pmmlOutputField : output.getOutputFields()){
        String segmentId = pmmlOutputField.getSegmentId();
        if(segmentId == null){
            // Only segment-scoped output fields are of interest here.
            continue;
        }
        // computeIfAbsent replaces the manual get/null-check/put dance.
        result
            .computeIfAbsent(segmentId, key -> EnumSet.noneOf(ResultFeature.class))
            .add(pmmlOutputField.getResultFeature());
    }
    // Freeze the inner sets first, then the outer map.
    result.replaceAll((key, value) -> Sets.immutableEnumSet(value));
    return ImmutableMap.copyOf(result);
}
@CheckNoStats
@Test(dataProvider = "caches")
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
removalListener = { Listener.DEFAULT, Listener.CONSUMING })
public void putAll_replace(Map<Integer, Integer> map, CacheContext context) {
    // Remap every original entry's value to its own key, then push the batch in.
    Map<Integer, Integer> remapped = new LinkedHashMap<>(context.original());
    remapped.replaceAll((key, unused) -> key);
    map.putAll(remapped);
    // The bulk put must fully replace the original values and fire one REPLACED
    // notification per entry.
    assertThat(map, is(equalTo(remapped)));
    assertThat(map, hasRemovalNotifications(context, remapped.size(), RemovalCause.REPLACED));
    verifyWriter(context, (verifier, writer) -> verifier.wroteAll(remapped));
}
/**
 * Getter of the merged variables map: scopeMap variables override inheritedMap variables
 * and scriptMap variables override scopeMap variables.
 * Handles encrypted strings conversion: String values of the form {@code ENC(...)} are
 * decrypted before being returned.
 *
 * @return the merged variables map
 */
private Map<String, Serializable> getMergedMap() {
    // Later putAll calls override earlier entries, giving the documented precedence.
    Map<String, Serializable> variables = new HashMap<>(inheritedMap);
    variables.putAll(scopeMap);
    variables.putAll(scriptMap);
    variables.replaceAll((key, value) -> {
        // instanceof is already false for null, so the former explicit null check was redundant.
        if (value instanceof String && ((String) value).startsWith("ENC(")) {
            // Strip the leading "ENC(" and the trailing ")" before decrypting.
            String encryptedValue = ((String) value).substring(4, ((String) value).length() - 1);
            return PropertyDecrypter.getDefaultEncryptor().decrypt(encryptedValue);
        }
        return value;
    });
    return variables;
}
/**
 * Returns a copy of the given headers with the values of sensitive keys
 * (those listed in HEADERS_TO_SCRUB) replaced by "REDACTED".
 *
 * @param headers the headers to scrub; null is treated as empty
 * @return an immutable, scrubbed copy of the headers
 */
private static ImmutableMap<String, String> scrubHeaders(ImmutableMap<String, String> headers) {
    // Diamond operator instead of the raw LinkedHashMap type preserves type safety.
    Map<String, String> scrubbed = new LinkedHashMap<>();
    if (headers != null) {
        scrubbed.putAll(headers);
        scrubbed.replaceAll((key, value) -> HEADERS_TO_SCRUB.contains(key) ? "REDACTED" : value);
    }
    return ImmutableMap.copyOf(scrubbed);
}
// Map#replaceAll must throw NullPointerException when the remapping function
// produces a null value for a present entry.
@CheckNoWriter @CheckNoStats
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
removalListener = { Listener.DEFAULT, Listener.REJECTING })
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void replaceAll_nullValue(Map<Integer, Integer> map, CacheContext context) {
map.replaceAll((key, value) -> null);
}
// Runs replaceAll over map 'a' and checks each resulting value against the expected
// value derived from map 'b'.
// NOTE(review): assumes both maps are keyed with consecutive Integers 0..size-1 — verify against callers.
// NOTE(review): the literal "[email protected]" looks like scraping/obfuscation damage; the intended
// suffix is presumably "@JJ17r@417b" — confirm against the original test source.
void runReplaceAllMap(Map<Integer,StringHolder> a, Map<Integer,StringHolder> b) {
// concat's return value is discarded, so this presumably mutates the holder in place
// before a fresh StringHolder with the extra "@417b" suffix replaces it — TODO confirm
// StringHolder.concat semantics.
a.replaceAll((Integer f, StringHolder t)->{t.concat("@JJ17r");return new StringHolder(t.getString() + "@417b");});
int x = 0;
while (x<b.size()) {
assertTrue("Expected \"" + b.get(x).getString() + "[email protected]@417b\" but found \"" + a.get(x).getString(),a.get(x).getString().equals(b.get(x).getString() + "[email protected]@417b"));
x++;
}
}
@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = WriteException.class)
@CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING)
public void replaceAll_writerFails(Map<Integer, Integer> map, CacheContext context) {
    try {
        map.replaceAll((key, oldValue) -> context.absentValue());
    } finally {
        // A failing writer must leave the cache contents untouched.
        assertThat(map, equalTo(context.original()));
    }
}
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void replaceAll_sameValue(Map<Integer, Integer> map, CacheContext context) {
    // Identity remapping: the map's contents must be unchanged.
    map.replaceAll((key, current) -> current);
    assertThat(map, is(equalTo(context.original())));
    if (!(context.isGuava() || context.isAsync())) {
        // Caffeine's sync cache detects the no-op and emits no notifications.
        assertThat(context.consumedNotifications(), hasSize(0));
    } else {
        // Guava and async caches report a REPLACED notification per entry.
        assertThat(map, hasRemovalNotifications(context, map.size(), RemovalCause.REPLACED));
    }
}
// Merges the local variable stack at a branch join into the canonical stack recorded
// for the jump destination, emitting variable re-assignment instructions (and temporary
// variables where swaps would conflict) so both paths agree on variable identities.
private void handleStackMerging(Stack<Variable> localStack, int jumpsrc, Instruction jumpI, int jumpdest) {
// destination already destacked, may need to map current stack
if (!canonicalStackForBranchJoinJumpdest.containsKey(jumpdest)) {
throw new IllegalStateException("target jumpdest processed, but no canonical stack defined");
}
Stack<Variable> canonicalStack = canonicalStackForBranchJoinJumpdest.get(jumpdest);
// A size mismatch is tolerated (only the overlapping top portion is merged below),
// but flagged for diagnostics.
if (localStack.size() != canonicalStack.size()) {
log.println("Branch merge: stack size mismatch: canonical @" + HexPrinter.toHex(jumpdest) +
" with size " + canonicalStack.size() + " vs local @" + HexPrinter.toHex(jumpsrc) + " with size " + localStack.size());
sawMergeWithDiffStackSize = true;
}
Multimap<Variable, Variable> mapToCanonical = HashMultimap.create();
int mergeSize = Math.min(localStack.size(), canonicalStack.size());
if (mergeSize == 0) {
log.println("Branch merge: skipped merger for empty stack");
return;
}
// Pair up stack slots from the top down: local variable -> canonical variable(s).
for (int i = 1; i <= mergeSize; ++i) {
mapToCanonical.put(localStack.get(localStack.size() - i), canonicalStack.get(canonicalStack.size() - i));
}
log.println("stack merging from @" + HexPrinter.toHex(jumpsrc) + " into @" + HexPrinter.toHex(jumpdest));
mapToCanonical.asMap().forEach((variable, canonicals) ->
canonicals.forEach(canonical -> log.println("  " + canonical + " <- " + variable)));
// Each canonical variable may be assigned from at most one local variable.
if (mapToCanonical.size() != mapToCanonical.values().stream().distinct().count()) {
throw new IllegalStateException("a canonical variable is assigned multiple times");
}
boolean jumpCondition = findJumpCondition(jumpI, jumpdest);
// create re-assignment instructions
if (variableReassignments.containsKey(new Pair<>(jumpI, jumpCondition))) {
throw new IllegalStateException("reassignment does already exist");
}
// canonical <- local assignments, skipping slots already holding the right variable.
Map<Variable, Variable> reassignments = new LinkedHashMap<>();
mapToCanonical.asMap().forEach((variable, canonicals) ->
canonicals.stream().filter(canonical -> variable != canonical)
.forEach(canonical -> reassignments.put(canonical, variable)));
// create temporary variables if need have conflicting variable swaps
Map<Variable, Variable> temporaries = new LinkedHashMap<>();
Set<Variable> wasAssignedTo = new HashSet<>();
// search all variables that are reassigned before they get assigned
reassignments.forEach((canonical, local) -> {
if (wasAssignedTo.contains(local)) {
// 'local' was already overwritten by an earlier reassignment, so its current
// value must be preserved in a temporary before it is read here.
Variable tmpVar = new Variable();
temporaries.put(local, tmpVar);
//if (isVirtualCanonicalVar(canonical) && isVirtualCanonicalVar(local)) {
if (isVirtualCanonicalVar(local)) {
virtualCanonicalVars.add(tmpVar);
}
log.println("swap conflict for: " + canonical + " <- " + local + "; created temp variable: " + tmpVar);
}
wasAssignedTo.add(canonical);
});
if (temporaries.size() > 0) {
// replace locals with temporaries, if there is a temporary
reassignments.replaceAll((canonical, local) -> temporaries.getOrDefault(local, local));
// add assignemts to temporaries at the beginning
Map<Variable, Variable> reassignmentsWithTemps = new LinkedHashMap<>();
temporaries.forEach((local, canonical) -> reassignmentsWithTemps.put(canonical, local));
reassignmentsWithTemps.putAll(reassignments);
reassignments.clear();
reassignments.putAll(reassignmentsWithTemps);
}
variableReassignments.put(new Pair<>(jumpI, jumpCondition), reassignments);
}
// Map#replaceAll must reject a null remapping function with NullPointerException.
@CheckNoWriter @CheckNoStats
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void replaceAll_null(Map<Integer, Integer> map, CacheContext context) {
map.replaceAll(null);
}
@Override
public Map<String, Object> data() {
    // Defensive copy of the delegate's data, with every value unwrapped before exposure.
    Map<String, Object> snapshot = new HashMap<>(delegate.data());
    snapshot.replaceAll((name, value) -> unwrapIfNeeded(value));
    return snapshot;
}