The following lists example code for java.util.HashMap#putIfAbsent(); follow the links to view the source on GitHub, or leave a comment on the right.
/**
 * Populates a constructor arg of the given type from raw attributes, injecting a
 * default "name" attribute when the caller did not supply one. Asserts that the
 * populate call collects no configuration deps.
 */
private <T extends BuildRuleArg> T invokePopulate2(
    Class<T> constructorClass,
    Map<String, ?> attributes,
    ImmutableSet.Builder<BuildTarget> declaredDeps)
    throws CoerceFailedException {
  // Work on a copy so the caller's map stays untouched; guarantee a "name" entry.
  HashMap<String, Object> attrs = new HashMap<>(attributes);
  attrs.putIfAbsent("name", "the name");

  ImmutableSet.Builder<BuildTarget> configDeps = ImmutableSet.builder();
  T populated =
      marshaller.populate(
          createCellRoots(filesystem).getCellNameResolver(),
          filesystem,
          new ThrowingSelectorListResolver(),
          new ThrowingTargetConfigurationTransformer(),
          new ThrowingSelectableConfigurationContext(),
          TARGET,
          UnconfiguredTargetConfiguration.INSTANCE,
          DependencyStack.root(),
          builder(constructorClass),
          declaredDeps,
          configDeps,
          attrs);
  // This path is expected to contribute no configuration deps.
  assertEquals(ImmutableSet.of(), configDeps.build());
  return populated;
}
/**
 * Regenerates UUIDs and refreshes authorship metadata on imported entities.
 *
 * <p>After adding the import/export feature the actionId became an issue in the following
 * scenario: a) UserA exports project.zip from the database; b) UserB imports, modifies and
 * exports projectB.zip; c) UserA imports projectB.zip — now the system is confused about the
 * UUIDs and would override UserA's test cases. With the new design of the "project" feature we
 * therefore: 1) regenerate all the UUIDs, 2) update the createdAt and createdBy fields.
 *
 * @param actionEntities original action entities whose UUIDs get remapped
 * @param testCaseTreeEntity test case tree entity that needs updating
 * @param ignoreTestTreeId when deep-copying the tree, the tree id is already the new id
 *     associated with the project, so no new id needs to be generated for it
 */
private void refurbishEntities(
    List<ActionEntity> actionEntities,
    TestCaseTreeEntity testCaseTreeEntity,
    boolean ignoreTestTreeId) {
  HashMap<String, String> uuidMapping = new HashMap<>();
  refurbishActionEntities(actionEntities, uuidMapping);
  if (!ignoreTestTreeId) {
    // computeIfAbsent registers and returns the replacement UUID in a single lookup,
    // replacing the original putIfAbsent + get pair.
    testCaseTreeEntity.setUuid(
        uuidMapping.computeIfAbsent(
            testCaseTreeEntity.getUuid().toString(), k -> UUID.randomUUID().toString()));
  }
  testCaseTreeEntity.setCreatedBy(UicdConfig.getInstance().getCurrentUser());
  testCaseTreeEntity.setCreatedAt(Instant.now());
  // The serialized tree details embed the old UUIDs as plain text; rewrite every occurrence.
  String treeDetails = testCaseTreeEntity.getTreeDetails();
  for (Map.Entry<String, String> entry : uuidMapping.entrySet()) {
    treeDetails = treeDetails.replace(entry.getKey(), entry.getValue());
  }
  testCaseTreeEntity.setTreeDetails(treeDetails);
}
/**
 * Loads gene mappings from a tab-separated file into a per-chromosome interval tree.
 *
 * <p>Each row is expected as: name, chromosome, start, stop (one header line is skipped).
 * Every gene range is widened by {@code window} bases on both sides, clamped at 0 on the left.
 *
 * @param geneMappingFile path to the tab-separated gene mapping file
 * @param window number of bases to extend each gene range on either side
 * @return interval tree of named genomic ranges keyed by chromosome
 */
public static PerChrIntervalTree<NamedGenomicRange> loadGeneMappings(final String geneMappingFile, final int window) throws IOException, NumberFormatException, Exception {
    HashMap<String, ArrayList<NamedGenomicRange>> genes = new HashMap<>();
    // try-with-resources closes the reader even on a parse failure (the original leaked it)
    try (CSVReader geneMapReader = new CSVReader(new InputStreamReader(new FileInputStream(geneMappingFile), ENCODING), '\t', '\0', 1)) {
        String[] nextLine;
        while ((nextLine = geneMapReader.readNext()) != null) {
            String name = nextLine[0];
            String chr = nextLine[1];
            int start = Integer.parseInt(nextLine[2]);
            int stop = Integer.parseInt(nextLine[3]);
            // computeIfAbsent replaces the getOrDefault/putIfAbsent pair and avoids
            // allocating a throw-away list when the chromosome is already present
            genes.computeIfAbsent(chr, k -> new ArrayList<>())
                    .add(new NamedGenomicRange(name, chr, Math.max(start - window, 0), stop + window));
        }
    }
    PerChrIntervalTree<NamedGenomicRange> geneMappings = new PerChrIntervalTree<>(NamedGenomicRange.class);
    for (Map.Entry<String, ArrayList<NamedGenomicRange>> entry : genes.entrySet()) {
        geneMappings.addChrElements(entry.getKey(), entry.getValue());
    }
    return geneMappings;
}
/**
 * Determines whether two sentences are similar under the transitive closure of the given
 * similarity pairs, using a union-find over the words appearing in the pairs.
 */
public boolean areSentencesSimilarTwo(String[] words1, String[] words2, String[][] pairs) {
    if (words1.length != words2.length) return false;
    // Each pair introduces at most two new words, so 2 * pairs.length ids suffice.
    UF uf = new UF(pairs.length * 2);
    HashMap<String, Integer> indexOf = new HashMap<>();
    int nextId = 0;
    for (String[] pair : pairs) {
        if (!indexOf.containsKey(pair[0])) indexOf.put(pair[0], nextId++);
        if (!indexOf.containsKey(pair[1])) indexOf.put(pair[1], nextId++);
        int left = indexOf.get(pair[0]);
        int right = indexOf.get(pair[1]);
        if (!uf.connected(left, right)) uf.union(left, right);
    }
    for (int i = 0; i < words1.length; i++) {
        // Identical words are always similar; otherwise both must be known and connected.
        if (words1[i].equals(words2[i])) continue;
        Integer a = indexOf.get(words1[i]);
        Integer b = indexOf.get(words2[i]);
        if (a == null || b == null || !uf.connected(a, b)) return false;
    }
    return true;
}
/**
 * Determines whether two sentences are similar under the given symmetric (but NOT transitive)
 * similarity pairs. A word is always similar to itself.
 *
 * @param words1 first sentence
 * @param words2 second sentence (must be the same length as {@code words1})
 * @param pairs similar word pairs; similarity is applied in both directions
 * @return true when every position holds equal or directly-paired words
 */
public boolean areSentencesSimilar(String[] words1, String[] words2, String[][] pairs) {
    if (words1.length != words2.length) return false;
    HashMap<String, HashSet<String>> cache = new HashMap<>();
    for (String[] pair : pairs) {
        // computeIfAbsent collapses the original putIfAbsent + get double lookup
        cache.computeIfAbsent(pair[0], k -> new HashSet<>()).add(pair[1]);
        cache.computeIfAbsent(pair[1], k -> new HashSet<>()).add(pair[0]);
    }
    for (int i = 0; i < words1.length; i++) {
        if (words1[i].equals(words2[i])) {
            continue;
        }
        HashSet<String> similar = cache.get(words1[i]);
        if (similar == null || !similar.contains(words2[i])) {
            return false;
        }
    }
    return true;
}
/**
 * Extracts the map of {@link TaskName} to {@link SystemStreamPartition}s from the
 * {@link JobModel}. (The original javadoc stated the mapping backwards.)
 *
 * <p>Only tasks in active mode are included, which also avoids duplicate entries for
 * standby tasks.
 *
 * @param jobModel job model to extract from; must not be null
 * @return the extracted map
 */
public static Map<TaskName, Set<SystemStreamPartition>> getTaskToSystemStreamPartitions(JobModel jobModel) {
  Preconditions.checkArgument(jobModel != null, "JobModel cannot be null");
  HashMap<TaskName, Set<SystemStreamPartition>> taskToSSPs = new HashMap<>();
  for (ContainerModel containerModel : jobModel.getContainers().values()) {
    // Iterate entries directly instead of keySet() + get() per task.
    for (Map.Entry<TaskName, TaskModel> entry : containerModel.getTasks().entrySet()) {
      TaskModel taskModel = entry.getValue();
      if (taskModel.getTaskMode() != TaskMode.Active) {
        // Avoid duplicate tasks
        continue;
      }
      // computeIfAbsent replaces the putIfAbsent + get double lookup.
      taskToSSPs.computeIfAbsent(entry.getKey(), k -> new HashSet<>())
          .addAll(taskModel.getSystemStreamPartitions());
    }
  }
  return taskToSSPs;
}
// Recursively restores per-line visibility flags on a linked GData chain from the saved
// state map `s`. `key` identifies the current scope; nested scopes append "|lineNumber".
// Every element restored as hidden is also collected into `hiddenData`.
private void restore(GData g, HashMap<String, ArrayList<Boolean>> s, String key) {
int lineNumber = 1;
// Seed a default state for this key (head visible) if none was saved.
ArrayList<Boolean> nl = new ArrayList<Boolean>();
nl.add(true);
s.putIfAbsent(key, nl);
final ArrayList<Boolean> st = s.get(key);
final int size = st.size();
// Entry 0 is the visibility of the chain head itself.
g.visible = st.get(0);
if (!g.visible) hiddenData.add(g);
while ((g = g.getNext()) != null) {
final int type = g.type();
// Only element types 1..5 carry a visibility flag.
if (type > 0 && type < 6) {
if (lineNumber < size) {
g.visible = st.get(lineNumber);
} else {
// No saved state for this line — default to visible.
g.visible = true;
}
if (!g.visible) hiddenData.add(g);
if (type == 1) {
// Type 1 references a nested chain (GData1.myGData); recurse with a scoped key.
restore(((GData1) g).myGData, s, key + "|" + lineNumber); //$NON-NLS-1$
}
lineNumber++;
}
}
}
/**
 * Reorders the basket so that books whose numbers occur more often come first.
 * The stream sort is stable, so books with equal counts keep their original order.
 *
 * @param books book numbers in the basket
 * @return book numbers sorted by descending occurrence count
 */
private List<Integer> reorderBooks(final List<Integer> books) {
    // Counting how often a book number appears in the basket list;
    // merge() replaces the original computeIfPresent + putIfAbsent pair in one call.
    HashMap<Integer, Integer> numberCount = new HashMap<>();
    for (Integer book : books) {
        numberCount.merge(book, 1, Integer::sum);
    }
    return books.stream()
            // Books whose numbers appear more often should be in front of the basket list;
            // Integer.compare avoids comparing boxed Integers with a raw '>' operator.
            .sorted((bookNumberOne, bookNumberTwo) ->
                    Integer.compare(numberCount.get(bookNumberTwo), numberCount.get(bookNumberOne)))
            .collect(Collectors.toList());
}
/**
 * Loads every {@link Versioned} service on the system classpath and returns an immutable,
 * name-sorted name-to-version map. A duplicate registration for the same name is rejected.
 */
static Map<String, String> getVersions() {
  HashMap<String, String> versionsByName = new HashMap<>();
  try {
    for (Versioned versioned : ServiceLoader.load(Versioned.class, ClassLoader.getSystemClassLoader())) {
      // putIfAbsent returns the prior mapping, if any — non-null means a duplicate name.
      if (versionsByName.putIfAbsent(versioned.getName(), versioned.getVersion()) != null) {
        throw new IllegalArgumentException(
            String.format("%s already has a registered version.", versioned.getName()));
      }
    }
    return ImmutableSortedMap.copyOf(versionsByName);
  } catch (ServiceConfigurationError e) {
    throw new BatfishException("Failed to load Versioned object", e);
  }
}
/**
 * Returns the coordinate operation factory to use for the given properties and math transform factory.
 * If the given properties are empty and the {@code mtFactory} is the system default, then this method
 * returns the system default {@code CoordinateOperationFactory} instead of creating a new one.
 *
 * <p>It is okay to set all parameters to {@code null} in order to get the system default factory.</p>
 *
 * @param properties the default properties.
 * @param mtFactory the math transform factory to use.
 * @param crsFactory the factory to use if the operation factory needs to create CRS for intermediate steps.
 * @param csFactory the factory to use if the operation factory needs to create CS for intermediate steps.
 * @return the coordinate operation factory to use.
 */
public static CoordinateOperationFactory getCoordinateOperationFactory(Map<String,?> properties,
        final MathTransformFactory mtFactory, final CRSFactory crsFactory, final CSFactory csFactory)
{
    if (Containers.isNullOrEmpty(properties)) {
        // No properties and all-default factories: reuse the shared system instance.
        final boolean allDefaults =
                DefaultFactories.isDefaultInstance(MathTransformFactory.class, mtFactory)
             && DefaultFactories.isDefaultInstance(CRSFactory.class, crsFactory)
             && DefaultFactories.isDefaultInstance(CSFactory.class, csFactory);
        if (allDefaults) {
            return CoordinateOperations.factory();
        }
        properties = Collections.emptyMap();
    }
    // Copy the caller-supplied properties and inject the CRS/CS factories unless already set.
    final HashMap<String,Object> merged = new HashMap<>(properties);
    merged.putIfAbsent(ReferencingFactoryContainer.CRS_FACTORY, crsFactory);
    merged.putIfAbsent(ReferencingFactoryContainer.CS_FACTORY, csFactory);
    return new DefaultCoordinateOperationFactory(merged, mtFactory);
}
// Builds a row-wise codegen plan (cplan) for the given hop DAG. Inputs are collected
// recursively, ordered (main matrix first), and wrapped into a CNodeRow template.
@Override
public Pair<Hop[], CNodeTpl> constructCplan(Hop hop, CPlanMemoTable memo, boolean compileLiterals) {
//recursively process required cplan output
HashSet<Hop> inHops = new HashSet<>();
// inHops2 holds specially named inputs ("X", "B1" — presumably set by rConstructCplan;
// not visible here). tmp maps hop IDs to their constructed cplan nodes.
HashMap<String, Hop> inHops2 = new HashMap<>();
HashMap<Long, CNode> tmp = new HashMap<>();
hop.resetVisitStatus();
rConstructCplan(hop, memo, tmp, inHops, inHops2, compileLiterals);
hop.resetVisitStatus();
//reorder inputs (ensure matrix is first input, and other inputs ordered by size)
// scalar literal inputs are dropped because their values are inlined
Hop[] sinHops = inHops.stream()
.filter(h -> !(h.getDataType().isScalar() && tmp.get(h.getHopID()).isLiteral()))
.sorted(new HopInputComparator(inHops2.get("X"),inHops2.get("B1"))).toArray(Hop[]::new);
inHops2.putIfAbsent("X", sinHops[0]); //robustness special cases
//construct template node
ArrayList<CNode> inputs = new ArrayList<>();
for( Hop in : sinHops )
inputs.add(tmp.get(in.getHopID()));
CNode output = tmp.get(hop.getHopID());
CNodeRow tpl = new CNodeRow(inputs, output);
// row type determines which hop dimension defines the second output dimension below
tpl.setRowType(TemplateUtils.getRowType(hop,
inHops2.get("X"), inHops2.get("B1")));
long n2 = tpl.getRowType()==RowType.COL_AGG_B1 ?
hop.getDim1() : hop.getDim2();
if( tpl.getRowType().isConstDim2(n2) )
tpl.setConstDim2(n2);
// precompute the number of vector intermediates the generated row operation needs
tpl.setNumVectorIntermediates(TemplateUtils
.determineMinVectorIntermediates(output,
inputs.isEmpty() ? null : inputs.get(0)));
tpl.getOutput().resetVisitStatus();
tpl.rReorderCommutativeBinaryOps(tpl.getOutput(), sinHops[0].getHopID());
tpl.setBeginLine(hop.getBeginLine());
// return cplan instance
return new Pair<>(sinHops, tpl);
}
/** Initializes the mocked token endpoint response and the class under test. */
@Before
public void setup() {
    // Diamond fixes the raw-type HashMap; the map is freshly created, so plain put()
    // behaves identically to putIfAbsent here and states the intent more directly.
    response = new HashMap<>();
    response.put(ACCESS_TOKEN, "f529.dd6e30.d454677322aaabb0");
    response.put(EXPIRES_IN, "43199");
    when(mockRestOperations.postForEntity(any(), any(), any()))
            .thenReturn(ResponseEntity.status(200).body(response));
    optionalParameters = new HashMap<>();
    cut = new XsuaaOAuth2TokenService(mockRestOperations);
}
/**
 * Registers a client session under the given context and client names, closing any session
 * previously registered for the same client.
 *
 * @param contextName context grouping the sessions
 * @param clientName name identifying the client within the context
 * @param clientSession session to register
 */
public void registerClientSession(String contextName, String clientName, ClientSession clientSession) {
    // computeIfAbsent both creates and registers the per-context map in one step, replacing
    // the original getOrDefault + trailing putIfAbsent pair (which could race or silently
    // drop the new map if another one was registered in between).
    HashMap<String, ClientSession> clientSessionMap =
            this.getContextNameSessionMap().computeIfAbsent(contextName, k -> new HashMap<>());
    // put() returns the displaced session (if any) so it can be closed.
    ClientSession oldSession = clientSessionMap.put(clientName, clientSession);
    if (oldSession != null) {
        oldSession.close();
    }
}
/**
 * Returns an immutable set containing each of elements, minus duplicates, in the order each
 * appears first in the source collection.
 *
 * <p>Comparison (equals() and hashCode()) are performed on key.apply(X) instead of on X.
 */
private <T, S> ImmutableSet<T> distinctByPredicate(
    Iterable<? extends T> items, Function<T, S> key) {
  // LinkedHashMap preserves first-appearance order, as the contract above promises;
  // the original HashMap iterated in an unspecified order.
  LinkedHashMap<S, T> m = new LinkedHashMap<>();
  for (T item : items) {
    // putIfAbsent keeps the first element seen for each key
    m.putIfAbsent(key.apply(item), item);
  }
  return ImmutableSet.copyOf(m.values());
}
/**
 * Creates an asset extraction that is a merge of the latest (non deleted) asset extraction of each branch of
 * the asset.
 *
 * @param asset asset whose per-branch extractions are merged
 * @param pollableTask pollable task to associate with the new extraction
 * @return the merged asset extraction
 */
public AssetExtraction createAssetExtractionForMultipleBranches(Asset asset, PollableTask pollableTask) {
    logger.debug("Get branches to be merged");
    List<AssetExtractionByBranch> sortedAssetExtractionByBranches = getSordedAssetExtractionByBranches(asset);
    logger.debug("Create a new asset extraction that will contain the merge");
    AssetExtraction mergedAssetExtraction = createAssetExtraction(asset, pollableTask);
    mergedAssetExtraction.setContentMd5(getMergedContentMd5(sortedAssetExtractionByBranches));
    mergedAssetExtraction.setFilterOptionsMd5(getMergedFilterOptionsMd5(sortedAssetExtractionByBranches));
    // Declared as Map (the original said HashMap, satisfied only because LinkedHashMap
    // extends it); LinkedHashMap keeps insertion order so earlier branches win the merge.
    Map<String, AssetTextUnit> mergedAssetTextUnits = new LinkedHashMap<>();
    for (AssetExtractionByBranch assetExtractionByBranch : sortedAssetExtractionByBranches) {
        logger.debug("Start processing branch: {} for asset: {}", assetExtractionByBranch.getBranch().getName(), assetExtractionByBranch.getAsset().getPath());
        AssetExtraction assetExtraction = assetExtractionByBranch.getAssetExtraction();
        logger.debug("Get asset text units of the branch to be merged");
        List<AssetTextUnit> assetTextUnitsToMerge = assetTextUnitRepository.findByAssetExtraction(assetExtraction);
        for (AssetTextUnit assetTextUnit : assetTextUnitsToMerge) {
            AssetTextUnit copy = copyAssetTextUnit(assetTextUnit);
            copy.setBranch(assetExtractionByBranch.getBranch());
            copy.setAssetExtraction(mergedAssetExtraction);
            // putIfAbsent: the first branch (per sorted order) providing an MD5 wins
            mergedAssetTextUnits.putIfAbsent(copy.getMd5(), copy);
        }
    }
    assetTextUnitRepository.save(mergedAssetTextUnits.values());
    Long mergedAssetExtractionId = mergedAssetExtraction.getId();
    Long tmId = asset.getRepository().getTm().getId();
    long mapExactMatches = assetMappingService.mapExactMatches(mergedAssetExtractionId, tmId, asset.getId());
    logger.debug("{} text units were mapped for the merged extraction with id: {} and tmId: {}", mapExactMatches, mergedAssetExtractionId, tmId);
    return mergedAssetExtraction;
}
// Builds a row-wise codegen plan (cplan) for the given hop DAG. Inputs are collected
// recursively, ordered (main matrix first), and wrapped into a CNodeRow template.
@Override
public Pair<Hop[], CNodeTpl> constructCplan(Hop hop, CPlanMemoTable memo, boolean compileLiterals) {
//recursively process required cplan output
HashSet<Hop> inHops = new HashSet<>();
// inHops2 holds specially named inputs ("X", "B1" — presumably set by rConstructCplan;
// not visible here). tmp maps hop IDs to their constructed cplan nodes.
HashMap<String, Hop> inHops2 = new HashMap<>();
HashMap<Long, CNode> tmp = new HashMap<>();
hop.resetVisitStatus();
rConstructCplan(hop, memo, tmp, inHops, inHops2, compileLiterals);
hop.resetVisitStatus();
//reorder inputs (ensure matrix is first input, and other inputs ordered by size)
// scalar literal inputs are dropped because their values are inlined
Hop[] sinHops = inHops.stream()
.filter(h -> !(h.getDataType().isScalar() && tmp.get(h.getHopID()).isLiteral()))
.sorted(new HopInputComparator(inHops2.get("X"),inHops2.get("B1"))).toArray(Hop[]::new);
inHops2.putIfAbsent("X", sinHops[0]); //robustness special cases
//construct template node
ArrayList<CNode> inputs = new ArrayList<>();
for( Hop in : sinHops )
inputs.add(tmp.get(in.getHopID()));
CNode output = tmp.get(hop.getHopID());
CNodeRow tpl = new CNodeRow(inputs, output);
// row type determines which hop dimension defines the second output dimension below
tpl.setRowType(TemplateUtils.getRowType(hop,
inHops2.get("X"), inHops2.get("B1")));
long n2 = tpl.getRowType()==RowType.COL_AGG_B1 ?
hop.getDim1() : hop.getDim2();
if( tpl.getRowType().isConstDim2(n2) )
tpl.setConstDim2(n2);
// precompute the number of vector intermediates the generated row operation needs
tpl.setNumVectorIntermediates(TemplateUtils
.determineMinVectorIntermediates(output,
inputs.isEmpty() ? null : inputs.get(0)));
tpl.getOutput().resetVisitStatus();
tpl.rReorderCommutativeBinaryOps(tpl.getOutput(), sinHops[0].getHopID());
tpl.setBeginLine(hop.getBeginLine());
// return cplan instance
return new Pair<>(sinHops, tpl);
}
/**
 * Parses one configuration section of floating rate name mappings into the mutable map.
 * Each key maps to an index name built from the section value plus the suffix. An upper-case
 * alias is also registered, but only when no other entry has claimed that key.
 */
private static void parseSection(
    PropertySet section,
    String indexNameSuffix,
    FloatingRateType type,
    HashMap<String, ImmutableFloatingRateName> mutableMap) {
  // find our names from the RHS of the key/value pairs
  for (String key : section.keys()) {
    ImmutableFloatingRateName frName =
        ImmutableFloatingRateName.of(key, section.value(key) + indexNameSuffix, type);
    // the exact key always wins (put); the upper-case alias must not clobber an
    // explicit entry, hence putIfAbsent
    mutableMap.put(key, frName);
    mutableMap.putIfAbsent(key.toUpperCase(Locale.ENGLISH), frName);
  }
}
/**
 * Loads trans-eQTL records from a tab-separated file and groups them by SNP.
 *
 * <p>Columns 3, 4 and 5 of each row are read as the QTL's fields (no header line).
 *
 * @param transQtlPath path to the tab-separated trans-QTL file
 * @return map from SNP identifier to the set of its trans effects
 */
public static HashMap<String, HashSet<Qtl>> loadTransEqtls(final String transQtlPath) throws IOException {
    final HashMap<String, HashSet<Qtl>> transEqtls = new HashMap<>();
    // try-with-resources closes the reader even on a parse failure (the original leaked it)
    try (CSVReader transFileReader = new CSVReader(new InputStreamReader(new FileInputStream(transQtlPath), ENCODING), '\t', '\0', 0)) {
        String[] nextLine;
        while ((nextLine = transFileReader.readNext()) != null) {
            Qtl eqtl = new Qtl(nextLine[3], nextLine[4], Double.valueOf(nextLine[5]));
            // computeIfAbsent replaces the getOrDefault/putIfAbsent pair and avoids
            // allocating a throw-away set when the SNP is already present
            transEqtls.computeIfAbsent(eqtl.getSnp(), k -> new HashSet<>()).add(eqtl);
        }
    }
    return transEqtls;
}
// Reads the per-directory test settings from rlexe.xml, if present. Each <default>
// element contributes one TestSetting keyed by its <files> text; compile flags may
// mark a setting disabled or be ignored entirely.
private static Map<String, TestSetting> readSettings(Path dir) {
Path settingsFile = dir.resolve("rlexe.xml");
if (!Files.isRegularFile(settingsFile)) {
// No settings file — nothing is disabled or customized.
return Collections.emptyMap();
}
try (Reader reader = bomReader(Files.newInputStream(settingsFile))) {
Document doc = Resources.xml(reader);
NodeList elements = doc.getElementsByTagName("default");
HashMap<String, TestSetting> settingMap = new HashMap<>();
for (int i = 0, length = elements.getLength(); i < length; ++i) {
Element element = (Element) elements.item(i);
String files = element.getElementsByTagName("files").item(0).getTextContent();
TestSetting setting = new TestSetting();
NodeList baseline = element.getElementsByTagName("baseline");
if (baseline.getLength() > 0) {
setting.baseline = baseline.item(0).getTextContent();
}
NodeList compileFlags = element.getElementsByTagName("compile-flags");
if (compileFlags.getLength() > 0) {
String flags = compileFlags.item(0).getTextContent();
for (String flag : flags.split("\\s+")) {
// Only "-flag" or "-flag:value" tokens are considered.
if (!flag.startsWith("-")) {
continue;
}
int sep = flag.indexOf(':');
String name;
if (sep != -1) {
// Strip the ":value" suffix to classify the flag by name only.
name = flag.substring(0, sep).trim();
} else {
name = flag;
}
if (ignoreFlags.contains(name)) {
// Ignored flags
} else if (disableFlags.contains(name)) {
// Flags known to be unsupported disable the whole test entry.
setting.disabled = true;
} else if (warnUnknownFlag) {
System.err.printf("unknown option '%s': %s%n", flag, settingsFile);
}
}
}
// putIfAbsent: the first <default> entry for a given files pattern wins.
settingMap.putIfAbsent(files, setting);
}
return settingMap;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
 * Puts the property if it is non-null and is not already in the Properties.
 *
 * @param props the properties map to update
 * @param key the key
 * @param value the value; ignored when {@code null}
 */
private static void putIfAbsent(Map<String, String> props, String key, String value) {
  // Parameter widened from HashMap to Map (program to the interface); source-compatible
  // for all existing callers. Null values are treated as "unset" and skipped.
  if (value != null) {
    props.putIfAbsent(key, value);
  }
}