The following are example usages of java.util.Collection#remove(). Follow the link to view the source code on GitHub, or leave a comment in the panel on the right.
@Test
public void whenTransformWithCollections2_thenTransformed() {
    // Maps each name to its character count.
    final Function<String, Integer> toLength = new Function<String, Integer>() {
        @Override
        public final Integer apply(final String input) {
            return input.length();
        }
    };
    final List<String> names = Lists.newArrayList("John", "Jane", "Adam", "Tom");
    final Collection<Integer> lengths = Collections2.transform(names, toLength);
    assertEquals(4, lengths.size());
    assertThat(lengths, contains(4, 4, 4, 3));
    // The transformed collection is a live view over 'names': removing the
    // value 3 here removes the matching source element ("Tom") underneath.
    lengths.remove(3);
    assertEquals(3, names.size());
}
@Test
public void paperEx() {
    // Build the example pushdown system: from <1, n1> control branches to n2
    // and n3; each of those pushes n7 with return symbols n4/n5; n4 and n5
    // rejoin at n6; n7 steps to n8, which pops back to the caller.
    pds.addRule(normal(1, "n1", 1, "n2"));
    pds.addRule(normal(1, "n1", 1, "n3"));
    pds.addRule(push(1, "n2", 1, "n7", "n4"));
    pds.addRule(push(1, "n3", 1, "n7", "n5"));
    pds.addRule(normal(1, "n4", 1, "n6"));
    pds.addRule(normal(1, "n5", 1, "n6"));
    pds.addRule(normal(1, "n7", 1, "n8"));
    pds.addRule(pop(1, "n8", 1));
    // Start from an automaton accepting <1, n1> and saturate it with post*.
    PAutomaton<StackSymbol, Abstraction> fa = accepts(1, "n1");
    pds.poststar(fa);
    // Remove exactly the transitions post* is expected to have produced; the
    // set must then be empty, i.e. nothing unexpected was added.
    Collection<Transition<StackSymbol, Abstraction>> transitions = fa.getTransitions();
    transitions.remove(t(1, "n1", ACC));
    transitions.remove(t(1, "n2", ACC));
    transitions.remove(t(1, "n3", ACC));
    transitions.remove(t(1, "n4", ACC));
    transitions.remove(t(1, "n5", ACC));
    transitions.remove(t(1, "n6", ACC));
    transitions.remove(t(1, fa.epsilon(), a(1, "n7")));
    transitions.remove(t(1, "n7", a(1, "n7")));
    transitions.remove(t(1, "n8", a(1, "n7")));
    transitions.remove(t(a(1, "n7"), "n4", ACC));
    transitions.remove(t(a(1, "n7"), "n5", ACC));
    assertTrue(transitions.isEmpty());
}
/**
 * Dissociates a bunch of administrators from a server group.
 * @param sg the server group to process
 * @param admins a collection of administrators to dissociate
 * @param loggedInUser the loggedInUser needed for credentials
 */
public void dissociateAdmins(ManagedServerGroup sg, Collection admins,
    User loggedInUser) {
    validateAccessCredentials(loggedInUser, sg, sg.getName());
    validateAdminCredentials(loggedInUser);
    Set adminSet = sg.getAssociatedAdminsFor(loggedInUser);
    processAdminList(sg, admins, loggedInUser);
    admins.remove(loggedInUser); //can't disassociate thyself.
    adminSet.removeAll(admins);
    ServerGroupFactory.save(sg);
    // Persist each dissociated admin so the dropped association is stored.
    for (Object admin : admins) {
        UserFactory.save((User) admin);
    }
}
private static Collection<String> getPackages(FileObject root) {
    ClasspathInfo cpi = ClasspathInfo.create(root);
    // Restrict the source path to this single root so that packages coming
    // from other modules do not show up in the result.
    ClassPath sourceOnly = ClassPathSupport.createClassPath(root);
    ClasspathInfo rootCpi = new ClasspathInfo.Builder(cpi.getClassPath(PathKind.BOOT))
            .setClassPath(cpi.getClassPath(PathKind.COMPILE))
            .setModuleSourcePath(cpi.getClassPath(PathKind.MODULE_SOURCE))
            .setModuleCompilePath(cpi.getClassPath(PathKind.MODULE_COMPILE))
            .setSourcePath(sourceOnly)
            .build();
    Collection<String> packages = new HashSet<>(
            rootCpi.getClassIndex().getPackageNames("", false,
                    Collections.singleton(SearchScope.SOURCE)));
    // Drop the entry for the unnamed (default) package.
    packages.remove(""); // NOI18N
    return packages;
}
@Atomic
public ActionForward remove(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) {
    // Resolve the provider chosen in the request and detach it from the
    // singleton's set of external scholarship providers.
    final Party provider = FenixFramework.getDomainObject(request.getParameter("provider"));
    Bennu.getInstance().getExternalScholarshipProviderSet().remove(provider);
    return redirect("/externalScholarshipProvider.do?method=list", request);
}
public boolean remove(K key, V value) {
    // An unknown key means there is nothing to remove.
    Collection<V> bucket = this.data.get(key);
    if (bucket == null) {
        return false;
    }
    boolean changed = bucket.remove(value);
    // Drop the key entirely once its last value is gone.
    if (changed && bucket.isEmpty()) {
        this.data.remove(key);
    }
    return changed;
}
/**
 * Finds all split work units in the input collection and merges the file parts into the expected output files.
 * @param fs {@link FileSystem} where file parts exist.
 * @param workUnits Collection of {@link WorkUnitState}s possibly containing split work units.
 * @return The collection of {@link WorkUnitState}s where split work units for each file have been merged.
 * @throws IOException
 */
public static Collection<WorkUnitState> mergeAllSplitWorkUnits(FileSystem fs, Collection<WorkUnitState> workUnits)
    throws IOException {
    // Group the split work units by the file they are parts of.
    ListMultimap<CopyableFile, WorkUnitState> splitWorkUnitsMap = ArrayListMultimap.create();
    for (WorkUnitState workUnit : workUnits) {
        if (isSplitWorkUnit(workUnit)) {
            CopyableFile copyableFile = (CopyableFile) CopySource.deserializeCopyEntity(workUnit);
            splitWorkUnitsMap.put(copyableFile, workUnit);
        }
    }
    for (CopyableFile file : splitWorkUnitsMap.keySet()) {
        log.info(String.format("Merging split file %s.", file.getDestination()));
        // Any split work unit of this file carries the dataset/output info
        // needed below; use the first one.
        WorkUnitState oldWorkUnit = splitWorkUnitsMap.get(file).get(0);
        Path outputDir = FileAwareInputStreamDataWriter.getOutputDir(oldWorkUnit);
        CopyEntity.DatasetAndPartition datasetAndPartition =
            file.getDatasetAndPartition(CopySource.deserializeCopyableDataset(oldWorkUnit));
        Path parentPath = FileAwareInputStreamDataWriter.getOutputFilePath(file, outputDir, datasetAndPartition)
            .getParent();
        // Merge all parts into a single replacement work unit.
        WorkUnitState newWorkUnit = mergeSplits(fs, file, splitWorkUnitsMap.get(file), parentPath);
        for (WorkUnitState wu : splitWorkUnitsMap.get(file)) {
            // Set to committed so that task states will not fail
            wu.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
            // NOTE(review): mutates the caller's collection in place —
            // assumes 'workUnits' is mutable; confirm at the call sites.
            workUnits.remove(wu);
        }
        workUnits.add(newWorkUnit);
    }
    return workUnits;
}
public void remove(K k, V v) {
    Collection<V> values = map.get(k);
    if (values == null) {
        return; // unknown key: nothing to do
    }
    values.remove(v);
    // Prune the mapping once no values remain for this key.
    if (values.isEmpty()) {
        map.remove(k);
    }
}
@Override
public void removeListener(EventListenerIF listener, String event) {
    synchronized (listeners) {
        if (!listeners.containsKey(event)) {
            return; // no listeners registered for this event
        }
        // Listeners are stored as an Object[]; copy into a mutable list so
        // the entry can be dropped, then write the array form back. Not
        // very elegant, but it matches the array-based storage format.
        Collection remaining = new ArrayList(Arrays.asList((Object[]) listeners.get(event)));
        remaining.remove(listener);
        if (remaining.isEmpty()) {
            listeners.remove(event);
        } else {
            listeners.put(event, remaining.toArray());
        }
    }
}
private void removeDependency(final IBattle blocked, final IBattle blocking) {
    // Drop 'blocking' from the set of battles that block 'blocked'.
    final Collection<IBattle> blockers = this.dependencies.get(blocked);
    blockers.remove(blocking);
    // Once nothing blocks this battle any more, drop its entry entirely.
    if (blockers.isEmpty()) {
        this.dependencies.remove(blocked);
    }
}
/**
 * Check if the given matchers can be matched to unique children.
 * Caution: this only checks that each given matcher can be matched onto a unique child; there may be children
 * that are not matched by any matcher, and this check will still pass!
 */
@Override
public boolean matchesSafely(@Nonnull RecordQueryPlan plan) {
    Collection<Matcher<RecordQueryPlan>> remaining = new HashSet<>(childMatchers);
    for (RecordQueryPlan child : plan.getChildren()) {
        // Remove through the iterator: the previous version called
        // remaining.remove(...) while a for-each iteration over 'remaining'
        // was live, which only avoided a ConcurrentModificationException
        // because of the immediate break. Iterator.remove is safe by contract.
        for (Iterator<Matcher<RecordQueryPlan>> it = remaining.iterator(); it.hasNext();) {
            if (it.next().matches(child)) {
                it.remove();
                break;
            }
        }
    }
    // All matchers consumed means each one found its own unique child.
    return remaining.isEmpty();
}
// Adds or removes 'source' on the opposite side of the relationship named
// 'fieldName' for every target identified by 'targetIds', persisting each
// modified target through its repository.
private void updateRelations(T source, Collection<J> targetIds, String fieldName, QueryContext queryContext, boolean add) {
    ResourceInformation sourceInformation = context.getSourceEntry().getResourceInformation();
    ResourceField field = sourceInformation.findFieldByUnderlyingName(fieldName);
    PreconditionUtil.verify(field != null, "field not found: %s.%s", sourceInformation.getResourceType(), fieldName);
    RegistryEntry targetEntry = context.getTargetEntry(field);
    ResourceField oppositeField = getOppositeField(field);
    ResourceFieldAccessor oppositeAccessor = oppositeField.getAccessor();
    ResourceRepositoryAdapter targetRepository = targetEntry.getResourceRepository();
    QueryAdapter queryAdapter = context.createEmptyQueryAdapter(targetEntry, queryContext);
    // Load all targets up front, then patch each one's opposite field.
    Collection<Object> targets = context.findAll(targetEntry, targetIds, queryContext);
    for (Object target : targets) {
        // in contract to SetOwnerStrategy no need to honor idAccessor since source already loaded
        if (oppositeField.isCollection()) {
            // Multi-valued opposite side: add or remove 'source' in place.
            Collection oppositeElements = (Collection) oppositeAccessor.getValue(target);
            if (add) {
                oppositeElements.add(source);
            } else {
                oppositeElements.remove(source);
            }
            targetRepository.update(target, queryAdapter);
        } else {
            // Single-valued opposite side: set or clear the reference.
            oppositeAccessor.setValue(target, add ? source : null);
            targetRepository.update(target, queryAdapter);
        }
    }
}
// Bytecode-enhancement advice: this body is inlined at the entry of the
// instrumented method.
@Advice.OnMethodEnter
static void enter(@Advice.This Object self, @FieldValue Object field, @MappedBy String mappedBy) {
    // Only touch the association when the mapped-by property is already
    // initialized, so a lazy collection is never forced to load here.
    if ( field != null && Hibernate.isPropertyInitialized( field, mappedBy ) ) {
        Collection<?> c = getter( field );
        if ( c != null ) {
            // NOTE(review): presumably keeps the inverse side of a
            // bidirectional association consistent by dropping 'self' from
            // the previous owner's collection — confirm against the
            // enhancer that generates this advice.
            c.remove( self );
        }
    }
}
/**
 * Removes from this collection every element contained in {@code c}.
 *
 * <p>Fix: the previous version did {@code c.remove(o)}, mutating the
 * argument while iterating it (risking ConcurrentModificationException)
 * and never touching this collection at all.
 *
 * @param c the elements to remove
 * @return true if this collection changed as a result of the call
 */
@Override
public boolean removeAll(Collection<?> c) {
    boolean result = false;
    for (Object o : c) {
        // Remove from *this* collection, not from the argument.
        result |= remove(o);
    }
    return result;
}
// Variable elimination: sums the boolean variable 'var' out of all factors
// that mention it and returns the surviving factors plus the one new factor
// produced by the elimination.
private List<Factor> sumOut(final String var, final List<Factor> factors) throws InterruptedException {
    /* determine which factors will be eliminated and which stay */
    List<Factor> newFactors = new ArrayList<>();
    List<Factor> eliminatedFactors = new ArrayList<>();
    for (Factor f : factors) {
        if (!f.subDistribution.getVariables().contains(var)) {
            newFactors.add(f);
        } else {
            eliminatedFactors.add(f);
        }
    }
    /* first build point-wise product of distributions */
    // NOTE(review): if no factor mentions 'var', eliminatedFactors is empty
    // and get(0) throws IndexOutOfBoundsException — confirm callers only
    // pass variables that occur in at least one factor.
    DiscreteProbabilityDistribution productDistribution = eliminatedFactors.size() > 1 ? this.multiply(eliminatedFactors) : eliminatedFactors.get(0).subDistribution;
    /* compute distribution for elimination factor */
    DiscreteProbabilityDistribution distOfNewFactor = new DiscreteProbabilityDistribution();
    // NOTE(review): this remove() mutates whatever collection getVariables()
    // returns; assumes it is a defensive copy — verify, otherwise the
    // product distribution itself is modified here.
    Collection<String> remainingVariablesInFactor = productDistribution.getVariables();
    remainingVariablesInFactor.remove(var);
    Collection<Collection<String>> entriesInReducedFactor = SetUtil.powerset(remainingVariablesInFactor);
    for (Collection<String> entry : entriesInReducedFactor) {
        Set<String> event = new HashSet<>(entry);
        // Marginalize: P(event) = P(event, var=false) + P(event, var=true).
        double probForEventWithVariableIsNegative = productDistribution.getProbabilities().get(event);
        event.add(var);
        double probForEventWithVariableIsPositive = productDistribution.getProbabilities().get(event);
        event.remove(var);
        distOfNewFactor.addProbability(event, probForEventWithVariableIsNegative + probForEventWithVariableIsPositive);
    }
    newFactors.add(new Factor(distOfNewFactor));
    return newFactors;
}
/**
 * Remove any instances of the given type from the collection.
 *
 * @param type       the class whose instances should be purged
 * @param collection the collection to filter in place
 */
private static void removeInstancesOf(final Class<?> type, final Collection<?> collection) {
    // removeIf removes through the collection's own iterator (or an
    // optimized override), so this is safe for ANY collection — including
    // CopyOnWriteArrayList, whose iterators reject remove(). The previous
    // for-each + remove() pattern was only safe because
    // ResteasyProviderFactory's collections are all concurrent.
    collection.removeIf(type::isInstance);
}
/**
 * Reduce the provided exclusions as much as possible by removing the source or
 * target vertex of lower contextual grade.
 * <p>
 * Strategy is as follows:
 * <ol>
 * <li>Pick up among all current exclusions the one whose high inter has the highest contextual
 * grade contribution among all exclusions,</li>
 * <li>Remove the weaker inter in this chosen exclusion relation,</li>
 * <li>Recompute all impacted contextual grades values,</li>
 * <li>Iterate until no more exclusion is left.</li>
 * </ol>
 * Note: the provided collection is consumed — processed and stale exclusions
 * are removed from it in place.
 *
 * @param exclusions the collection of exclusions to process
 * @return the set of vertices removed
 */
public Set<Inter> reduceExclusions (Collection<? extends Relation> exclusions)
{
    final Set<Inter> removed = new LinkedHashSet<>();
    Relation bestRel;
    do {
        // Choose exclusion with the highest source or target grade
        double bestCP = 0;
        bestRel = null;
        for (Iterator<? extends Relation> it = exclusions.iterator(); it.hasNext();) {
            Relation rel = it.next();
            if (containsEdge(rel)) {
                final double cp = Math.max(
                    getEdgeSource(rel).getBestGrade(),
                    getEdgeTarget(rel).getBestGrade());
                if (bestCP < cp) {
                    bestCP = cp;
                    bestRel = rel;
                }
            } else {
                // Exclusion no longer present in the graph: prune it.
                it.remove();
            }
        }
        // Remove the weaker branch of the selected exclusion
        if (bestRel != null) {
            final Inter source = getEdgeSource(bestRel);
            final double scp = source.getBestGrade();
            final Inter target = getEdgeTarget(bestRel);
            final double tcp = target.getBestGrade();
            final Inter weaker = (scp < tcp) ? source : target;
            if (weaker.isVip()) {
                logger.info(
                    "VIP conflict {} deleting weaker {}",
                    bestRel.toLongString(this),
                    weaker);
            }
            // Which inters were involved in some support relation with this weaker inter?
            final Set<Inter> involved = involvedInters(getSupports(weaker));
            involved.remove(weaker);
            final Set<Inter> weakerEnsembles = weaker.getAllEnsembles(); // Before weaker is deleted!
            // Remove the weaker inter
            removed.add(weaker);
            weaker.remove();
            // If removal of weaker has resulted in removal of an ensemble, count this ensemble
            for (Inter ensemble : weakerEnsembles) {
                if (ensemble.isRemoved()) {
                    removed.add(ensemble);
                }
            }
            // Update contextual values for all inters that were involved with 'weaker'
            for (Inter inter : involved) {
                computeContextualGrade(inter);
            }
            // This exclusion has been handled; drop it from the work set.
            exclusions.remove(bestRel);
        }
    } while (bestRel != null);
    return removed;
}
/**
 * Creates a date using the calendar date format. Specification reference:
 * 5.2.1.
 * <p>
 * Note: {@code fields.remove(...)} is used both as a presence test and to
 * consume the field, so each handled field is taken out of the collection.
 *
 * @param bld the builder
 * @param fields the fields
 * @param extended true to use extended format
 * @param strictISO true to only allow ISO formats
 * @return true if reduced precision
 * @since 1.1
 */
private static boolean dateByMonth(DateTimeFormatterBuilder bld, Collection<DateTimeFieldType> fields,
    boolean extended, boolean strictISO) {
    boolean reducedPrec = false;
    if (fields.remove(DateTimeFieldType.year())) {
        bld.append(Constants.YE);
        if (fields.remove(DateTimeFieldType.monthOfYear())) {
            if (fields.remove(DateTimeFieldType.dayOfMonth())) {
                // YYYY-MM-DD/YYYYMMDD
                appendSeparator(bld, extended);
                bld.appendMonthOfYear(2);
                appendSeparator(bld, extended);
                bld.appendDayOfMonth(2);
            } else {
                // YYYY-MM/YYYY-MM
                bld.appendLiteral('-');
                bld.appendMonthOfYear(2);
                reducedPrec = true;
            }
        } else {
            if (fields.remove(DateTimeFieldType.dayOfMonth())) {
                // YYYY--DD/YYYY--DD (non-iso)
                checkNotStrictISO(fields, strictISO);
                bld.appendLiteral('-');
                bld.appendLiteral('-');
                bld.appendDayOfMonth(2);
            } else {
                // YYYY/YYYY
                reducedPrec = true;
            }
        }
    } else if (fields.remove(DateTimeFieldType.monthOfYear())) {
        // No year: leading "--" marks the omitted year component.
        bld.appendLiteral('-');
        bld.appendLiteral('-');
        bld.appendMonthOfYear(2);
        if (fields.remove(DateTimeFieldType.dayOfMonth())) {
            // --MM-DD/--MMDD
            appendSeparator(bld, extended);
            bld.appendDayOfMonth(2);
        } else {
            // --MM/--MM
            reducedPrec = true;
        }
    } else if (fields.remove(DateTimeFieldType.dayOfMonth())) {
        // ---DD/---DD
        bld.appendLiteral('-');
        bld.appendLiteral('-');
        bld.appendLiteral('-');
        bld.appendDayOfMonth(2);
    }
    return reducedPrec;
}
@CanIgnoreReturnValue
@Override
public boolean remove(@Nullable Object key, @Nullable Object value) {
    // An absent key means there is nothing to remove.
    final Collection<V> values = asMap().get(key);
    if (values == null) {
        return false;
    }
    return values.remove(value);
}
/**
 * Merges the given bank orders into a single one: the first order in the
 * collection becomes the merge target, absorbs every line of the others, and
 * the remaining orders are deleted after their invoice payments are
 * re-pointed to the target.
 *
 * @param bankOrders the orders to merge; must contain at least two elements
 *                   and the collection is mutated (the target is removed)
 * @return the saved, merged bank order
 * @throws AxelorException if fewer than two orders are provided, or if the
 *                         orders are not compatible (checkSameElements)
 */
@Transactional(rollbackOn = {Exception.class})
public BankOrder mergeBankOrders(Collection<BankOrder> bankOrders) throws AxelorException {
    if (bankOrders == null || bankOrders.size() <= 1) {
        throw new AxelorException(
            TraceBackRepository.CATEGORY_INCONSISTENCY,
            IExceptionMessage.BANK_ORDER_MERGE_AT_LEAST_TWO_BANK_ORDERS);
    }
    this.checkSameElements(bankOrders);
    // Use the first order as the merge target; remove it so 'bankOrders'
    // now holds only the orders to be absorbed and deleted.
    BankOrder bankOrder = bankOrders.iterator().next();
    bankOrders.remove(bankOrder);
    // Reset fields that no longer describe the merged content.
    bankOrder.setSenderLabel(null);
    bankOrder.setSenderReference(null);
    bankOrder.setBankOrderDate(Beans.get(AppBaseService.class).getTodayDate());
    bankOrder.setSignatoryUser(null);
    bankOrder.setSignatoryEbicsUser(null);
    PaymentMode paymentMode = bankOrder.getPaymentMode();
    // Move every line of the absorbed orders onto the target.
    for (BankOrderLine bankOrderLine : this.getAllBankOrderLineList(bankOrders)) {
        bankOrder.addBankOrderLineListItem(bankOrderLine);
    }
    bankOrderRepo.save(bankOrder);
    for (BankOrder bankOrderToRemove : bankOrders) {
        // Re-fetch a managed instance before deleting it.
        bankOrderToRemove = bankOrderRepo.find(bankOrderToRemove.getId());
        List<InvoicePayment> invoicePaymentList =
            invoicePaymentRepo.findByBankOrder(bankOrderToRemove).fetch();
        // Re-point invoice payments at the surviving order.
        for (InvoicePayment invoicePayment : invoicePaymentList) {
            invoicePayment.setBankOrder(bankOrder);
        }
        bankOrderRepo.remove(bankOrderToRemove);
    }
    if (paymentMode.getConsoBankOrderLinePerPartner()) {
        consolidatePerPartner(bankOrder);
    }
    bankOrderService.updateTotalAmounts(bankOrder);
    return bankOrderRepo.save(bankOrder);
}