The following examples show how to use com.google.common.collect.Multimap#removeAll(). They are taken from open-source projects.
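As a quick refresher before the examples, here is a minimal standalone sketch (not taken from any of the projects below): removeAll(key) drops every value associated with the key and returns the removed values as a collection, and calling it on an absent key is a harmless no-op.
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
public class RemoveAllDemo {
public static void main(String[] args) {
Multimap<String, String> multimap = ArrayListMultimap.create();
multimap.put("country", "IN");
multimap.put("country", "US");
multimap.put("page", "front_page");
// removeAll(key) removes every value mapped to the key and returns them.
Collection<String> removed = multimap.removeAll("country");
System.out.println(removed); // [IN, US]
System.out.println(multimap.containsKey("country")); // false
System.out.println(multimap.size()); // 1
// removeAll on an absent key returns an empty collection.
System.out.println(multimap.removeAll("missing").isEmpty()); // true
}
}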
/**
* Returns or modifies a filter that can be used for querying the results corresponding to the given dimension map.
*
* For example, if the dimension map is {country=IN, page_name=front_page}, then those two entries are added to,
* or overwrite the corresponding entries in, the given filter.
*
* Note that if the given filter contains an entry: country=["IN", "US", "TW",...], then this entry is replaced by
* country=IN.
*
* @param dimensionMap the dimension map to add to the filter
* @param filterToDecorate if it is null, a new filter will be created; otherwise, it is modified.
* @return a filter that is modified according to the given dimension map.
*/
public static Multimap<String, String> getFilterSetFromDimensionMap(DimensionMap dimensionMap,
Multimap<String, String> filterToDecorate) {
if (filterToDecorate == null) {
filterToDecorate = HashMultimap.create();
}
for (Map.Entry<String, String> entry : dimensionMap.entrySet()) {
String dimensionName = entry.getKey();
String dimensionValue = entry.getValue();
// If dimension value is "OTHER", then we need to get all data and calculate "OTHER" part.
// In order to reproduce the data for "OTHER", the filter should remain as is.
if ( !dimensionValue.equalsIgnoreCase("OTHER") ) {
// Only add the specific dimension value to the filter because other dimension values will not be used
filterToDecorate.removeAll(dimensionName);
filterToDecorate.put(dimensionName, dimensionValue);
}
}
return filterToDecorate;
}
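The removeAll-then-put sequence above is the usual way to overwrite a key's values in a Multimap, since put() alone appends rather than replaces. A minimal sketch of the same pattern (the keys and values here are illustrative, not from the project above):
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Arrays;
public class OverwriteDemo {
// Replace every value under 'key' with the single given value.
static <K, V> void overwrite(Multimap<K, V> filter, K key, V value) {
filter.removeAll(key); // put() alone would append, not replace
filter.put(key, value);
}
public static void main(String[] args) {
Multimap<String, String> filter = HashMultimap.create();
filter.putAll("country", Arrays.asList("IN", "US", "TW"));
overwrite(filter, "country", "IN");
System.out.println(filter.get("country")); // [IN]
}
}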
protected void applyMultiValuedThresholdAction(Multimap<String,String> fields, String fieldName, String singleFieldName) {
switch (helper.getThresholdAction()) {
case DROP:
if (singleFieldName != null) {
fields.removeAll(singleFieldName);
}
processField(fields, helper.getMultiValuedDropField(), aliaser.normalizeAndAlias(fieldName));
break;
case REPLACE:
if (singleFieldName != null) {
fields.removeAll(singleFieldName);
}
processField(fields, fieldName, helper.getMultiValuedThresholdReplacement());
break;
case TRUNCATE:
processField(fields, helper.getMultiValuedTruncateField(), aliaser.normalizeAndAlias(fieldName));
break;
case FAIL:
throw new IllegalArgumentException("A field : " + fieldName + " was too large to process");
}
}
protected void applyMultiValuedThresholdAction(Multimap<String,NormalizedContentInterface> fields, String fieldName, String singleFieldName) {
switch (helper.getMultiValuedThresholdAction()) {
case DROP:
if (singleFieldName != null) {
fields.removeAll(singleFieldName);
}
fields.put(helper.getMultiValuedDropField(),
new NormalizedFieldAndValue(helper.getMultiValuedDropField(), aliaser.normalizeAndAlias(fieldName)));
break;
case REPLACE:
if (singleFieldName != null) {
fields.removeAll(singleFieldName);
}
fields.put(fieldName, new NormalizedFieldAndValue(fieldName, helper.getThresholdReplacement()));
break;
case TRUNCATE:
fields.put(helper.getMultiValuedTruncateField(),
new NormalizedFieldAndValue(helper.getMultiValuedTruncateField(), aliaser.normalizeAndAlias(fieldName)));
break;
case FAIL:
throw new IllegalArgumentException("A field : " + fieldName + " was too large to process");
}
}
private static void deleteAccount(UserInfo userInfo, String loginFilePath, String groupFilePath, Properties props,
Multimap<String, String> groupsMap) throws ManageUsersException {
if (!userInfo.isLoginSet()) {
warnWithMessage(PROVIDED_USERNAME + IS_EMPTY_SKIPPING);
return;
}
if (!props.containsKey(userInfo.getLogin())) {
warnWithMessage(USER_HEADER + userInfo.getLogin() + DOES_NOT_EXIST_IN_LOGIN_FILE + loginFilePath);
}
if (!groupsMap.containsKey(userInfo.getLogin())) {
warnWithMessage(USER_HEADER + userInfo.getLogin() + DOES_NOT_EXIST_IN_GROUP_FILE + groupFilePath);
}
props.remove(userInfo.getLogin());
groupsMap.removeAll(userInfo.getLogin());
System.out.println("Deleted user " + userInfo.getLogin());
}
protected Iterable<IEObjectDescription> getAliasedElements(Iterable<IEObjectDescription> candidates) {
Multimap<QualifiedName, IEObjectDescription> keyToDescription = LinkedHashMultimap.create();
Multimap<QualifiedName, ImportNormalizer> keyToNormalizer = HashMultimap.create();
for (IEObjectDescription imported : candidates) {
QualifiedName fullyQualifiedName = imported.getName();
for (ImportNormalizer normalizer : normalizers) {
QualifiedName alias = normalizer.deresolve(fullyQualifiedName);
if (alias != null) {
QualifiedName key = alias;
if (isIgnoreCase()) {
key = key.toLowerCase();
}
keyToDescription.put(key, new AliasedEObjectDescription(alias, imported));
keyToNormalizer.put(key, normalizer);
}
}
}
for (QualifiedName name : keyToNormalizer.keySet()) {
if (keyToNormalizer.get(name).size() > 1)
keyToDescription.removeAll(name);
}
return keyToDescription.values();
}
private void writeApi(Multimap<String, MethodSpec> groupedMethods) {
// Build the tree of *Ops classes that need to be generated by this processor. The 'Ops' class
// resides at the root of the tree while other classes are nodes.
OpsSpec ops = new OpsSpec(null, null, T_OPS, groupedMethods.removeAll(""));
Collection<OpsSpec> groupOps = collectGroupOps(ops, groupedMethods);
write(buildTopClass(ops));
groupOps.forEach(g -> write(buildGroupClass(g)));
}
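Note that removeAll returns the removed values, which writeApi exploits above: removeAll("") both fetches the ungrouped methods for the root Ops class and deletes them from groupedMethods in one call. A sketch of that take-and-remove idiom:
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
public class TakeDemo {
public static void main(String[] args) {
Multimap<String, String> grouped = ArrayListMultimap.create();
grouped.put("", "rootMethodA");
grouped.put("", "rootMethodB");
grouped.put("math", "add");
// One call both fetches and deletes the entries for the key,
// leaving only the remaining groups in the multimap.
Collection<String> rootMethods = grouped.removeAll("");
System.out.println(rootMethods); // [rootMethodA, rootMethodB]
System.out.println(grouped.keySet()); // [math]
}
}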
/**
* Get the date index ingest keys and merge them into the provided key multimap.
*
* @param event the raw record container for the event being ingested
* @param eventFields the normalized fields of the event
* @param index the multimap into which the generated date index keys are merged
*/
private void getBulkIngestKeys(RawRecordContainer event, Multimap<String,NormalizedContentInterface> eventFields, Multimap<BulkIngestKey,Value> index) {
if (dataTypeToTypeToFields.containsKey(event.getDataType().typeName()) && null != eventFields && !eventFields.isEmpty()) {
// date index Table Structure
// Row: date
// Colf: type
// Colq: date\0datatype\0field
// Value: shard bit set
for (Map.Entry<String,String> entry : dataTypeToTypeToFields.get(event.getDataType().typeName()).entries()) {
String type = entry.getKey();
String field = entry.getValue();
for (NormalizedContentInterface nci : eventFields.get(field)) {
KeyValue keyValue = getDateIndexEntry(getShardId(event), event.getDataType().outputName(), type, field, nci.getIndexedFieldValue(),
event.getVisibility());
if (keyValue != null) {
if (log.isDebugEnabled()) {
log.debug("Outputting " + keyValue + " to " + getDateIndexTableName());
}
BulkIngestKey bulkIngestKey = new BulkIngestKey(getDateIndexTableName(), keyValue.getKey());
if (index.containsKey(bulkIngestKey)) {
index.put(bulkIngestKey, keyValue.getValue());
DateIndexDateAggregator aggregator = new DateIndexDateAggregator();
Value value = aggregator.reduce(bulkIngestKey.getKey(), index.get(bulkIngestKey).iterator());
index.removeAll(bulkIngestKey);
index.put(bulkIngestKey, value);
} else {
index.put(bulkIngestKey, keyValue.getValue());
}
}
}
}
}
}
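getBulkIngestKeys uses removeAll as part of a read-reduce-replace cycle: collect all values under a key, fold them into one, then swap the individual values for the aggregate. A simplified sketch of the same cycle, with integers and a plain sum standing in for the Values and the DateIndexDateAggregator reduction:
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
public class AggregateDemo {
public static void main(String[] args) {
Multimap<String, Integer> index = ArrayListMultimap.create();
index.put("key", 2);
index.put("key", 3);
index.put("key", 5);
// Reduce all values under the key to a single value...
int reduced = index.get("key").stream().mapToInt(Integer::intValue).sum();
// ...then replace the individual values with the aggregate.
index.removeAll("key");
index.put("key", reduced);
System.out.println(index.get("key")); // [10]
}
}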
@Override
public Multimap<String,NormalizedContentInterface> getEventFields(RawRecordContainer event) {
// applying groupNormalizer for csv data
Multimap<String,NormalizedContentInterface> fields = super.getEventFields(event);
// drop any field configured as such
for (String field : this.helper.getIgnoredFields()) {
fields.removeAll(field);
}
return fields;
}
private static LibraryDependencyImpl convertLibInfo(
@NonNull LibInfo libInfo,
@NonNull Multimap<LibraryDependency, VariantDependencies> reverseMap,
@NonNull Map<LibInfo, LibraryDependencyImpl> convertedMap) {
LibraryDependencyImpl convertedLib = convertedMap.get(libInfo);
if (convertedLib == null) {
// first, convert the children.
@SuppressWarnings("unchecked")
List<LibInfo> children = (List<LibInfo>) (List<?>) libInfo.getDependencies();
List<LibraryDependency> convertedChildren = Lists.newArrayListWithCapacity(children.size());
for (LibInfo child : children) {
convertedChildren.add(convertLibInfo(child, reverseMap, convertedMap));
}
// now convert the libInfo
convertedLib = new LibraryDependencyImpl(
libInfo.getBundle(),
libInfo.getFolder(),
convertedChildren,
libInfo.getName(),
libInfo.getProjectVariant(),
libInfo.getProject(),
libInfo.getRequestedCoordinates(),
libInfo.getResolvedCoordinates(),
libInfo.isOptional());
// add it to the map
convertedMap.put(libInfo, convertedLib);
// and update the reverse map
// get the items associated with the libInfo. Copy them into a fresh list, as the returned
// collection is backed by the content of the map.
Collection<VariantDependencies> values = Lists.newArrayList(reverseMap.get(libInfo));
reverseMap.removeAll(libInfo);
reverseMap.putAll(convertedLib, values);
}
return convertedLib;
}
@Override
public Multimap<String, AttributeModifier> getAttributeModifiers(EntityEquipmentSlot slot, ItemStack stack) {
Multimap<String, AttributeModifier> modifiers = super.getAttributeModifiers(slot, stack);
if (slot == EntityEquipmentSlot.MAINHAND) {
modifiers.removeAll(SharedMonsterAttributes.ATTACK_DAMAGE.getName());
modifiers.put(SharedMonsterAttributes.ATTACK_DAMAGE.getName(), new AttributeModifier(ATTACK_DAMAGE_MODIFIER, "Weapon modifier", EnchantmentHelper.getEnchantmentLevel(Enchantments.KNOCKBACK, stack), 0));
}
return modifiers;
}
public static void addProxyHeaders(Multimap<String, String> headers, X509Certificate clientCert) {
if (clientCert != null) {
List<String> proxiedEntitiesList = new ArrayList<>();
if (headers.containsKey(TimelyAuthenticationToken.PROXIED_ENTITIES_HEADER)) {
String proxiedEntities = HttpHeaderUtils.getSingleHeader(headers,
TimelyAuthenticationToken.PROXIED_ENTITIES_HEADER, false);
headers.removeAll(TimelyAuthenticationToken.PROXIED_ENTITIES_HEADER);
String[] preExistingProxiedEntities = ProxiedEntityUtils.splitProxiedDNs(proxiedEntities, false);
if (preExistingProxiedEntities.length > 0) {
proxiedEntitiesList.addAll(Arrays.asList(preExistingProxiedEntities));
}
}
String subjectDN = DnUtils.normalizeDN(clientCert.getSubjectDN().getName());
proxiedEntitiesList.add(subjectDN);
String[] newProxiedEntities = new String[proxiedEntitiesList.size()];
proxiedEntitiesList.toArray(newProxiedEntities);
headers.put(TimelyAuthenticationToken.PROXIED_ENTITIES_HEADER,
ProxiedEntityUtils.buildProxiedDN(newProxiedEntities));
List<String> proxiedIssuersList = new ArrayList<>();
if (headers.containsKey(TimelyAuthenticationToken.PROXIED_ISSUERS_HEADER)) {
String proxiedIssuers = HttpHeaderUtils.getSingleHeader(headers,
TimelyAuthenticationToken.PROXIED_ISSUERS_HEADER, false);
headers.removeAll(TimelyAuthenticationToken.PROXIED_ISSUERS_HEADER);
String[] preExistingProxiedIssuers = ProxiedEntityUtils.splitProxiedDNs(proxiedIssuers, false);
if (preExistingProxiedIssuers.length > 0) {
proxiedIssuersList.addAll(Arrays.asList(preExistingProxiedIssuers));
}
}
String issuerDN = DnUtils.normalizeDN(clientCert.getIssuerDN().getName());
proxiedIssuersList.add(issuerDN);
String[] newProxiedIssuers = new String[proxiedIssuersList.size()];
proxiedIssuersList.toArray(newProxiedIssuers);
headers.put(TimelyAuthenticationToken.PROXIED_ISSUERS_HEADER,
ProxiedEntityUtils.buildProxiedDN(newProxiedIssuers));
}
}
protected void createReferenceUpdatesForCluster(ElementRenameArguments elementRenameArguments,
Multimap<URI, IReferenceDescription> resource2references, ResourceSet resourceSet,
IRefactoringUpdateAcceptor updateAcceptor, StatusWrapper status, IProgressMonitor monitor) {
SubMonitor progress = SubMonitor.convert(monitor, 100);
List<URI> unloadableResources = loadReferringResources(resourceSet, resource2references.keySet(), status,
progress.newChild(10));
if (progress.isCanceled()) {
throw new OperationCanceledException();
}
for (URI unloadableResource : unloadableResources)
resource2references.removeAll(unloadableResource);
List<IReferenceDescription> unresolvableReferences = resolveReferenceProxies(resourceSet,
resource2references.values(), status, progress.newChild(70));
if (progress.isCanceled()) {
throw new OperationCanceledException();
}
for (IReferenceDescription unresolvableReference : unresolvableReferences) {
URI unresolvableReferringResource = unresolvableReference.getSourceEObjectUri().trimFragment();
resource2references.remove(unresolvableReferringResource, unresolvableReference);
}
elementRenameArguments.getRenameStrategy().applyDeclarationChange(elementRenameArguments.getNewName(),
resourceSet);
if (progress.isCanceled()) {
throw new OperationCanceledException();
}
createReferenceUpdates(elementRenameArguments, resource2references, resourceSet, updateAcceptor,
progress.newChild(20));
if (progress.isCanceled()) {
throw new OperationCanceledException();
}
elementRenameArguments.getRenameStrategy().revertDeclarationChange(resourceSet);
}
@ExpectWarning(value="GC", num=7)
public static void testMultimap(Multimap<String, Integer> mm) {
mm.containsEntry("x", "y");
mm.containsEntry(1, 5);
mm.containsKey(1);
mm.containsValue("x");
mm.remove("x", "x");
mm.remove(1, 2);
mm.removeAll(1);
}
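The FindBugs test above highlights a pitfall: Multimap.removeAll is declared to accept any Object as the key, so passing a key of the wrong type (mm.removeAll(1) on a Multimap<String, Integer>) compiles cleanly but silently removes nothing. A small demonstration:
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
public class WrongKeyTypeDemo {
public static void main(String[] args) {
Multimap<String, Integer> mm = HashMultimap.create();
mm.put("1", 5);
// Compiles, but the Integer 1 never equals the String "1":
// nothing is removed and an empty collection comes back.
System.out.println(mm.removeAll(1).isEmpty()); // true
System.out.println(mm.size()); // still 1
}
}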
private static void updateUserGroups(String login, Collection<String> groups, Multimap<String, String> groupsMap) {
if (!groups.isEmpty()) {
groupsMap.removeAll(login);
for (String group : groups) {
if (!group.isEmpty()) {
groupsMap.put(login, group);
System.out.println("Adding group " + group + " to user " + login);
}
}
}
}
@NoWarning("GC")
public static void testMultimapOK2(Multimap<String, Pair<Integer,Long>> mm) {
Pair<Integer, Long> p = new Pair<Integer, Long>(1, 1L);
mm.containsEntry("x", p);
mm.containsKey("x");
mm.containsValue(p);
mm.remove("x", p);
mm.removeAll("x");
}
@Override
public Set<ISerializationContext> findByContents(EObject semanticObject, Iterable<ISerializationContext> contextCandidates) {
if (semanticObject == null)
throw new NullPointerException();
initConstraints();
Multimap<IConstraint, ISerializationContext> constraints;
if (contextCandidates != null)
constraints = getConstraints(semanticObject, contextCandidates);
else
constraints = getConstraints(semanticObject);
if (constraints.size() < 2)
return Sets.newLinkedHashSet(constraints.values());
for (IConstraint cand : Lists.newArrayList(constraints.keySet()))
if (!isValidValueQuantity(cand, semanticObject))
constraints.removeAll(cand);
if (constraints.size() < 2)
return Sets.newLinkedHashSet(constraints.values());
LinkedHashSet<ISerializationContext> result = Sets.newLinkedHashSet(constraints.values());
for (EStructuralFeature feat : semanticObject.eClass().getEAllStructuralFeatures()) {
if (transientValueUtil.isTransient(semanticObject, feat) != ValueTransient.NO)
continue;
if (feat.isMany() && ((List<?>) semanticObject.eGet(feat)).isEmpty())
continue;
Multimap<AbstractElement, ISerializationContext> assignments = collectAssignments(constraints, feat);
Set<AbstractElement> assignedElements = findAssignedElements(semanticObject, feat, assignments);
Set<ISerializationContext> keep = Sets.newHashSet();
for (AbstractElement ele : assignedElements)
keep.addAll(assignments.get(ele));
result.retainAll(keep);
}
return result;
}
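Notice that findByContents copies constraints.keySet() into a list (Lists.newArrayList) before calling removeAll inside the loop; removing keys while iterating the live key set view would typically fail fast with a ConcurrentModificationException. A minimal sketch of the safe pattern:
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
public class SafeRemovalDemo {
public static void main(String[] args) {
Multimap<String, Integer> mm = ArrayListMultimap.create();
mm.put("a", 1);
mm.put("b", 2);
mm.put("b", 3);
// Snapshot the keys first; removing from the multimap while
// iterating its live keySet() would typically fail fast.
for (String key : Lists.newArrayList(mm.keySet())) {
if (mm.get(key).size() > 1) {
mm.removeAll(key);
}
}
System.out.println(mm.keySet()); // [a]
}
}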
@Override
protected @NonNull Multimap<@NonNull Integer, @NonNull String> getRegexes() {
Multimap<@NonNull Integer, @NonNull String> regexes = super.getRegexes();
if (!fFollowedThread.isEmpty()) {
regexes.put(IFilterProperty.BOUND, fFollowedThread);
} else {
regexes.removeAll(IFilterProperty.BOUND);
}
return regexes;
}
@BeforeClass
public static void setupClass() throws Exception {
System.setProperty("subject.dn.pattern", "(?:^|,)\\s*OU\\s*=\\s*My Department\\s*(?:,|$)");
createTestData();
setupConfiguration(conf);
AbstractColumnBasedHandler<Text> dataTypeHandler = new AbstractColumnBasedHandler<>();
dataTypeHandler.setup(new TaskAttemptContextImpl(conf, new TaskAttemptID()));
TestIngestHelper ingestHelper = new TestIngestHelper();
ingestHelper.setup(conf);
// create and process events with WKT data
RawRecordContainer record = new RawRecordContainerImpl();
Multimap<BulkIngestKey,Value> keyValues = HashMultimap.create();
int recNum = 1;
for (int i = 0; i < testData.size(); i++) {
TestData entry = testData.get(i);
record.clear();
record.setDataType(new Type(DATA_TYPE_NAME, TestIngestHelper.class, (Class) null, (String[]) null, 1, (String[]) null));
record.setRawFileName("geodata_" + recNum + ".dat");
record.setRawRecordNumber(recNum++);
record.setDate(formatter.parse(COMPOSITE_BEGIN_DATE).getTime());
record.setRawData(entry.toString().getBytes("UTF8"));
record.generateId(null);
record.setVisibility(new ColumnVisibility(AUTHS));
final Multimap<String,NormalizedContentInterface> fields = ingestHelper.getEventFields(record);
Multimap<String,NormalizedContentInterface> compositeFields = ingestHelper.getCompositeFields(fields);
for (String fieldName : compositeFields.keySet()) {
// if this is an overloaded event field, we are replacing the existing data
if (ingestHelper.isOverloadedCompositeField(fieldName))
fields.removeAll(fieldName);
fields.putAll(fieldName, compositeFields.get(fieldName));
}
Multimap kvPairs = dataTypeHandler.processBulk(new Text(), record, fields, new MockStatusReporter());
keyValues.putAll(kvPairs);
dataTypeHandler.getMetadata().addEvent(ingestHelper, record, fields);
}
keyValues.putAll(dataTypeHandler.getMetadata().getBulkMetadata());
// write these values to their respective tables
instance = new InMemoryInstance();
Connector connector = instance.getConnector("root", PASSWORD);
connector.securityOperations().changeUserAuthorizations("root", new Authorizations(AUTHS));
writeKeyValues(connector, keyValues);
}
@BeforeClass
public static void setupClass() throws Exception {
System.setProperty("subject.dn.pattern", "(?:^|,)\\s*OU\\s*=\\s*My Department\\s*(?:,|$)");
setupConfiguration(conf);
AbstractColumnBasedHandler<Text> dataTypeHandler = new AbstractColumnBasedHandler<>();
dataTypeHandler.setup(new TaskAttemptContextImpl(conf, new TaskAttemptID()));
TestIngestHelper ingestHelper = new TestIngestHelper();
ingestHelper.setup(conf);
// create and process events with WKT data
RawRecordContainer record = new RawRecordContainerImpl();
Multimap<BulkIngestKey,Value> keyValues = HashMultimap.create();
int recNum = 1;
for (int dataIdx = 0; dataIdx < 2; dataIdx++) {
String beginDate;
String[] wktData;
Integer[] wktByteLengthData;
long[] dates;
boolean useCompositeIngest;
if (dataIdx == 0) {
beginDate = LEGACY_BEGIN_DATE;
wktData = wktLegacyData;
wktByteLengthData = wktByteLengthLegacyData;
dates = legacyDates;
useCompositeIngest = false;
} else {
beginDate = COMPOSITE_BEGIN_DATE;
wktData = wktCompositeData;
wktByteLengthData = wktByteLengthCompositeData;
dates = compositeDates;
useCompositeIngest = true;
}
for (int i = 0; i < wktData.length; i++) {
record.clear();
record.setDataType(new Type(DATA_TYPE_NAME, TestIngestHelper.class, (Class) null, (String[]) null, 1, (String[]) null));
record.setRawFileName("geodata_" + recNum + ".dat");
record.setRawRecordNumber(recNum++);
record.setDate(formatter.parse(beginDate).getTime() + dates[i]);
record.setRawData((wktData[i] + "|" + ((wktByteLengthData[i] != null) ? Integer.toString(wktByteLengthData[i]) : "")).getBytes("UTF8"));
record.generateId(null);
record.setVisibility(new ColumnVisibility(AUTHS));
final Multimap<String,NormalizedContentInterface> fields = ingestHelper.getEventFields(record);
if (useCompositeIngest && ingestHelper instanceof CompositeIngest) {
Multimap<String,NormalizedContentInterface> compositeFields = ingestHelper.getCompositeFields(fields);
for (String fieldName : compositeFields.keySet()) {
// if this is an overloaded event field, we are replacing the existing data
if (ingestHelper.isOverloadedCompositeField(fieldName))
fields.removeAll(fieldName);
fields.putAll(fieldName, compositeFields.get(fieldName));
}
}
Multimap kvPairs = dataTypeHandler.processBulk(new Text(), record, fields, new MockStatusReporter());
keyValues.putAll(kvPairs);
dataTypeHandler.getMetadata().addEvent(ingestHelper, record, fields);
}
}
keyValues.putAll(dataTypeHandler.getMetadata().getBulkMetadata());
// Write the composite transition date manually
Key tdKey = new Key(new Text(GEO_FIELD), new Text(ColumnFamilyConstants.COLF_CITD), new Text(DATA_TYPE_NAME + "\0" + COMPOSITE_BEGIN_DATE), new Text(),
new SimpleDateFormat(CompositeMetadataHelper.transitionDateFormat).parse(COMPOSITE_BEGIN_DATE).getTime());
keyValues.put(new BulkIngestKey(new Text(TableName.METADATA), tdKey), new Value());
// write these values to their respective tables
instance = new InMemoryInstance();
Connector connector = instance.getConnector("root", PASSWORD);
connector.securityOperations().changeUserAuthorizations("root", new Authorizations(AUTHS));
writeKeyValues(connector, keyValues);
}
/**
* This function performs the required join on the two input streams for matching keys
* and allows the derived classes to implement the logic in the case of non-matching keys.
*
* It is designed such that for outer joins it always assumes a left outer join; in the
* case of a right outer join it treats the right stream as the left one, keeping the
* code and logic the same.
*
* For each key in the left stream, a corresponding key is searched for in the right stream.
* If a match is found, all the objects with that key are added to the output list, and that
* key is removed from the right stream, as it will no longer be required in any join scenario.
* If a match is not found, the derived class implementation is relied on to handle it.
*
* @param accu the main accumulation data structure
* @return the list of objects obtained after joining the streams
*/
private List<Object> getAllCombo(List<Multimap<List<Object>, Object>> accu)
{
List<Object> result = new ArrayList<>();
int leftStreamIndex = getLeftStreamIndex();
Multimap<List<Object>, Object> leftStream = accu.get(leftStreamIndex);
Multimap<List<Object>, Object> rightStream = ArrayListMultimap.create(accu.get((leftStreamIndex + 1) % 2));
Map<String,PojoUtils.Getter> leftGettersStream = leftStreamIndex == 0 ? gettersStream1 : gettersStream2;
Map<String,PojoUtils.Getter> rightGettersStream = leftStreamIndex == 1 ? gettersStream1 : gettersStream2;
for (List lMap : leftStream.keySet()) {
Collection<Object> left = leftStream.get(lMap);
if (rightStream.containsKey(lMap)) {
Collection<Object> right = rightStream.get(lMap);
Object o;
try {
o = outClass.newInstance();
} catch (Throwable e) {
throw Throwables.propagate(e);
}
for (Object lObj:left) {
for (Object rObj:right) {
if (outputToInputMap != null) {
for (Map.Entry<String, KeyValPair<STREAM,String>> entry : outputToInputMap.entrySet()) {
KeyValPair<STREAM,String> kv = entry.getValue();
Object reqObject;
Map<String,PojoUtils.Getter> reqStream;
if (kv.getKey() == LEFT) {
reqObject = leftStreamIndex == 0 ? lObj : rObj;
reqStream = leftStreamIndex == 0 ? leftGettersStream : rightGettersStream;
} else {
reqObject = leftStreamIndex == 0 ? rObj : lObj;
reqStream = leftStreamIndex == 0 ? rightGettersStream : leftGettersStream;
}
setters.get(entry.getKey()).set(o,reqStream.get(entry.getValue().getValue()).get(reqObject));
}
} else {
setObjectForResult(leftGettersStream, lObj, o);
setObjectForResult(rightGettersStream, rObj, o);
}
}
result.add(o);
}
rightStream.removeAll(lMap);
} else {
addNonMatchingResult(left, leftGettersStream, result);
}
}
addNonMatchingRightStream(rightStream, rightGettersStream, result);
return result;
}
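A stripped-down sketch of the matching loop described in the javadoc, using plain String keys and values in place of the POJO key lists (the names here are illustrative): matched keys are joined and then dropped from the right stream via removeAll, so whatever remains on the right afterwards is exactly the non-matching set.
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.ArrayList;
import java.util.List;
public class JoinSketch {
public static void main(String[] args) {
Multimap<String, String> left = ArrayListMultimap.create();
left.put("k1", "l1");
left.put("k2", "l2");
Multimap<String, String> right = ArrayListMultimap.create();
right.put("k1", "r1");
right.put("k3", "r3");
List<String> result = new ArrayList<>();
for (String key : left.keySet()) {
if (right.containsKey(key)) {
for (String l : left.get(key)) {
for (String r : right.get(key)) {
result.add(l + "+" + r); // matched pair
}
}
// matched keys can never join again, so drop them from the right side
right.removeAll(key);
}
}
// whatever is left in 'right' holds the non-matching keys (k3 here)
System.out.println(result); // [l1+r1]
System.out.println(right.keySet()); // [k3]
}
}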