Listed below are example usages of com.google.common.collect.Iterators#concat(). You can follow the link to view the source code on GitHub, or leave a comment in the panel on the right.
/**
 * Executes the given range reads against the per-partition sorted sets and
 * returns a single closeable iterator over the transformed, filtered results.
 */
private CloseableIterator<T> executeQuery(final List<RangeReadInfo> reads) {
  if (isSortFinalResultsBySortKey) {
    // sort the reads by score (sort key) so results come back in key order
    reads.sort(ScoreOrderComparator.SINGLETON);
  }
  final Iterator<ScoredEntry<GeoWaveRedisPersistedRow>> result =
      Iterators.concat(reads.stream().map(r -> {
        // a missing/empty partition key maps to the shared empty-key sentinel
        final ByteArray partitionKey =
            ((r.partitionKey == null) || (r.partitionKey.length == 0))
                ? EMPTY_PARTITION_KEY
                : new ByteArray(r.partitionKey);
        // if we don't have enough precision (end <= start) the range end
        // must be made inclusive so matching rows are not dropped
        final boolean endInclusive = r.endScore <= r.startScore;
        return new PartitionIteratorWrapper(
            setCache.get(partitionKey).entryRange(r.startScore, true, r.endScore, endInclusive),
            r.partitionKey);
      }).iterator());
  return new CloseableIterator.Wrapper<>(transformAndFilter(result));
}
/**
 * Creates a tuple iterator over all segment scanners.  When the storage
 * context allows merge-sorted partition results and the query is not a raw
 * query, segment results are merged via a sorted, limit-aware merger;
 * otherwise they are simply concatenated.
 */
public SequentialCubeTupleIterator(List<CubeSegmentScanner> scanners, Cuboid cuboid,
Set<TblColRef> selectedDimensions, List<TblColRef> rtGroups, Set<TblColRef> groups, //
Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context, SQLDigest sqlDigest) {
this.context = context;
this.scanners = scanners;
// runtime group-by columns must also be scanned as dimensions
Set<TblColRef> selectedDims = Sets.newHashSet(selectedDimensions);
selectedDims.addAll(rtGroups);
segmentCubeTupleIterators = Lists.newArrayList();
for (CubeSegmentScanner scanner : scanners) {
segmentCubeTupleIterators.add(new SegmentCubeTupleIterator(scanner, cuboid, selectedDims, selectedMetrics, returnTupleInfo, context));
}
if (context.mergeSortPartitionResults() && !sqlDigest.isRawQuery) {
//query with limit
logger.info("Using SortedIteratorMergerWithLimit to merge segment results");
// double cast works around generic invariance: the iterator of segment
// iterators cannot be cast directly to Iterator<Iterator<ITuple>>
Iterator<Iterator<ITuple>> transformed = (Iterator<Iterator<ITuple>>) (Iterator<?>) segmentCubeTupleIterators.iterator();
tupleIterator = new SortedIteratorMergerWithLimit<ITuple>(transformed, context.getFinalPushDownLimit(), getTupleDimensionComparator(cuboid, groups, returnTupleInfo)).getIterator();
} else {
//normal case: no merge-sort needed, lazily chain segment results
logger.info("Using Iterators.concat to merge segment results");
tupleIterator = Iterators.concat(segmentCubeTupleIterators.iterator());
}
}
@Override
public Iterator<Tree> childrenIterator() {
  // children in syntactic order: "extends" keyword, "(", parameters, ")"
  final Iterator<Tree> lead = Iterators.forArray(extendKeyword, openParenthesis);
  final Iterator<Tree> trail = Iterators.singletonIterator(closeParenthesis);
  return Iterators.concat(lead, parameterElements.iterator(), trail);
}
/**
 * Returns an iterator over all values across every chromosome map, in the
 * iteration order of {@code data}.
 */
@Override
public Iterator<E> iterator() {
  // Use a typed list instead of the previous raw Iterator[] array: arrays
  // and generics don't mix, and the raw array forced unchecked conversions.
  final List<Iterator<E>> chrIterators = new ArrayList<>(data.size());
  for (final TreeMap<Integer, E> chrData : data.values()) {
    chrIterators.add(chrData.values().iterator());
  }
  // lazily chain the per-chromosome iterators into one
  return Iterators.concat(chrIterators.iterator());
}
/**
 * Returns a lazy view of all methods: direct methods followed by virtual
 * methods.  Each call to iterator() re-concatenates the underlying
 * iterators, so the view reflects the current state at iteration time.
 */
@Nonnull
@Override
public Iterable<? extends Method> getMethods() {
return new Iterable<Method>() {
@Nonnull
@Override
public Iterator<Method> iterator() {
return Iterators.concat(getDirectMethods().iterator(), getVirtualMethods().iterator());
}
};
}
// Verifies that Iterators.concat rejects a null input iterator with a
// NullPointerException rather than deferring the failure to iteration time.
@Test public void testFirstArgNull() throws Exception {
try {
Iterators.concat(null, iter("a","b"));
fail("nullpointer exception expected");
} catch (NullPointerException npe){
// expected: concat must not accept a null iterator argument
}
}
/**
* Instantiates a new cassandra result set from a com.datastax.driver.core.ResultSet.
*/
CassandraResultSet(CassandraStatement statement, ArrayList<com.datastax.driver.core.ResultSet> resultSets) throws SQLException
{
this.statement = statement;
this.resultSetType = statement.getResultSetType();
this.fetchDirection = statement.getFetchDirection();
this.fetchSize = statement.getFetchSize();
//this.rowsIterators = Lists.newArrayList();
// We have several result sets, but we will use only the first one for metadata needs
this.driverResultSet = resultSets.get(0);
// Now we concatenate iterators of the different result sets into a single one and voilà !! ;)
currentIteratorIndex=0;
rowsIterator = driverResultSet.iterator();
for(int i=1;i<resultSets.size();i++){
rowsIterator = Iterators.concat(rowsIterator,resultSets.get(i).iterator()); // this leads to Stack Overflow Exception when there are too many resultSets
/*if(resultSets.get(i).iterator().hasNext()){
rowsIterators.add(resultSets.get(i).iterator());
}
*/
}
//colDefinitions = driverResultSet.getColumnDefinitions();
// Initialize to column values from the first row
if (hasMoreRows())
{
populateColumns();
}
meta = new CResultSetMetaData();
}
/**
 * Appends the rows of the given key-iterables to this merger's current
 * iterator, optionally remembering the inputs for later replay.
 */
@Override
public void merge(Iterable<? extends KeyIterable<TKey, TRow>> iterables) {
Iterable<TRow> concat = Iterables.concat(iterables);
if (repeatable) {
// remember the inputs so the merged sequence can be replayed later,
// and invalidate any previously stored replay snapshot
this.iterables.addAll(iterables);
this.storedForRepeat = null;
}
if (iterator.hasNext()) {
// NOTE(review): each merge call nests Iterators.concat one level deeper;
// with very many merges this deepens the iterator chain — confirm the
// expected number of merge calls is small.
iterator = Iterators.concat(iterator, concat.iterator());
} else {
// nothing pending — just switch to the new rows
iterator = concat.iterator();
}
}
/**
 * Gets an iterator representing an immutable snapshot of all subscribers to the given event at
 * the time this method is called.
 */
Iterator<Subscriber> getSubscribers(Object event) {
// every supertype/interface of the event class can have its own subscribers
ImmutableSet<Class<?>> eventTypes = flattenHierarchy(event.getClass());
List<Iterator<Subscriber>> subscriberIterators = Lists.newArrayListWithCapacity(eventTypes.size());
for (Class<?> eventType : eventTypes) {
CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(eventType);
if (eventSubscribers != null) {
// eager no-copy snapshot: a CopyOnWriteArraySet iterator is a stable
// view of the set at creation time, so no defensive copy is needed
subscriberIterators.add(eventSubscribers.iterator());
}
}
// lazily chain the per-type snapshots into a single subscriber stream
return Iterators.concat(subscriberIterators.iterator());
}
/**
 * Lazily flattens each requestor's iterator into a single joined iterator
 * of results.
 */
@Override
protected Iterator<T> getJoinIterator(Iterator<? extends Requestor<T>> requestors,
    ConcurrentBoundedPriorityIterable<T> requestIterable) {
  return Iterators.concat(Iterators.transform(requestors, Requestor::iterator));
}
/**
 * Builds an iterator over all custom fulltext routines — analyzers, char
 * filters, token filters and tokenizers — each mapped to a RoutineInfo
 * carrying its name, type and resolved definition.
 *
 * @return the concatenated routine iterator, or {@code null} if the custom
 *         settings could not be read.  NOTE(review): callers must tolerate a
 *         null return — confirm, or consider an empty iterator instead.
 */
private Iterator<RoutineInfo> customIterators() {
try {
// four parallel transforms, one per routine type, each mapping a
// name -> settings entry to a RoutineInfo
Iterator<RoutineInfo> cAnalyzersIterator = Iterators.transform(
ftResolver.getCustomAnalyzers().entrySet().iterator(),
input -> new RoutineInfo(
input.getKey(),
RoutineType.ANALYZER.getName(),
routineSettingsToDefinition(input.getKey(), input.getValue(), RoutineType.ANALYZER)));
Iterator<RoutineInfo> cCharFiltersIterator = Iterators.transform(
ftResolver.getCustomCharFilters().entrySet().iterator(),
input -> new RoutineInfo(
input.getKey(),
RoutineType.CHAR_FILTER.getName(),
routineSettingsToDefinition(input.getKey(), input.getValue(), RoutineType.CHAR_FILTER)));
Iterator<RoutineInfo> cTokenFiltersIterator = Iterators.transform(
ftResolver.getCustomTokenFilters().entrySet().iterator(),
input -> new RoutineInfo(
input.getKey(),
RoutineType.TOKEN_FILTER.getName(),
routineSettingsToDefinition(input.getKey(), input.getValue(), RoutineType.TOKEN_FILTER)));
Iterator<RoutineInfo> cTokenizersIterator = Iterators.transform(
ftResolver.getCustomTokenizers().entrySet().iterator(),
input -> new RoutineInfo(
input.getKey(),
RoutineType.TOKENIZER.getName(),
routineSettingsToDefinition(input.getKey(), input.getValue(), RoutineType.TOKENIZER)));
return Iterators.concat(cAnalyzersIterator, cCharFiltersIterator, cTokenFiltersIterator, cTokenizersIterator);
} catch (IOException e) {
LOGGER.error("Could not retrieve custom routines", e);
return null;
}
}
/**
 * Iterates the union of all subsets, emitting each distinct element once.
 */
@Override
public Iterator<V> iterator() {
// Tracks elements already yielded so duplicates appearing in multiple
// subsets are emitted only once.  A fresh set is created for each returned
// iterator, so every iteration starts with clean dedupe state.
final Set<V> seen = new HashSet<>();
return Iterators.concat(
Iterators.transform(
subsets.iterator(),
s -> Iterators.filter(
s.iterator(),
// Set.add returns false for previously-seen elements -> filtered out
seen::add
)
)
);
}
/**
 * Returns a lazy view of all methods: direct methods followed by virtual
 * methods.  Each call to iterator() produces a fresh concatenated iterator,
 * so the view reflects current state at iteration time.
 */
@Nonnull
@Override
public Iterable<? extends Method> getMethods() {
  final Iterable<Method> lazyView =
      () -> Iterators.concat(getDirectMethods().iterator(), getVirtualMethods().iterator());
  return lazyView;
}
/**
 * Exercises segment merging: merges two freshly built segments into a base
 * segment, seals it, and checks that row traversal matches the expected
 * concatenation of generated rows; then appends more rows and re-checks;
 * finally merges into an empty segment and validates its index.
 */
private void test_merge(int version, SegmentMode mode, String name, String path) throws IOException {
// base segment plus two additional segments, each filled with rowCount rows
DPSegment segment = DPSegment.open(
version,
mode,
Paths.get(path),
name,
segmentSchema,
OpenOption.Overwrite).update();
addRows(segment, genRows(rowCount));
DPSegment segment2 = DPSegment.open(
version,
mode,
Paths.get(path + UUID.randomUUID().toString()),
name,
segmentSchema,
OpenOption.Overwrite).update();
addRows(segment2, genRows(rowCount));
DPSegment segment3 = DPSegment.open(
version,
mode,
Paths.get(path + UUID.randomUUID().toString()),
name,
segmentSchema,
OpenOption.Overwrite).update();
addRows(segment3, genRows(rowCount));
// merge the two extra segments into the base, then seal it for reading
segment.merge(Lists.newArrayList(segment2, segment3));
segment.seal();
// NOTE(review): expected layout assumes rowCount == DataPack.MAX_COUNT * 3 + 99
// per segment (three full packs plus a 99-row remainder) — confirm rowCount.
Iterator<Row> toCompare = Iterators.<Row>concat(
genRows(DataPack.MAX_COUNT * 3),
genRows(DataPack.MAX_COUNT * 3),
genRows(DataPack.MAX_COUNT * 3),
genRows(99),
genRows(99),
genRows(99));
rowsCmp(toCompare, segment.rowTraversal().iterator());
// append another batch of rows after re-opening for update, then re-check
segment.update();
addRows(segment, genRows(rowCount));
segment.seal();
toCompare = Iterators.<Row>concat(
genRows(DataPack.MAX_COUNT * 3),
genRows(DataPack.MAX_COUNT * 3),
genRows(DataPack.MAX_COUNT * 3),
genRows(99),
genRows(99),
genRows(99),
genRows(rowCount));
rowsCmp(toCompare, segment.rowTraversal().iterator());
// merging into a brand-new empty segment yields only the merged segments' rows
segment = DPSegment.open(
version,
mode,
Paths.get(path + UUID.randomUUID().toString()),
name,
segmentSchema,
OpenOption.Overwrite).update();
segment.merge(Lists.newArrayList(segment2, segment3));
toCompare = Iterators.<Row>concat(
genRows(DataPack.MAX_COUNT * 3),
genRows(DataPack.MAX_COUNT * 3),
genRows(99),
genRows(99));
rowsCmp(toCompare, segment.rowTraversal().iterator());
RSIndexTest.checkIndex(segment);
segment.close();
}
/**
 * Returns an iterable that lazily chains the elements of {@code i} followed
 * by the elements of {@code j}.
 */
public static <X> Iterable<X> iterConcat(Iterable<X> i, Iterable<X> j) {
    final Iterator<X> chained = Iterators.concat(i.iterator(), j.iterator());
    // NOTE(review): second constructor arg is 'true' — confirm its meaning
    // against IteratorIterable's declaration before relying on it.
    return new IteratorIterable<>(chained, true);
}
@Override
public Iterator<Tree> childrenIterator() {
  // children in syntactic order: CASE <expr>, WHEN clauses, ELSE part,
  // END CASE and terminating semicolon
  final Iterator<Tree> head = Iterators.forArray(caseKeyword, mainExpression);
  final Iterator<Tree> tail =
      Iterators.forArray(elseKeyword, elseSatements, endKeyword, caseKeyword2, semi);
  return Iterators.concat(head, whenClauses.iterator(), tail);
}
@Override
public Iterator<Object> iterator() {
  // fixed identity attributes first, then the dynamic attribute values
  final List<Object> fixed = Arrays.asList(id, address, type, capabilities);
  return Iterators.concat(fixed.iterator(), attributes.values().iterator());
}
/**
 * TestNG data provider producing parameter sets in which event ids are
 * shuffled, verifying that delivery and timestamp order does not affect the
 * materialized view.  One parameter set is produced per (model path, id
 * provider) combination.
 */
@DataProvider(name = "unorderedAdds")
public Iterator<Object[]> provideUnorderedAdds() throws Exception {
try {
List<ViewBinding> viewBindings = buildBindings(stepTypes, maxStepDepth);
final ViewBinding binding = viewBindings.get(0);
final int randomBatchSize = 2;
List<ModelPath> modelPaths = viewBindings.get(0).getModelPaths();
// Shuffled Adds: for each model path, derive its events once, then emit one
// test case per shuffled id ordering of those events
Iterator<Iterator<Object[]>> transform = Iterators.transform(modelPaths.iterator(), new Function<ModelPath, Iterator<Object[]>>() {
long testId = 0;
@Override
public Iterator<Object[]> apply(final ModelPath path) {
OrderIdProvider initialIdProvider = monatomic(0);
IdProviderImpl idProviderImpl = new IdProviderImpl(initialIdProvider);
final EventsAndViewId deriedEventsAndViewId = eventFireGenerator
.deriveEventsFromPath(idProviderImpl, path, idProviderImpl.nextId(), maxFanOut);
final ObjectId viewId = new ObjectId(binding.getViewClassName(), deriedEventsAndViewId.getViewId());
final Set<Id> deletedIds = new HashSet<>();
long highestId = initialIdProvider.nextId();
// Case: Entirely new adds with shuffled eventIds. Ensures delivery and time stamp order doesn't matter.
List<OrderIdProvider> idProviders = orderIdProviderGenerator.generateOrderIdProviders(seed, highestId,
new IdBatchConfig(Order.shuffle, deriedEventsAndViewId.getEvents().size(), randomBatchSize));
return Iterators.transform(idProviders.iterator(), new Function<OrderIdProvider, Object[]>() {
@Override
public Object[] apply(OrderIdProvider idProvider) {
// fresh Materialization per case so cases stay independent
Materialization materialization = new Materialization();
EventWriterProvider writerProvider = buildEventWriterProvider(materialization, idProvider);
EventFire eventFire = new EventFire(viewId,
deriedEventsAndViewId.getEvents(),
path.getPathMembers().get(path.getPathMemberSize() - 1),
deriedEventsAndViewId.getIdTree());
Object[] buildParamaterListItem = buildParamaterListItem("addsRandomOrder",
testId, materialization, tenantIdAndCentricId, actorId, binding, path, writerProvider, eventFire, deletedIds);
testId++;
return buildParamaterListItem;
}
});
}
});
// flatten the per-path case iterators into one stream of parameter sets
return Iterators.concat(transform);
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
/**
 * {@inheritDoc}
 * <br><br>
 * <strong>Warning:</strong> This function loops through every object in the World, including objects from the
 * cache. It shouldn't be relied on in performance critical code.
 * <br><br>
 * If you only want to loop through spawned objects, use {@link #dynamicIterator()}.
 */
@SuppressWarnings("unchecked")
@Override
public UnmodifiableIterator<GameObject> iterator() {
  // Chain the spawned (dynamic) and cached (static) iterators, then expose
  // the combined sequence through an unmodifiable view.
  return Iterators.unmodifiableIterator(Iterators.concat(dynamicIterator(), staticIterator()));
}
/**
 * Combines multiple iterators into a single iterator. The returned iterator traverses the
 * elements of each iterator in {@code inputs}. The input iterators are not polled until necessary.
 *
 * @param inputs
 * the to be flattened iterators. May not be <code>null</code>.
 * @return an iterator that provides the concatenated values of the input elements. Never <code>null</code>.
 *
 * @since 2.13
 */
@Inline(value="$2.$3concat($1)", imported=Iterators.class)
public static <T> Iterator<T> flatten(Iterator<? extends Iterator<? extends T>> inputs) {
// Thin delegation to Guava.  NOTE(review): @Inline presumably lets the
// Xtend compiler substitute a direct Iterators.concat call — confirm.
return Iterators.concat(inputs);
}