The following lists example code showing how to use instances of the java.util.stream.StreamTestDataProvider API class; you can also follow the link to view the source code on GitHub.
// Exercises single-level Collectors.groupingBy / groupingByConcurrent over the
// TestNG-provided stream data: default map types (HashMap / ConcurrentHashMap
// with List values) and explicitly supplied factories (TreeMap /
// ConcurrentSkipListMap with HashSet values).
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
// Buckets integers by remainder mod 3 (keys 0/1/2 for non-negative input;
// negative inputs would yield negative keys -- assumes provider data is
// non-negative, TODO confirm).
Function<Integer, Integer> classifier = i -> i % 3;
// Single-level groupBy
exerciseMapTabulation(data, groupingBy(classifier),
new GroupedMapAssertion<>(classifier, HashMap.class,
new ListAssertion<>()));
exerciseMapTabulation(data, groupingByConcurrent(classifier),
new GroupedMapAssertion<>(classifier, ConcurrentHashMap.class,
new ListAssertion<>()));
// With explicit constructors
exerciseMapTabulation(data,
groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, TreeMap.class,
// `false` presumably means the downstream collection is unordered -- verify
// against CollectionAssertion's constructor.
new CollectionAssertion<Integer>(HashSet.class, false)));
exerciseMapTabulation(data,
groupingByConcurrent(classifier, ConcurrentSkipListMap::new,
toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, ConcurrentSkipListMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
}
// Exercises skip(s).limit(l) for every pairing of slice sizes derived from
// the data size, recording the current (skip, limit) pair in the test
// context so a failure identifies the offending combination.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testSkipLimitOps(String name, TestData.OfRef<Integer> data) {
    // The same candidate sizes serve as both skip counts and limit counts.
    List<Integer> sliceBounds = sizes(data.size());
    for (int skip : sliceBounds) {
        setContext("skip", skip);
        for (int limit : sliceBounds) {
            setContext("limit", limit);
            // Expected element count: first apply the skip, then clamp to limit.
            int expectedSize = sliceSize(sliceSize(data.size(), skip), 0, limit);
            testSliceMulti(data,
                           expectedSize,
                           st -> st.skip(skip).limit(limit),
                           st -> st.skip(skip).limit(limit),
                           st -> st.skip(skip).limit(limit),
                           st -> st.skip(skip).limit(limit));
        }
    }
}
// Verifies two-level Collectors.partitioningBy: an outer partition on
// divisibility by 3 combined with (a) a nested partition on divisibility by 7
// and (b) a nested sum reduction, checked with the Partition/Reduce assertions.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testTwoLevelPartition(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
    Predicate<Integer> classifier = i -> i % 3 == 0;
    Predicate<Integer> classifier2 = i -> i % 7 == 0;
    // Two level partition
    exerciseMapTabulation(data,
            partitioningBy(classifier, partitioningBy(classifier2)),
            new PartitionAssertion<>(classifier,
                    // Diamond instead of the raw PartitionAssertion type: the
                    // constructed assertion is identical, but this avoids an
                    // unchecked-conversion warning.
                    new PartitionAssertion<>(classifier2, new ListAssertion<>())));
    // Two level partition with reduce
    exerciseMapTabulation(data,
            partitioningBy(classifier, reducing(0, Integer::sum)),
            new PartitionAssertion<>(classifier,
                    new ReduceAssertion<>(0, LambdaTestHelpers.identity(), Integer::sum)));
}
// For each classifier in the test's mapper data, groups the stream with
// Collectors.groupingBy and checks the result: multimap equality when the
// pipeline is parallel AND unordered (value-list order is then unspecified),
// exact equality otherwise. Finally the number of keys must match the
// expected bucket count.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
    // @@@ More things to test here:
    // - Every value in data is present in right bucket
    // - Total number of values equals size of data
    for (MapperData<Integer, ?> md : getMapperData(data)) {
        Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m);
        Map<Object, List<Integer>> result =
                withData(data)
                        .terminal(s -> s, s -> s.collect(tab))
                        .resultAsserter((act, exp, ord, par) -> {
                            // Logical && rather than bitwise & on booleans:
                            // identical result here, but && is the
                            // conventional form for a boolean condition.
                            if (par && !ord) {
                                GroupByOpTest.assertMultiMapEquals(act, exp);
                            }
                            else {
                                GroupByOpTest.assertObjectEquals(act, exp);
                            }
                        })
                        .exercise();
        assertEquals(result.keySet().size(), md.expectedSize);
    }
}
// Exercises skip(s) and the stacked composition skip(s).skip(s/2) for every
// slice size derived from the data size; the expected element count is
// computed with sliceSize before running the pipelines.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testSkipOps(String name, TestData.OfRef<Integer> data) {
    for (int skip : sizes(data.size())) {
        setContext("skip", skip);
        // Single skip.
        testSliceMulti(data,
                       sliceSize(data.size(), skip),
                       st -> st.skip(skip),
                       st -> st.skip(skip),
                       st -> st.skip(skip),
                       st -> st.skip(skip));
        // Two stacked skips: the full skip, then half as much again.
        int half = skip / 2;
        testSliceMulti(data,
                       sliceSize(sliceSize(data.size(), skip), half),
                       st -> st.skip(skip).skip(half),
                       st -> st.skip(skip).skip(half),
                       st -> st.skip(skip).skip(half),
                       st -> st.skip(skip).skip(half));
    }
}
// Exercises skip(s).limit(l) for every pairing of slice sizes derived from
// the data size; setContext records the current pair so a failure identifies
// the offending (skip, limit) combination.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
groups = { "serialization-hostile" })
public void testSkipLimitOps(String name, TestData.OfRef<Integer> data) {
List<Integer> skips = sizes(data.size());
// The same candidate sizes are reused as limit counts.
List<Integer> limits = skips;
for (int s : skips) {
setContext("skip", s);
for (int l : limits) {
setContext("limit", l);
testSliceMulti(data,
// Expected size: apply the skip first, then clamp to the limit.
sliceSize(sliceSize(data.size(), s), 0, l),
st -> st.skip(s).limit(l),
st -> st.skip(s).limit(l),
st -> st.skip(s).limit(l),
st -> st.skip(s).limit(l));
}
}
}
// Exercises single-level Collectors.groupingBy / groupingByConcurrent:
// default map types (HashMap / ConcurrentHashMap with List values) and
// explicit factories (TreeMap / ConcurrentSkipListMap with HashSet values).
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
// Buckets integers by remainder mod 3.
Function<Integer, Integer> classifier = i -> i % 3;
// Single-level groupBy
exerciseMapTabulation(data, groupingBy(classifier),
new GroupedMapAssertion<>(classifier, HashMap.class,
new ListAssertion<>()));
exerciseMapTabulation(data, groupingByConcurrent(classifier),
new GroupedMapAssertion<>(classifier, ConcurrentHashMap.class,
new ListAssertion<>()));
// With explicit constructors
exerciseMapTabulation(data,
groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, TreeMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
exerciseMapTabulation(data,
groupingByConcurrent(classifier, ConcurrentSkipListMap::new,
toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, ConcurrentSkipListMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
}
// For each classifier in the test's mapper data, groups the stream with
// Collectors.groupingBy and checks the result: multimap equality when the
// pipeline is parallel AND unordered (value-list order is then unspecified),
// exact equality otherwise. Finally the number of keys must match the
// expected bucket count.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
    // @@@ More things to test here:
    // - Every value in data is present in right bucket
    // - Total number of values equals size of data
    for (MapperData<Integer, ?> md : getMapperData(data)) {
        Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m);
        Map<Object, List<Integer>> result =
                withData(data)
                        .terminal(s -> s, s -> s.collect(tab))
                        .resultAsserter((act, exp, ord, par) -> {
                            // Logical && rather than bitwise & on booleans:
                            // identical result here, but && is the
                            // conventional form for a boolean condition.
                            if (par && !ord) {
                                GroupByOpTest.assertMultiMapEquals(act, exp);
                            }
                            else {
                                GroupByOpTest.assertObjectEquals(act, exp);
                            }
                        })
                        .exercise();
        assertEquals(result.keySet().size(), md.expectedSize);
    }
}
// Exercises skip(s) and the stacked composition skip(s).skip(s/2) for every
// slice size derived from the data size; sliceSize computes the expected
// element count for each pipeline.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
groups = { "serialization-hostile" })
public void testSkipOps(String name, TestData.OfRef<Integer> data) {
List<Integer> skips = sizes(data.size());
for (int s : skips) {
setContext("skip", s);
// Single skip.
testSliceMulti(data,
sliceSize(data.size(), s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s));
// Two stacked skips: the full skip, then half as much again.
testSliceMulti(data,
sliceSize(sliceSize(data.size(), s), s/2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2));
}
}
// Verifies two-level Collectors.partitioningBy: an outer partition on
// divisibility by 3 combined with (a) a nested partition on divisibility by 7
// and (b) a nested sum reduction, checked with the Partition/Reduce assertions.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testTwoLevelPartition(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
    Predicate<Integer> classifier = i -> i % 3 == 0;
    Predicate<Integer> classifier2 = i -> i % 7 == 0;
    // Two level partition
    exerciseMapTabulation(data,
            partitioningBy(classifier, partitioningBy(classifier2)),
            new PartitionAssertion<>(classifier,
                    // Diamond instead of the raw PartitionAssertion type: the
                    // constructed assertion is identical, but this avoids an
                    // unchecked-conversion warning.
                    new PartitionAssertion<>(classifier2, new ListAssertion<>())));
    // Two level partition with reduce
    exerciseMapTabulation(data,
            partitioningBy(classifier, reducing(0, Integer::sum)),
            new PartitionAssertion<>(classifier,
                    new ReduceAssertion<>(0, LambdaTestHelpers.identity(), Integer::sum)));
}
// Exercises single-level Collectors.groupingBy / groupingByConcurrent:
// default map types (HashMap / ConcurrentHashMap with List values) and
// explicit factories (TreeMap / ConcurrentSkipListMap with HashSet values).
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
// Buckets integers by remainder mod 3.
Function<Integer, Integer> classifier = i -> i % 3;
// Single-level groupBy
exerciseMapTabulation(data, groupingBy(classifier),
new GroupedMapAssertion<>(classifier, HashMap.class,
new ListAssertion<>()));
exerciseMapTabulation(data, groupingByConcurrent(classifier),
new GroupedMapAssertion<>(classifier, ConcurrentHashMap.class,
new ListAssertion<>()));
// With explicit constructors
exerciseMapTabulation(data,
groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, TreeMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
exerciseMapTabulation(data,
groupingByConcurrent(classifier, ConcurrentSkipListMap::new,
toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, ConcurrentSkipListMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
}
// Verifies two-level Collectors.partitioningBy: an outer partition on
// divisibility by 3 combined with (a) a nested partition on divisibility by 7
// and (b) a nested sum reduction, checked with the Partition/Reduce assertions.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testTwoLevelPartition(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
    Predicate<Integer> classifier = i -> i % 3 == 0;
    Predicate<Integer> classifier2 = i -> i % 7 == 0;
    // Two level partition
    exerciseMapTabulation(data,
            partitioningBy(classifier, partitioningBy(classifier2)),
            new PartitionAssertion<>(classifier,
                    // Diamond instead of the raw PartitionAssertion type: the
                    // constructed assertion is identical, but this avoids an
                    // unchecked-conversion warning.
                    new PartitionAssertion<>(classifier2, new ListAssertion<>())));
    // Two level partition with reduce
    exerciseMapTabulation(data,
            partitioningBy(classifier, reducing(0, Integer::sum)),
            new PartitionAssertion<>(classifier,
                    new ReduceAssertion<>(0, LambdaTestHelpers.identity(), Integer::sum)));
}
// For each classifier in the test's mapper data, groups the stream with
// Collectors.groupingBy and checks the result: multimap equality when the
// pipeline is parallel AND unordered (value-list order is then unspecified),
// exact equality otherwise. Finally the number of keys must match the
// expected bucket count.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
    // @@@ More things to test here:
    // - Every value in data is present in right bucket
    // - Total number of values equals size of data
    for (MapperData<Integer, ?> md : getMapperData(data)) {
        Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m);
        Map<Object, List<Integer>> result =
                withData(data)
                        .terminal(s -> s, s -> s.collect(tab))
                        .resultAsserter((act, exp, ord, par) -> {
                            // Logical && rather than bitwise & on booleans:
                            // identical result here, but && is the
                            // conventional form for a boolean condition.
                            if (par && !ord) {
                                GroupByOpTest.assertMultiMapEquals(act, exp);
                            }
                            else {
                                GroupByOpTest.assertObjectEquals(act, exp);
                            }
                        })
                        .exercise();
        assertEquals(result.keySet().size(), md.expectedSize);
    }
}
// Exercises skip(s) and the stacked composition skip(s).skip(s/2) for every
// slice size derived from the data size; sliceSize computes the expected
// element count for each pipeline.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
groups = { "serialization-hostile" })
public void testSkipOps(String name, TestData.OfRef<Integer> data) {
List<Integer> skips = sizes(data.size());
for (int s : skips) {
setContext("skip", s);
// Single skip.
testSliceMulti(data,
sliceSize(data.size(), s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s));
// Two stacked skips: the full skip, then half as much again.
testSliceMulti(data,
sliceSize(sliceSize(data.size(), s), s/2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2));
}
}
// Exercises single-level Collectors.groupingBy / groupingByConcurrent:
// default map types (HashMap / ConcurrentHashMap with List values) and
// explicit factories (TreeMap / ConcurrentSkipListMap with HashSet values).
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
// Buckets integers by remainder mod 3.
Function<Integer, Integer> classifier = i -> i % 3;
// Single-level groupBy
exerciseMapTabulation(data, groupingBy(classifier),
new GroupedMapAssertion<>(classifier, HashMap.class,
new ListAssertion<>()));
exerciseMapTabulation(data, groupingByConcurrent(classifier),
new GroupedMapAssertion<>(classifier, ConcurrentHashMap.class,
new ListAssertion<>()));
// With explicit constructors
exerciseMapTabulation(data,
groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, TreeMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
exerciseMapTabulation(data,
groupingByConcurrent(classifier, ConcurrentSkipListMap::new,
toCollection(HashSet::new)),
new GroupedMapAssertion<>(classifier, ConcurrentSkipListMap.class,
new CollectionAssertion<Integer>(HashSet.class, false)));
}
// Exercises skip(s) and the stacked composition skip(s).skip(s/2) for every
// slice size derived from the data size; sliceSize computes the expected
// element count for each pipeline.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
groups = { "serialization-hostile" })
public void testSkipOps(String name, TestData.OfRef<Integer> data) {
List<Integer> skips = sizes(data.size());
for (int s : skips) {
setContext("skip", s);
// Single skip.
testSliceMulti(data,
sliceSize(data.size(), s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s),
st -> st.skip(s));
// Two stacked skips: the full skip, then half as much again.
testSliceMulti(data,
sliceSize(sliceSize(data.size(), s), s/2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2),
st -> st.skip(s).skip(s / 2));
}
}
// Runs every terminal-op "kind" (presumably match/find variants -- see
// kinds()) against every predicate in INTEGER_PREDICATES, on the raw data,
// on an always-empty stream (filter(pFalse)), and on an evens-only stream.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testStream(String name, TestData.OfRef<Integer> data) {
for (Predicate<Integer> p : INTEGER_PREDICATES) {
setContext("p", p);
for (Kind kind : Kind.values()) {
setContext("kind", kind);
exerciseTerminalOps(data, this.<Integer>kinds().get(kind).apply(p));
// Empty stream: pFalse rejects every element.
exerciseTerminalOps(data, s -> s.filter(pFalse), this.<Integer>kinds().get(kind).apply(p));
exerciseTerminalOps(data, s -> s.filter(pEven), this.<Integer>kinds().get(kind).apply(p));
}
}
}
// Exhaustively exercises every 4-way combination of execution-mode "changer"
// (identity / sequential / parallel / unordered) and pipeline "stuff"
// (identity / map / sorted / combinations), composed as
// stuff2(changer2(stuff1(changer1(s)))), to verify mixed sequential/parallel
// mode switches mid-pipeline. 4 changers x 5 stages, applied twice = 400
// composed pipelines per data set; setContext records the four indices so a
// failure pinpoints the combination.
@SuppressWarnings({"rawtypes", "unchecked"})
@Test(dataProvider = "StreamTestData<Integer>.mini", dataProviderClass = StreamTestDataProvider.class)
public void testMixedSeqPar(String name, TestData.OfRef<Integer> data) {
Function<Integer, Integer> id = LambdaTestHelpers.identity();
// Mode changers: each toggles the stream's execution/ordering mode without
// touching its elements.
UnaryOperator<Stream<Integer>>[] changers
= new UnaryOperator[] {
(UnaryOperator<Stream<Integer>>) s -> s,
(UnaryOperator<Stream<Integer>>) s -> s.sequential(),
(UnaryOperator<Stream<Integer>>) s -> s.parallel(),
(UnaryOperator<Stream<Integer>>) s -> s.unordered()
};
// Pipeline stages: element-level operations of increasing complexity.
UnaryOperator<Stream<Integer>>[] stuff
= new UnaryOperator[] {
(UnaryOperator<Stream<Integer>>) s -> s,
(UnaryOperator<Stream<Integer>>) s -> s.map(id),
(UnaryOperator<Stream<Integer>>) s -> s.sorted(Comparator.naturalOrder()),
(UnaryOperator<Stream<Integer>>) s -> s.map(id).sorted(Comparator.naturalOrder()).map(id),
(UnaryOperator<Stream<Integer>>) s -> s.filter(LambdaTestHelpers.pEven).sorted(Comparator.naturalOrder()).map(id),
};
for (int c1Index = 0; c1Index < changers.length; c1Index++) {
setContext("c1Index", c1Index);
UnaryOperator<Stream<Integer>> c1 = changers[c1Index];
for (int s1Index = 0; s1Index < stuff.length; s1Index++) {
setContext("s1Index", s1Index);
UnaryOperator<Stream<Integer>> s1 = stuff[s1Index];
for (int c2Index = 0; c2Index < changers.length; c2Index++) {
setContext("c2Index", c2Index);
UnaryOperator<Stream<Integer>> c2 = changers[c2Index];
for (int s2Index = 0; s2Index < stuff.length; s2Index++) {
setContext("s2Index", s2Index);
UnaryOperator<Stream<Integer>> s2 = stuff[s2Index];
// Apply changer 1, stage 1, changer 2, stage 2 -- in that order.
UnaryOperator<Stream<Integer>> composed = s -> s2.apply(c2.apply(s1.apply(c1.apply(s))));
exerciseOps(data, composed);
}
}
}
}
}
// Verifies single-level Collectors.partitioningBy on divisibility by 3, both
// with the implicit toList() downstream and with an explicit toList()
// downstream -- the two forms must produce equivalent partitions.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimplePartition(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
Predicate<Integer> classifier = i -> i % 3 == 0;
// Single-level partition to downstream List
exerciseMapTabulation(data,
partitioningBy(classifier),
new PartitionAssertion<>(classifier, new ListAssertion<>()));
exerciseMapTabulation(data,
partitioningBy(classifier, toList()),
new PartitionAssertion<>(classifier, new ListAssertion<>()));
}
// Verifies Collectors.collectingAndThen: wrapping toList() with
// Collections::unmodifiableList must yield the same elements as plain
// toList(), and the finished list must reject mutation.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testComposeFinisher(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
List<Integer> asList = exerciseTerminalOps(data, s -> s.collect(toList()));
List<Integer> asImmutableList = exerciseTerminalOps(data, s -> s.collect(collectingAndThen(toList(), Collections::unmodifiableList)));
assertEquals(asList, asImmutableList);
// The finisher-produced list must be unmodifiable: add() has to throw.
try {
asImmutableList.add(0);
fail("Expecting immutable result");
}
catch (UnsupportedOperationException ignored) { }
}
// Verifies distinct() on data that contains nulls (the "withNull" provider):
// a plain distinct, an unordered distinct, and a doubled distinct must each
// produce a collection with no duplicate elements.
@Test(dataProvider = "withNull:StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOpWithNull(String name, TestData.OfRef<Integer> data) {
Collection<Integer> node = exerciseOps(data, Stream::distinct);
assertUnique(node);
// Unordered distinct may use a different (non-order-preserving) strategy.
node = withData(data).
stream(s -> s.unordered().distinct()).
exercise();
assertUnique(node);
// distinct().distinct() must be idempotent.
node = exerciseOps(data, s -> s.distinct().distinct());
assertUnique(node);
}
// Verifies distinct() on a pre-sorted, null-containing source: the result
// must be both unique and still sorted under the null-tolerant comparator.
@Test(dataProvider = "withNull:StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOpWithNullSorted(String name, TestData.OfRef<Integer> data) {
List<Integer> l = new ArrayList<>();
data.into(l).sort(cNullInteger);
// Need to inject SORTED into the sorted list source since
// sorted() with a comparator ironically clears SORTED
Collection<Integer> node = exerciseOps(new SortedTestData<>(l), Stream::distinct);
assertUnique(node);
assertSorted(node, cNullInteger);
}
// Verifies that a doubled distinct() is idempotent across all four stream
// shapes exercised by exerciseOpsInt (reference/int/long/double pipelines --
// presumably; verify against exerciseOpsInt's signature).
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testDistinctDistinct(String name, TestData.OfRef<Integer> data) {
Collection<Integer> result = exerciseOpsInt(
data,
s -> s.distinct().distinct(),
s -> s.distinct().distinct(),
s -> s.distinct().distinct(),
s -> s.distinct().distinct());
assertUnique(result);
}
// Verifies distinct().sorted(): the result must be unique and sorted, and
// the TestParallelSizedOp assertion additionally checks the parallel
// size-known path.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testDistinctSorted(String name, TestData.OfRef<Integer> data) {
Collection<Integer> result = withData(data)
.stream(s -> s.distinct().sorted(),
new CollectorOps.TestParallelSizedOp<>())
.exercise();
assertUnique(result);
assertSorted(result);
}
// Verifies the reverse composition, sorted().distinct(): the result must be
// unique and sorted regardless of which operation runs first.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSortedDistinct(String name, TestData.OfRef<Integer> data) {
Collection<Integer> result = withData(data)
.stream(s -> s.sorted().distinct(),
new CollectorOps.TestParallelSizedOp<>())
.exercise();
assertUnique(result);
assertSorted(result);
}
// Verifies Stream.concat and its primitive counterparts (IntStream /
// LongStream / DoubleStream) by concatenating each stream with a second
// stream over the same data.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
exerciseOpsInt(data,
s -> Stream.concat(s, data.stream()),
s -> IntStream.concat(s, data.stream().mapToInt(Integer::intValue)),
s -> LongStream.concat(s, data.stream().mapToLong(Integer::longValue)),
s -> DoubleStream.concat(s, data.stream().mapToDouble(Integer::doubleValue)));
}
// Verifies Stream.reduce equivalences: the seeded three-arg reduce must agree
// with the seedless Optional-returning reduce (using the seed as the
// Optional's fallback) for sum, min, and max, both on the raw data and after
// a doubling map.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
// Reducing an empty stream with seed 0 must yield 0.
assertEquals(0, (int) exerciseTerminalOps(data, s -> s.filter(pFalse), s -> s.reduce(0, rPlus, rPlus)));
// Sum: seeded vs. seedless.
Optional<Integer> seedless = exerciseTerminalOps(data, s -> s.reduce(rPlus));
Integer folded = exerciseTerminalOps(data, s -> s.reduce(0, rPlus, rPlus));
assertEquals(folded, seedless.orElse(0));
// Min: MAX_VALUE is the identity/fallback.
seedless = exerciseTerminalOps(data, s -> s.reduce(rMin));
folded = exerciseTerminalOps(data, s -> s.reduce(Integer.MAX_VALUE, rMin, rMin));
assertEquals(folded, seedless.orElse(Integer.MAX_VALUE));
// Max: MIN_VALUE is the identity/fallback.
seedless = exerciseTerminalOps(data, s -> s.reduce(rMax));
folded = exerciseTerminalOps(data, s -> s.reduce(Integer.MIN_VALUE, rMax, rMax));
assertEquals(folded, seedless.orElse(Integer.MIN_VALUE));
// Same three comparisons after mapping each element through mDoubler.
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rPlus));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(0, rPlus, rPlus));
assertEquals(folded, seedless.orElse(0));
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rMin));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(Integer.MAX_VALUE, rMin, rMin));
assertEquals(folded, seedless.orElse(Integer.MAX_VALUE));
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rMax));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(Integer.MIN_VALUE, rMax, rMax));
assertEquals(folded, seedless.orElse(Integer.MIN_VALUE));
}
// Verifies Stream.count(): the counted value must equal the number of
// elements the data source yields.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
    // Establish the expected count by independently traversing the source.
    long expectedCount = data.stream().count();
    withData(data)
            .terminal(Stream::count)
            .expectedResult(expectedCount)
            .exercise();
}
// Verifies Stream.reduce equivalences: the seeded three-arg reduce must agree
// with the seedless Optional-returning reduce (using the seed as the
// Optional's fallback) for sum, min, and max, both on the raw data and after
// a doubling map.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testOps(String name, TestData.OfRef<Integer> data) {
// Reducing an empty stream with seed 0 must yield 0.
assertEquals(0, (int) exerciseTerminalOps(data, s -> s.filter(pFalse), s -> s.reduce(0, rPlus, rPlus)));
// Sum: seeded vs. seedless.
Optional<Integer> seedless = exerciseTerminalOps(data, s -> s.reduce(rPlus));
Integer folded = exerciseTerminalOps(data, s -> s.reduce(0, rPlus, rPlus));
assertEquals(folded, seedless.orElse(0));
// Min: MAX_VALUE is the identity/fallback.
seedless = exerciseTerminalOps(data, s -> s.reduce(rMin));
folded = exerciseTerminalOps(data, s -> s.reduce(Integer.MAX_VALUE, rMin, rMin));
assertEquals(folded, seedless.orElse(Integer.MAX_VALUE));
// Max: MIN_VALUE is the identity/fallback.
seedless = exerciseTerminalOps(data, s -> s.reduce(rMax));
folded = exerciseTerminalOps(data, s -> s.reduce(Integer.MIN_VALUE, rMax, rMax));
assertEquals(folded, seedless.orElse(Integer.MIN_VALUE));
// Same three comparisons after mapping each element through mDoubler.
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rPlus));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(0, rPlus, rPlus));
assertEquals(folded, seedless.orElse(0));
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rMin));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(Integer.MAX_VALUE, rMin, rMin));
assertEquals(folded, seedless.orElse(Integer.MAX_VALUE));
seedless = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(rMax));
folded = exerciseTerminalOps(data, s -> s.map(mDoubler), s -> s.reduce(Integer.MIN_VALUE, rMax, rMax));
assertEquals(folded, seedless.orElse(Integer.MIN_VALUE));
}
// Runs every terminal-op "kind" (presumably match/find variants -- see
// kinds()) against every predicate in INTEGER_PREDICATES, on the raw data,
// on an always-empty stream (filter(pFalse)), and on an evens-only stream.
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testStream(String name, TestData.OfRef<Integer> data) {
for (Predicate<Integer> p : INTEGER_PREDICATES) {
setContext("p", p);
for (Kind kind : Kind.values()) {
setContext("kind", kind);
exerciseTerminalOps(data, this.<Integer>kinds().get(kind).apply(p));
// Empty stream: pFalse rejects every element.
exerciseTerminalOps(data, s -> s.filter(pFalse), this.<Integer>kinds().get(kind).apply(p));
exerciseTerminalOps(data, s -> s.filter(pEven), this.<Integer>kinds().get(kind).apply(p));
}
}
}