The following are example usages of java.util.stream.IntStream#range(), collected from various open-source projects; each example can also be traced back to its original source on GitHub.
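As a quick orientation before the examples: IntStream.range(startInclusive, endExclusive) returns a sequential ordered stream of int values from the start value up to, but not including, the end value. A minimal standalone sketch (class and variable names chosen here for illustration only):

import java.util.Arrays;
import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints 0 1 2 3 4 -- the upper bound 5 is exclusive
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // An index stream is a convenient replacement for an index-based for loop
        int[] squares = IntStream.range(0, 5).map(i -> i * i).toArray();
        System.out.println(Arrays.toString(squares)); // [0, 1, 4, 9, 16]
    }
}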
/**
* Returns all vertices in this row, sorted by column index (min to max).
*
* <p>Note: the index of a vertex in the list does not match the column index. To get the
* column index for a vertex, call {@link #getColumn(Object) getColumn(V)}.
*
* @return all vertices in this row
*/
public List<V> getVertices() {
    // fill a list with vertices or null values
    //@formatter:off
    Integer start = verticesByColumn.firstKey();
    Integer n = getColumnCount();
    IntStream columnIndexes = IntStream.range(start, start + n);
    List<V> vertices =
            columnIndexes
                .mapToObj(col -> verticesByColumn.get(col))
                .filter(v -> v != null)
                .collect(Collectors.toList())
            ;
    //@formatter:on
    return vertices;
}
@Override
public double integrate(final DoubleUnaryOperator integrand) {
    final double lowerBound = getLowerBound();
    final double upperBound = getUpperBound();
    final double range = upperBound - lowerBound;
    final int numberOfDoubleSizeIntervalls = (int) ((numberOfEvaluationPoints - 1) / 2.0);
    final double doubleIntervall = range / numberOfDoubleSizeIntervalls;
    final double singleIntervall = 0.5 * doubleIntervall;

    IntStream intervals = IntStream.range(1, numberOfDoubleSizeIntervalls);
    if (useParallelEvaluation) {
        intervals = intervals.parallel();
    }

    double sum = intervals.mapToDouble(
            i -> integrand.applyAsDouble(lowerBound + i * doubleIntervall)
                 + 2 * integrand.applyAsDouble(lowerBound + i * doubleIntervall + singleIntervall)
    ).sum();

    sum += 2.0 * integrand.applyAsDouble(lowerBound + singleIntervall);

    return (integrand.applyAsDouble(lowerBound) + 2.0 * sum + integrand.applyAsDouble(upperBound)) / 3.0 * singleIntervall;
}
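The integrate method above applies the composite Simpson's rule: the range [lowerBound, upperBound] is split into double-width intervals, each sampled at its endpoints and midpoint, and the interior contributions are summed with an IntStream that is switched to parallel() when useParallelEvaluation is set. A self-contained sketch of the same weighting scheme, using a hypothetical static helper simpsonIntegrate rather than the class shown above, might look like this:

import java.util.function.DoubleUnaryOperator;
import java.util.stream.IntStream;

public class SimpsonSketch {

    // Composite Simpson's rule over [a, b] using n double-width intervals (2n + 1 evaluation points).
    static double simpsonIntegrate(DoubleUnaryOperator f, double a, double b, int n, boolean parallel) {
        double h = (b - a) / n;   // width of one double-size interval
        double half = 0.5 * h;    // width of one single interval

        IntStream indices = IntStream.range(1, n);
        if (parallel) {
            indices = indices.parallel();
        }
        // interior nodes: full-width points get weight 2, midpoints get weight 4 (via the outer 2.0 * sum)
        double sum = indices.mapToDouble(
                i -> f.applyAsDouble(a + i * h) + 2 * f.applyAsDouble(a + i * h + half)
        ).sum();
        sum += 2.0 * f.applyAsDouble(a + half); // midpoint of the first interval

        return (f.applyAsDouble(a) + 2.0 * sum + f.applyAsDouble(b)) / 3.0 * half;
    }

    public static void main(String[] args) {
        // Simpson's rule is exact for x^2; the integral over [0, 1] is 1/3
        System.out.println(simpsonIntegrate(x -> x * x, 0.0, 1.0, 100, false));
    }
}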
private void updatePredictedCounts() {
    StopWatch stopWatch = new StopWatch();
    if (logger.isDebugEnabled()) {
        stopWatch.start();
    }
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, numParameters).parallel();
    } else {
        intStream = IntStream.range(0, numParameters);
    }
    intStream.forEach(i -> this.predictedCounts.set(i, calPredictedCount(i)));
    if (logger.isDebugEnabled()) {
        logger.debug("time spent on updatePredictedCounts = " + stopWatch);
    }
}
private void updateClassProbMatrix() {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, dataSet.getNumDataPoints()).parallel();
    } else {
        intStream = IntStream.range(0, dataSet.getNumDataPoints());
    }
    intStream.forEach(this::updateClassProbs);
    this.isProbabilityCacheValid = true;
    if (logger.isDebugEnabled()) {
        logger.debug("time spent on updateClassProbMatrix = " + stopWatch);
    }
}
/**
 * Broadcast the model to all workers.
 */
private void broadcastModel(boolean par) {
    IntStream stream = IntStream.range(0, _modelMap.size());
    (par ? stream.parallel() : stream).forEach(workerID -> {
        try {
            broadcastModel(workerID);
        } catch (InterruptedException e) {
            throw new DMLRuntimeException("Paramserv func: some error occurred when broadcasting model", e);
        }
    });
}
private void updateEmpiricalCounts() {
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, numParameters).parallel();
    } else {
        intStream = IntStream.range(0, numParameters);
    }
    intStream.forEach(this::calEmpiricalCount);
}
public double penaltyValue() {
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, numClasses).parallel();
    } else {
        intStream = IntStream.range(0, numClasses);
    }
    return intStream.mapToDouble(this::penaltyValue).sum();
}
private double getValueForAllData() {
    updateClassScoreMatrix();
    updateAssignmentScoreMatrix();
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, dataSet.getNumDataPoints()).parallel();
    } else {
        intStream = IntStream.range(0, dataSet.getNumDataPoints());
    }
    return intStream.mapToDouble(this::getValueForOneData).sum();
    // return dataSetLogLikelihood(dataSet)*-1;
}
default double[] predict(DataSet dataSet, boolean parallel) {
    IntStream intStream = IntStream.range(0, dataSet.getNumDataPoints());
    if (parallel) {
        intStream = intStream.parallel();
    }
    return intStream.mapToDouble(i -> predict(dataSet.getRow(i))).toArray();
}
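Most of the snippets above repeat one pattern: build an index stream with IntStream.range, call parallel() on it when a flag asks for it, and then map or iterate over the indices. A condensed standalone sketch of that pattern (dataSize, computeFor and the class name are illustrative, not taken from any of the projects above):

import java.util.function.IntToDoubleFunction;
import java.util.stream.IntStream;

class ConditionallyParallelSum {

    // Sums computeFor(i) for i in [0, dataSize), in parallel when requested.
    static double sum(int dataSize, boolean parallel, IntToDoubleFunction computeFor) {
        IntStream indices = IntStream.range(0, dataSize);
        if (parallel) {
            indices = indices.parallel(); // same stream, now evaluated on the common fork-join pool
        }
        return indices.mapToDouble(computeFor).sum();
    }

    public static void main(String[] args) {
        // 0^2 + 1^2 + ... + 9^2 = 285
        System.out.println(sum(10, true, i -> (double) (i * i)));
    }
}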
public double penaltyValueEL() {
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, numClasses).parallel();
    } else {
        intStream = IntStream.range(0, numClasses);
    }
    return intStream.mapToDouble(this::penaltyValueEL).sum();
}
private Wp41HistoData parseData(DataMiningFacadeParams dmParams) throws IOException, InterruptedException {
    int rowCount = histoClient.queryCount(dmParams.getInterval(), HistoDbHorizon.SN);
    Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>((dmParams.getGensIds().size()
            + dmParams.getLoadsIds().size()
            + dmParams.getDanglingLinesIds().size()) * 2); // gens P, Q; loads P, Q; danglingLines P0, Q0
    for (String genId : dmParams.getGensIds()) {
        attributeIds.add(new HistoDbNetworkAttributeId(genId, HistoDbAttr.P));
        attributeIds.add(new HistoDbNetworkAttributeId(genId, HistoDbAttr.Q));
    }
    for (String loadId : dmParams.getLoadsIds()) {
        attributeIds.add(new HistoDbNetworkAttributeId(loadId, HistoDbAttr.P));
        attributeIds.add(new HistoDbNetworkAttributeId(loadId, HistoDbAttr.Q));
    }
    for (String dlId : dmParams.getDanglingLinesIds()) {
        attributeIds.add(new HistoDbNetworkAttributeId(dlId, HistoDbAttr.P0));
        attributeIds.add(new HistoDbNetworkAttributeId(dlId, HistoDbAttr.Q0));
    }
    List<Integer> rowIndexes;
    try (IntStream intStream = IntStream.range(0, rowCount)) {
        rowIndexes = intStream.boxed().collect(Collectors.toList());
    }
    List<String> colIndexes = attributeIds.stream().map(Object::toString).collect(Collectors.toList());
    ArrayTable<Integer, String, Float> hdTable = ArrayTable.create(rowIndexes, colIndexes);
    // parse csv generators
    try (InputStream is = histoClient.queryCsv(HistoQueryType.data, attributeIds, dmParams.getInterval(), HistoDbHorizon.SN, false, false)) {
        parseCsv(is, attributeIds, hdTable, rowCount);
    }
    return new Wp41HistoData(dmParams.getGensIds(), dmParams.getLoadsIds(), dmParams.getDanglingLinesIds(), hdTable);
}
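One detail worth noting in parseData above: IntStream produces primitive int values, so it has to be converted with boxed() before it can be collected into a List<Integer>. A minimal illustration:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

class BoxedExample {
    public static void main(String[] args) {
        // boxed() turns the primitive IntStream into a Stream<Integer>
        List<Integer> rowIndexes = IntStream.range(0, 5)
                .boxed()
                .collect(Collectors.toList());
        System.out.println(rowIndexes); // [0, 1, 2, 3, 4]
    }
}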
private double estimateProbabilityOfPartialAssignment(Assignment MAPassignment, boolean useConditionalDistributions) {
    double probabilityEstimate;
    final int numSamplesAverage = 150;

    Assignment evidenceAugmented = new HashMapAssignment(evidence);
    MAPvariables.forEach(voi -> evidenceAugmented.setValue(voi, MAPassignment.getValue(voi)));

    final Assignment finalAssignment = new HashMapAssignment(MAPassignment);
    IntStream auxIntStream = IntStream.range(0, numSamplesAverage);
    //probabilityEstimate = auxIntStream.mapToObj(i -> obtainValuesRandomly(finalAssignment,evidenceAugmented,new Random())).mapToDouble(as -> Math.exp(this.model.getLogProbabiltyOf(as))).average().getAsDouble();
    try {
        probabilityEstimate = auxIntStream.mapToObj(i -> {
                    if (useConditionalDistributions)
                        return obtainValues(finalAssignment, evidenceAugmented, new Random(MAPrandom.nextInt()));
                    else
                        return obtainValuesRandomly(finalAssignment, evidenceAugmented, new Random(MAPrandom.nextInt()));
                })
                .mapToDouble(as -> Math.exp(this.model.getLogProbabiltyOf(as)))
                .filter(Double::isFinite)
                .average()
                .getAsDouble();
    } catch (Exception e) {
        probabilityEstimate = 0;
    }
    return probabilityEstimate;
}
private double kl() {
    if (!isProbabilityCacheValid) {
        updateClassProbMatrix();
    }
    IntStream intStream;
    if (isParallel) {
        intStream = IntStream.range(0, dataSet.getNumDataPoints()).parallel();
    } else {
        intStream = IntStream.range(0, dataSet.getNumDataPoints());
    }
    return intStream.mapToDouble(this::kl).sum();
}
@Test
public void testSerialToParallel() {
    IntStream range = IntStream.range(0, 100);
    assertEquals(328350, SerialToParallel.sumOfSquares(range));
}
@Test
public void givenIntStream_whenSum_thenResultIsCorrect() {
    IntStream intNumbers = IntStream.range(0, 3);
    assertEquals(3, intNumbers.sum());
}
public static IntStream randomlySizedIntStream(int lower, int upper) {
    return IntStream.range(0, randomIntBetween(lower, upper));
}
/**
* Returns a sequential ordered {@code IntStream} from {@link #min()}
* (inclusive) to {@link #max()} (exclusive) by an incremental step of
* {@code 1}.
* <p>
* An equivalent sequence of increasing values can be produced sequentially
* using a {@code for} loop as follows:
* <pre>{@code
* for (int i = range.min(); i < range.max(); ++i) {
* ...
* }
* }</pre>
*
* @since 3.4
*
* @return a sequential {@link IntStream} for the range of {@code int}
* elements
*/
public IntStream stream() {
    return IntStream.range(_min, _max);
}
/**
* Returns an {@link IntStream} of sample indices
*
* @return {@link IntStream}
*/
private IntStream sampleIndexStream() { return IntStream.range(0, numSamples); }