下面列出了如何使用 io.micrometer.core.instrument.distribution.ValueAtPercentile API 的类实例代码及写法,也可以点击链接到 GitHub 查看源代码。
/**
 * Builds a query that evaluates, over every meter matched by {@code search}, the value
 * recorded at the given {@code percentile}, and reduces them to the maximum.
 * Meters that do not publish the requested percentile contribute {@code Double.NaN}.
 */
public final NumericQuery maxPercentile(Function<Search, Search> search, double percentile) {
    return new Instant(name, tags, baseUnit, failedMessage, requires, search, s -> s.meters().stream()
            .map(meter -> {
                // Only histogram-capable meter types expose percentile snapshots.
                ValueAtPercentile[] snapshotPercentiles =
                        meter instanceof DistributionSummary
                                ? ((DistributionSummary) meter).takeSnapshot().percentileValues()
                                : meter instanceof Timer
                                        ? ((Timer) meter).takeSnapshot().percentileValues()
                                        : meter instanceof LongTaskTimer
                                                ? ((LongTaskTimer) meter).takeSnapshot().percentileValues()
                                                : new ValueAtPercentile[0];
                // Pick the value published for exactly the requested percentile, if any.
                for (ValueAtPercentile vap : snapshotPercentiles) {
                    if (vap.percentile() == percentile) {
                        return vap.value();
                    }
                }
                return Double.NaN;
            })
            .reduce(Double.NaN, MAX_OR_NAN)
    );
}
/**
 * Renders a distribution summary as OpenTSDB metric lines: count, sum and max,
 * plus one line per configured percentile and per histogram bucket.
 */
Stream<String> writeSummary(DistributionSummary summary) {
    final long timestamp = config().clock().wallTime();
    final ValueAtPercentile[] percentiles = summary.takeSnapshot().percentileValues();
    // Bucket counts come from the registry-specific subclass, not the snapshot.
    final CountAtBucket[] buckets = ((OpenTSDBDistributionSummary) summary).histogramCounts();
    final double totalCount = summary.count();

    final List<String> lines = new ArrayList<>();
    lines.add(writeMetricWithSuffix(summary.getId(), "count", timestamp, totalCount));
    lines.add(writeMetricWithSuffix(summary.getId(), "sum", timestamp, summary.totalAmount()));
    lines.add(writeMetricWithSuffix(summary.getId(), "max", timestamp, summary.max()));
    if (percentiles.length > 0) {
        lines.addAll(writePercentiles(summary, timestamp, percentiles));
    }
    if (buckets.length > 0) {
        lines.addAll(writeHistogram(timestamp, summary, buckets, totalCount, getBaseTimeUnit()));
    }
    return lines.stream();
}
/**
 * Renders a timer as OpenTSDB metric lines: count, sum and max (scaled to the base
 * time unit), plus one line per configured percentile and per histogram bucket.
 */
Stream<String> writeTimer(Timer timer) {
    final long timestamp = config().clock().wallTime();
    // One snapshot serves both percentiles and bucket counts.
    final HistogramSnapshot snapshot = timer.takeSnapshot();
    final ValueAtPercentile[] percentiles = snapshot.percentileValues();
    final CountAtBucket[] buckets = snapshot.histogramCounts();
    final double totalCount = timer.count();

    final List<String> lines = new ArrayList<>();
    lines.add(writeMetricWithSuffix(timer.getId(), "count", timestamp, totalCount));
    lines.add(writeMetricWithSuffix(timer.getId(), "sum", timestamp, timer.totalTime(getBaseTimeUnit())));
    lines.add(writeMetricWithSuffix(timer.getId(), "max", timestamp, timer.max(getBaseTimeUnit())));
    if (percentiles.length > 0) {
        lines.addAll(writePercentiles(timer, timestamp, percentiles));
    }
    if (buckets.length > 0) {
        lines.addAll(writeHistogram(timestamp, timer, buckets, totalCount, getBaseTimeUnit()));
    }
    return lines.stream();
}
/**
 * Renders a long task timer as OpenTSDB metric lines: active task count, running
 * duration sum and max (scaled to the base time unit), plus one line per configured
 * percentile and per histogram bucket.
 */
Stream<String> writeLongTaskTimer(LongTaskTimer timer) {
    final long timestamp = config().clock().wallTime();
    // One snapshot serves both percentiles and bucket counts.
    final HistogramSnapshot snapshot = timer.takeSnapshot();
    final ValueAtPercentile[] percentiles = snapshot.percentileValues();
    final CountAtBucket[] buckets = snapshot.histogramCounts();
    final double activeTasks = timer.activeTasks();

    final List<String> lines = new ArrayList<>();
    lines.add(writeMetricWithSuffix(timer.getId(), "active.count", timestamp, activeTasks));
    lines.add(writeMetricWithSuffix(timer.getId(), "duration.sum", timestamp, timer.duration(getBaseTimeUnit())));
    lines.add(writeMetricWithSuffix(timer.getId(), "max", timestamp, timer.max(getBaseTimeUnit())));
    if (percentiles.length > 0) {
        lines.addAll(writePercentiles(timer, timestamp, percentiles));
    }
    if (buckets.length > 0) {
        lines.addAll(writeHistogram(timestamp, timer, buckets, activeTasks, getBaseTimeUnit()));
    }
    return lines.stream();
}
/**
 * Creates a Micrometer distribution summary backed by a Spectator summary, and
 * registers gauges exposing its percentiles and SLO histogram buckets.
 */
@SuppressWarnings("ConstantConditions")
@Override
protected io.micrometer.core.instrument.DistributionSummary newDistributionSummary(Meter.Id id, DistributionStatisticConfig distributionStatisticConfig,
                                                                                   double scale) {
    // The percentile variant doesn't report the normal count/totalTime/max stats,
    // so we treat it as additive.
    final com.netflix.spectator.api.DistributionSummary delegate =
            distributionStatisticConfig.isPercentileHistogram()
                    ? PercentileDistributionSummary.get(registry, spectatorId(id))
                    : registry.distributionSummary(spectatorId(id));
    final SpectatorDistributionSummary summary =
            new SpectatorDistributionSummary(id, delegate, clock, distributionStatisticConfig, scale);
    // Publish companion gauges: one per percentile and one per SLO bucket.
    HistogramGauges.register(summary, this,
            percentile -> id.getName(),
            percentile -> Tags.concat(id.getTagsAsIterable(), "percentile", DoubleFormat.decimalOrNan(percentile.percentile())),
            ValueAtPercentile::value,
            bucket -> id.getName(),
            bucket -> Tags.concat(id.getTagsAsIterable(), "service.level.objective", DoubleFormat.wholeOrDecimal(bucket.bucket())));
    return summary;
}
/**
 * Renders one metric line per published percentile, tagged with {@code quantile}.
 * Satisfies https://prometheus.io/docs/concepts/metric_types/#summary
 * Timer values are scaled to the registry's base time unit.
 */
private List<String> writePercentiles(Meter meter, long wallTime, ValueAtPercentile[] percentileValues) {
    final List<String> lines = new ArrayList<>(percentileValues.length);
    final boolean scaleToBaseUnit = meter instanceof Timer;
    for (ValueAtPercentile vap : percentileValues) {
        final Meter.Id tagged = meter.getId().withTag(new ImmutableTag("quantile", doubleToGoString(vap.percentile())));
        final double value = scaleToBaseUnit ? vap.value(getBaseTimeUnit()) : vap.value();
        lines.add(writeMetric(tagged, wallTime, value));
    }
    return lines;
}
@Test
public void getPercentileMetrics() {
    // The 2.5 percentile should be rendered with a "250percentile" statistic tag
    // (percentile scaled by 100).
    ValueAtPercentile[] input = new ValueAtPercentile[] { new ValueAtPercentile(2.5, 2.0) };
    List<Metric> result = converter.getMetrics(meter, input).collect(Collectors.toList());
    assertEquals(1, result.size());
    assertTrue(statisticTagExists(result, "250percentile"));
}
/**
 * Collects JVM garbage-collection metrics from the meter registry into a flat map:
 * per-GC-pause-timer stats (count/max/totalTime/mean in milliseconds plus any
 * published percentiles), remaining jvm.gc gauges and counters, and class
 * loaded/unloaded totals.
 *
 * @return map of metric name to value; pause timers map to a nested Map of stats
 */
private Map<String, Object> garbageCollectorMetrics() {
    Map<String, Object> resultsGarbageCollector = new HashMap<>();

    // GC pause timers: export the standard stats plus any published percentiles.
    Collection<Timer> timers = Search.in(this.meterRegistry).name(s -> s.contains("jvm.gc.pause")).timers();
    timers.forEach(timer -> {
        String key = timer.getId().getName();
        Map<String, Number> gcPauseResults = new HashMap<>();
        gcPauseResults.put("count", timer.count());
        gcPauseResults.put("max", timer.max(TimeUnit.MILLISECONDS));
        gcPauseResults.put("totalTime", timer.totalTime(TimeUnit.MILLISECONDS));
        gcPauseResults.put("mean", timer.mean(TimeUnit.MILLISECONDS));
        for (ValueAtPercentile percentile : timer.takeSnapshot().percentileValues()) {
            gcPauseResults.put(String.valueOf(percentile.percentile()), percentile.value(TimeUnit.MILLISECONDS));
        }
        // putIfAbsent: keep the first timer's stats if two timers share a name.
        resultsGarbageCollector.putIfAbsent(key, gcPauseResults);
    });

    // Other jvm.gc gauges and counters, excluding the pause timers handled above.
    Collection<Gauge> gauges = Search.in(this.meterRegistry).name(s -> s.contains("jvm.gc") && !s.contains("jvm.gc.pause")).gauges();
    gauges.forEach(gauge -> resultsGarbageCollector.put(gauge.getId().getName(), gauge.value()));
    Collection<Counter> counters = Search.in(this.meterRegistry).name(s -> s.contains("jvm.gc") && !s.contains("jvm.gc.pause")).counters();
    counters.forEach(counter -> resultsGarbageCollector.put(counter.getId().getName(), counter.count()));

    // Class loading stats; sum via primitive streams to avoid boxing, 0.0 when empty.
    double classesLoaded = Search.in(this.meterRegistry).name(s -> s.contains("jvm.classes.loaded")).gauges()
            .stream().mapToDouble(Gauge::value).sum();
    resultsGarbageCollector.put("classesLoaded", classesLoaded);
    double classesUnloaded = Search.in(this.meterRegistry).name(s -> s.contains("jvm.classes.unloaded")).functionCounters()
            .stream().mapToDouble(FunctionCounter::count).sum();
    resultsGarbageCollector.put("classesUnloaded", classesUnloaded);
    return resultsGarbageCollector;
}
/**
 * Demonstration test: records latencies into a Timer publishing p90/p99/p999 and
 * prints the decaying percentile values once per second.
 *
 * NOTE(review): this runs for roughly 210 wall-clock seconds, and the scheduled
 * executor is never shut down, so its non-daemon thread keeps running after the
 * loops finish — fine for a manual demo, unsuitable for an automated suite.
 */
@Test
public void test() throws InterruptedException {
final PrometheusMeterRegistry prometheusRegistry = new PrometheusMeterRegistry(PrometheusConfig.DEFAULT);
// Timer with a 10s statistic expiry and 20 rotating buffers; percentile values
// decay as recorded samples age out of the rotating window.
final Timer summary = Timer.builder("test")
.distributionStatisticExpiry(Duration.ofSeconds(10))
.publishPercentiles(0.9D, 0.99D, 0.999D)
.distributionStatisticBufferLength(20)
.publishPercentileHistogram(false)
.tags(new String[]{"method", "get()"})
.register(prometheusRegistry);
// Equivalent DistributionSummary-based setup, kept for reference:
// final DistributionSummary summary = DistributionSummary.builder("test")
// .distributionStatisticExpiry(Duration.ofSeconds(30))
// .publishPercentiles(0.9, 0.99, 0.999)
// .publishPercentileHistogram(false)
// .tags(new String[]{"method", "get()"})
// .register(prometheusRegistry);
AtomicInteger second = new AtomicInteger();
// Once per second: snapshot the timer and print p90/p99/p999 in milliseconds.
Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> {
final HistogramSnapshot snapshot = summary.takeSnapshot();
final ValueAtPercentile[] valueAtPercentiles = snapshot.percentileValues();
double p90 = 0, p99 = 0, p999 = 0;
for (ValueAtPercentile percentile : valueAtPercentiles) {
if (percentile.percentile() == 0.9D) {
p90 = percentile.value(TimeUnit.MILLISECONDS);
} else if (percentile.percentile() == 0.99D) {
p99 = percentile.value(TimeUnit.MILLISECONDS);
} else {
// Any remaining percentile is assumed to be 0.999 — the only other one published above.
p999 = percentile.value(TimeUnit.MILLISECONDS);
}
}
System.out.println(String.format("second: %s, p90: %s, p99: %s, p999: %s", second.incrementAndGet(), p90, p99, p999));
}, 1, 1, TimeUnit.SECONDS);
// Record one sample per second (values cycling 0..9 ms) for 100 seconds.
for (int j = 0; j < 100; j++) {
// for (long i = 0; i < 1000; i++) {
// summary.record(i, TimeUnit.MILLISECONDS);
// }
summary.record(j % 10, TimeUnit.MILLISECONDS);
TimeUnit.SECONDS.sleep(1);
}
// Then idle for 110 more seconds to observe the percentiles decaying toward zero
// as the recorded samples expire out of the 10s window.
for (int i = 0; i < 10; i++) {
TimeUnit.SECONDS.sleep(1);
}
for (int i = 0; i < 100; i++) {
TimeUnit.SECONDS.sleep(1);
}
}
/**
 * Builds one printable row per tracked tag combination from the current stats:
 * concurrency gauge, error delta, request-count delta, latency percentiles
 * (p90/p99/p999, with p99.999 used as a max proxy) and QPS.
 *
 * @param stats source of the error/gauge/summary collectors
 * @param currentErrCollector out-param: this pass's error counts, kept for the next delta
 * @param currentSummaryCollector out-param: this pass's summary counts, kept for the next delta
 * @return rows ready for printing; assumes a 60-second reporting interval for QPS — TODO confirm
 */
private List<PrinterDomain> collector(Stats stats, ConcurrentMap<String, ConcurrentMap<List<String>, Double>> currentErrCollector,
ConcurrentMap<String, ConcurrentMap<List<String>, Long>> currentSummaryCollector) {
ConcurrentMap<List<String>, Counter> errCollector = stats.getErrCollector();
ConcurrentMap<List<String>, AtomicLong> gaugeCollector = stats.getGaugeCollector();
ConcurrentMap<List<String>, Timer> summaryCollector = stats.getSummaryCollector();
// Record this pass's error and summary counts so the next pass can compute deltas.
currentErrCollector.put(buildCollectorKey(stats), parseErrCollector(errCollector));
currentSummaryCollector.put(buildCollectorKey(stats), parseSummaryCollector(summaryCollector));
List<PrinterDomain> retList = new ArrayList<>();
for (Map.Entry<List<String>, Timer> entry : summaryCollector.entrySet()) {
List<String> tag = entry.getKey();
Timer summary= entry.getValue();
Counter counter = errCollector.get(tag);
AtomicLong concurrent = gaugeCollector.get(tag);
PrinterDomain domain = new PrinterDomain();
String name = setMetricsName(stats, tag);
HistogramSnapshot snapshot = summary.takeSnapshot();
domain.setTag(name);
domain.setConcurrent(concurrent == null ? "0" : concurrent.toString());
// Error and request counts are deltas against the previous pass's recorded values.
domain.setErr(counter == null ? "0" : String.valueOf(counter.count() - getLastTimeErrCount(stats, entry.getKey())));
domain.setSum(String.valueOf(snapshot.count() - getLastTimeSummaryCount(stats, entry.getKey())));
ValueAtPercentile[] vps = snapshot.percentileValues();
for (ValueAtPercentile vp : vps) {
if (vp.percentile() == 0.9D) {
domain.setP90(String.valueOf(vp.value(TimeUnit.MILLISECONDS)));
} else if (vp.percentile() == 0.99D) {
domain.setP99(String.valueOf(vp.value(TimeUnit.MILLISECONDS)));
} else if (vp.percentile() == 0.999D) {
domain.setP999(String.valueOf(vp.value(TimeUnit.MILLISECONDS)));
} else if (vp.percentile() == 0.99999D) {
// p99.999 stands in for the max latency.
domain.setMax(String.valueOf(vp.value(TimeUnit.MILLISECONDS)));
}
}
// Compute QPS from the count delta over the reporting window (60 seconds).
domain.setQps(String.format("%.1f", Float.parseFloat(domain.getSum()) / 60));
retList.add(domain);
}
return retList;
}
/**
 * Maps each published percentile to a metric whose id carries the percentile
 * statistic tag and whose value is scaled to the registry's base time unit.
 */
protected Stream<Metric> getMetrics(Meter meter, ValueAtPercentile[] percentiles) {
    return Arrays.stream(percentiles)
            .map(p -> {
                Meter.Id taggedId = withPercentile(meter, p);
                return toMetric(taggedId, p.value(getBaseTimeUnit()));
            });
}
/**
 * Derives a meter id tagged with an "NNNpercentile" statistic, where NNN is the
 * percentile scaled by 100 and rendered via PERCENTILE_FORMAT.
 */
protected Meter.Id withPercentile(Meter meter, ValueAtPercentile percentile) {
    final double scaledPercentile = percentile.percentile() * 100;
    return withStatistic(meter, String.format("%spercentile", PERCENTILE_FORMAT.format(scaledPercentile)));
}