The following examples show how instances of the org.apache.logging.log4j.Logger API class are obtained and used in practice; you can also follow the links to view the full source code on GitHub.
@Test
public void testPropertiesConfiguration() {
final Configuration config = context.getConfiguration();
assertNotNull("No configuration created", config);
assertEquals("Incorrect State: " + config.getState(), config.getState(), LifeCycle.State.STARTED);
final Map<String, Appender> appenders = config.getAppenders();
assertNotNull(appenders);
assertTrue("Incorrect number of Appenders: " + appenders.size(), appenders.size() == 3);
final Map<String, LoggerConfig> loggers = config.getLoggers();
assertNotNull(loggers);
assertTrue("Incorrect number of LoggerConfigs: " + loggers.size(), loggers.size() == 2);
final Filter filter = config.getFilter();
assertNotNull("No Filter", filter);
assertTrue("Not a Threshold Filter", filter instanceof ThresholdFilter);
final Logger logger = LogManager.getLogger(getClass());
logger.info("Welcome to Log4j!");
}
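The test above relies on a context field already holding the core LoggerContext (typically wired up by a test rule). As a minimal sketch, assuming only the public log4j-core API, the current context and its active Configuration can be fetched directly:
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;

static Configuration currentConfiguration() {
    // false = use the context for the current application, without a ClassLoader lookup
    final LoggerContext context = LoggerContext.getContext(false);
    return context.getConfiguration();
}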
@VisibleForTesting
protected BesuCommand(
final Logger logger,
final Supplier<RlpBlockImporter> rlpBlockImporter,
final Function<BesuController, JsonBlockImporter> jsonBlockImporterFactory,
final Function<Blockchain, RlpBlockExporter> rlpBlockExporterFactory,
final RunnerBuilder runnerBuilder,
final BesuController.Builder controllerBuilderFactory,
final BesuPluginContextImpl besuPluginContext,
final Map<String, String> environment,
final StorageServiceImpl storageService,
final SecurityModuleServiceImpl securityModuleService) {
this.logger = logger;
this.rlpBlockImporter = rlpBlockImporter;
this.rlpBlockExporterFactory = rlpBlockExporterFactory;
this.jsonBlockImporterFactory = jsonBlockImporterFactory;
this.runnerBuilder = runnerBuilder;
this.controllerBuilderFactory = controllerBuilderFactory;
this.besuPluginContext = besuPluginContext;
this.environment = environment;
this.storageService = storageService;
this.securityModuleService = securityModuleService;
}
static Map<String, Integer> getContigLengthMap(final SAMSequenceDictionary sequenceDictionary,
final int minContigLength,
final Logger logger) {
Utils.nonNull(sequenceDictionary);
ParamUtils.isPositiveOrZero(minContigLength, "Minimum contig length must be non-negative.");
Utils.nonNull(logger);
Utils.validateArg(sequenceDictionary.getSequences().stream().map(SAMSequenceRecord::getSequenceName).noneMatch(n -> n.contains(CONTIG_DELIMITER)),
String.format("Contig names cannot contain \"%s\".", CONTIG_DELIMITER));
final Map<String, Integer> contigLengthMap = sequenceDictionary.getSequences().stream()
.filter(s -> s.getSequenceLength() >= minContigLength)
.collect(Collectors.toMap(SAMSequenceRecord::getSequenceName, SAMSequenceRecord::getSequenceLength,
(c, l) -> {
throw new IllegalArgumentException(String.format("Duplicate contig in sequence dictionary: %s", c));
},
LinkedHashMap::new));
Utils.validateArg(contigLengthMap.size() > 0,
"There must be at least one contig above the threshold length in the sequence dictionary.");
logger.info("Contigs above length threshold: " + contigLengthMap.toString());
return contigLengthMap;
}
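The collector above is the interesting part: an insertion-ordered LinkedHashMap whose merge function refuses to merge, so duplicate keys fail fast. A standalone sketch of the same pattern (the sample contig strings are invented for illustration):
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

static Map<String, Integer> contigLengths() {
    return Stream.of("chr1:248956422", "chr2:242193529", "chrM:16569")
            .map(s -> s.split(":"))
            .collect(Collectors.toMap(a -> a[0], a -> Integer.valueOf(a[1]),
                    (a, b) -> {
                        // the merge function receives the two conflicting values; refusing to merge
                        // turns a duplicate key into an immediate failure
                        throw new IllegalArgumentException("Duplicate contig key with lengths " + a + " and " + b);
                    },
                    LinkedHashMap::new));
}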
static <T extends Locatable> void validateContigs(final Map<String, Integer> contigLengthMap,
final AbstractSampleLocatableCollection<T> locatableCollection,
final File file,
final Logger logger) {
Utils.nonNull(contigLengthMap);
Utils.nonNull(logger);
if (locatableCollection == null) {
Utils.validateArg(file == null, "File can only be null if collection is also null.");
return;
}
final Set<String> contigNames = contigLengthMap.keySet();
final Set<String> fileContigNames = locatableCollection.getRecords().stream().map(T::getContig).collect(Collectors.toSet());
if (!contigNames.containsAll(fileContigNames)) {
logger.warn(String.format("Contigs present in the file %s are missing from the sequence dictionary and will not be plotted.", file));
}
final Map<String, Integer> fileContigMaxPositionMap = locatableCollection.getIntervals().stream().filter(i -> contigNames.contains(i.getContig()))
.collect(Collectors.toMap(SimpleInterval::getContig, SimpleInterval::getEnd, Integer::max));
fileContigMaxPositionMap.forEach((c, integer) -> Utils.validateArg(integer <= contigLengthMap.get(c),
String.format("Position present in the file %s exceeds contig length in the sequence dictionary.", file)));
}
/**
* First parse the input alignments, then classify the assembly contigs based on their alignment signatures,
* and return the contigs that are classified together for downstream inference.
*/
public static AssemblyContigsClassifiedByAlignmentSignatures preprocess(final SvDiscoveryInputMetaData svDiscoveryInputMetaData,
final JavaRDD<GATKRead> assemblyRawAlignments) {
final Broadcast<SAMFileHeader> headerBroadcast = svDiscoveryInputMetaData.getSampleSpecificData().getHeaderBroadcast();
final Broadcast<Set<String>> canonicalChromosomesBroadcast = svDiscoveryInputMetaData.getReferenceData().getCanonicalChromosomesBroadcast();
final Logger toolLogger = svDiscoveryInputMetaData.getToolLogger();
final JavaRDD<AssemblyContigWithFineTunedAlignments> contigsWithChimericAlignmentsReconstructed =
AssemblyContigAlignmentsConfigPicker
.createOptimalCoverageAlignmentSetsForContigs(assemblyRawAlignments, headerBroadcast.getValue(),
canonicalChromosomesBroadcast.getValue(), 0.0, toolLogger)
.cache();
toolLogger.info(contigsWithChimericAlignmentsReconstructed.count() +
" contigs with chimeric alignments potentially giving SV signals.");
return new AssemblyContigsClassifiedByAlignmentSignatures(contigsWithChimericAlignmentsReconstructed);
}
private static ClickEvent parseClickEvent(JSONObject json, Logger logger) {
String key = json.optString("action", null);
if (key == null) {
logger.warn("Received invalid click event with no action, ignoring it: " + json.toString());
return null;
}
String value = json.optString("value", null);
if (value == null) {
logger.warn("Received invalid click event with no value, ignoring it: " + json.toString());
return null;
}
switch (key.toLowerCase()) {
case "open_url":
return new ClickEvent(ClickEvent.Action.OPEN_URL, value);
case "run_command":
return new ClickEvent(ClickEvent.Action.RUN_COMMAND, value);
case "suggest_command":
return new ClickEvent(ClickEvent.Action.SUGGEST_COMMAND, value);
case "change_page":
return new ClickEvent(ClickEvent.Action.CHANGE_PAGE, value);
default:
logger.warn("Unknown key " + key + " for click event, ignoring it: " + json.toString());
return null;
}
}
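A hypothetical caller sketch for the parser above (the JSONObject contents and logger name are invented for illustration):
JSONObject json = new JSONObject()
        .put("action", "open_url")
        .put("value", "https://example.com");
ClickEvent event = parseClickEvent(json, LogManager.getLogger("ClickEvents"));
// event is null if "action" or "value" is missing, or if the action is unknown; a warning is logged instead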
public static ReadMetadata buildMetadata( final FindBreakpointEvidenceSparkArgumentCollection params,
final SAMFileHeader header,
final JavaRDD<GATKRead> unfilteredReads,
final SVReadFilter filter,
final Logger logger ) {
Utils.validate(header.getSortOrder() == SAMFileHeader.SortOrder.coordinate,
"The reads must be coordinate sorted.");
final Set<Integer> crossContigsToIgnoreSet;
if ( params.crossContigsToIgnoreFile == null ) crossContigsToIgnoreSet = Collections.emptySet();
else crossContigsToIgnoreSet = readCrossContigsToIgnoreFile(params.crossContigsToIgnoreFile,
header.getSequenceDictionary());
final ReadMetadata readMetadata =
new ReadMetadata(crossContigsToIgnoreSet, header, params.maxTrackedFragmentLength, unfilteredReads, filter, logger);
if ( params.metadataFile != null ) {
ReadMetadata.writeMetadata(readMetadata, params.metadataFile);
}
return readMetadata;
}
/**
* Create a circuit breaker that will break if the number of estimated
* bytes grows above the limit. All estimations will be multiplied by
* the given overheadConstant. Uses the given oldBreaker to initialize
* the starting offset.
* @param limit circuit breaker limit
* @param overheadConstant constant multiplier for byte estimations
* @param oldBreaker the previous circuit breaker to inherit the used value from (starting offset)
*/
public MemoryCircuitBreaker(ByteSizeValue limit, double overheadConstant, MemoryCircuitBreaker oldBreaker, Logger logger) {
this.memoryBytesLimit = limit.getBytes();
this.overheadConstant = overheadConstant;
if (oldBreaker == null) {
this.used = new AtomicLong(0);
this.trippedCount = new AtomicLong(0);
} else {
this.used = oldBreaker.used;
this.trippedCount = oldBreaker.trippedCount;
}
this.logger = logger;
if (logger.isTraceEnabled()) {
logger.trace("Creating MemoryCircuitBreaker with a limit of {} bytes ({}) and a overhead constant of {}",
this.memoryBytesLimit, limit, this.overheadConstant);
}
}
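A hedged construction sketch for the breaker above, assuming Elasticsearch's ByteSizeValue and ByteSizeUnit types are available; passing null for oldBreaker starts with fresh used and tripped counters:
MemoryCircuitBreaker breaker =
        new MemoryCircuitBreaker(new ByteSizeValue(512, ByteSizeUnit.MB), 1.0, null, logger);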
/**
* Attempts to instantiate a class that cannot initialize and then logs the stack trace of the Error. The logger
* must not fail when using {@link ThrowableProxy} to inspect the frames of the stack trace.
*/
@Test
public void testLogStackTraceWithClassThatCannotInitialize() {
try {
// Try to create the object, which will always fail during class initialization
final AlwaysThrowsError error = new AlwaysThrowsError();
// If the error was not triggered then fail
fail("Test did not throw expected error: " + error);
} catch (final Throwable e) {
// Print the stack trace to System.out for informational purposes
// System.err.println("### Here's the stack trace that we'll log with log4j ###");
// e.printStackTrace();
// System.err.println("### End stack trace ###");
final Logger logger = LogManager.getLogger(getClass());
// This is the critical portion of the test. The log message must be printed without
// throwing a java.lang.Error when introspecting the AlwaysThrowError class in the
// stack trace.
logger.error(e.getMessage(), e);
logger.error(e);
}
}
/**
* Obtains a Logger for the given class, derived from the supplied parent logger.
*
* @param logger the parent logger whose name forms the first half of the cache key
* @param clazz the class whose name forms the second half of the cache key
* @return the Logger cached under the key "&lt;parent logger name&gt;#&lt;class name&gt;"
*/
public Logger createLogger(Logger logger, Class clazz) {
String clKey = String.join("#", logger.getName(), clazz.getName());
Logger rl = loggerMap.get(clKey);
if (rl != null) {
return rl;
}
lock.lock();
try {
if (!LogManager.exists(clKey)) {
start(logger, clKey);
loggerMap.put(clKey, LogManager.getLogger(clKey));
}
} finally {
lock.unlock();
}
return loggerMap.get(clKey);
}
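A hypothetical usage sketch for the factory method above (the loggerFactory instance and OrderService class are invented; the key format follows the String.join("#", ...) call in the code):
Logger base = LogManager.getLogger("audit");
Logger perClass = loggerFactory.createLogger(base, OrderService.class);
// perClass is cached under the key "audit#com.example.OrderService" and reused on later calls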
@Test
public void testJavascriptFilter() throws Exception {
final Logger logger = LogManager.getLogger("TestJavaScriptFilter");
logger.traceEntry();
logger.info("This should not be logged");
ThreadContext.put("UserId", "JohnDoe");
logger.info("This should be logged");
ThreadContext.clearMap();
final ListAppender app = getContext().getListAppender("List");
final List<String> messages = app.getMessages();
try {
assertNotNull("No Messages", messages);
assertTrue("Incorrect number of messages. Expected 2, Actual " + messages.size(), messages.size() == 2);
} finally {
app.clear();
}
}
/**
* Calculate the number of eigensamples given the user preferences and the
* result of the log-normals SVD.
*
* @param requestedNumberOfEigensamples the user requested eigenvalues (empty means the user didn't specify any in particular).
* @param numberOfCountColumns number of count columns in the original input.
* @param logNormalizedSVD SVD results on the log-normalized counts.
* @return always greater than 0.
*/
@VisibleForTesting
static int determineNumberOfEigensamples(final OptionalInt requestedNumberOfEigensamples, final int numberOfCountColumns, final SVD logNormalizedSVD, final Logger logger) {
final int numberOfEigensamples;
if (requestedNumberOfEigensamples.isPresent()) {
if (requestedNumberOfEigensamples.getAsInt() > numberOfCountColumns) {
logger.warn(String.format("The number of requested eigensamples (%d) is larger than the available number of read count columns after filtering (%d), thus we will have to use the latter.", requestedNumberOfEigensamples.getAsInt(), numberOfCountColumns));
}
numberOfEigensamples = Math.min(requestedNumberOfEigensamples.getAsInt(), numberOfCountColumns);
} else {
final double[] singularValues = logNormalizedSVD.getSingularValues();
final double mean = MathUtils.mean(singularValues, 0, singularValues.length);
final double eigenvalueCutoff = mean * JOLLIFES_RULE_MEAN_FACTOR; // Jolliffe's less strict version of Kaiser's rule.
numberOfEigensamples = (int) DoubleStream.of(singularValues).filter(v -> v > eigenvalueCutoff).count();
logger.info(String.format("Jollife's rule produced %d eigensamples out of %d possibles for the reduced PoN", numberOfEigensamples, numberOfCountColumns));
}
return numberOfEigensamples;
}
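The else-branch above applies a mean-based cutoff to the singular values. A self-contained sketch of just that rule, with invented singular values and an illustrative 0.7 factor standing in for JOLLIFES_RULE_MEAN_FACTOR:
import java.util.stream.DoubleStream;

double[] singularValues = {5.0, 2.5, 1.2, 0.4, 0.1};
double mean = DoubleStream.of(singularValues).average().orElse(0.0);
double cutoff = mean * 0.7;   // illustrative factor
long eigensamples = DoubleStream.of(singularValues).filter(v -> v > cutoff).count();
// mean = 1.84, cutoff = 1.288, so 2 singular values pass the threshold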
public static void streamResponseBody(InputStream is, String contentType,
String contentDisposition, HttpServletResponse response,
Logger log, String logPrefix, String redirectLocation) throws IOException {
if (StringUtils.isNotEmpty(contentType)) {
response.setHeader("Content-Type", contentType);
}
if (StringUtils.isNotEmpty(contentDisposition)) {
response.setHeader("Content-Disposition", contentDisposition);
}
if (StringUtils.isNotEmpty(redirectLocation)) {
response.sendRedirect(redirectLocation);
}
if (is != null) {
try (OutputStream outputStream = response.getOutputStream()) {
Misc.streamToStream(is, outputStream);
log.debug(logPrefix + "copied response body input stream [" + is + "] to output stream [" + outputStream + "]");
}
}
}
public static boolean logErrorMessageByInterval(String key, String message, Throwable e, Logger callerLogger, Level level) {
LogHistory log = logHistoryList.get(key);
if (log == null) {
log = new LogHistory();
logHistoryList.put(key, log);
}
if ((System.currentTimeMillis() - log.lastLogTime) > 30 * 1000) {
log.lastLogTime = System.currentTimeMillis();
if (log.counter > 0) {
message += ". Messages suppressed before: " + log.counter;
}
log.counter = 0;
callerLogger.log(level, message, e);
return true;
} else {
log.counter++;
return false;
}
}
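A hypothetical caller sketch for the rate limiter above: at most one message per key is emitted every 30 seconds, and the count of suppressed messages is appended to the next one that gets through (key, message, exception, and logger are invented):
boolean emitted = logErrorMessageByInterval(
        "db-connect", "Connection to primary refused", exception, logger, Level.WARN);
if (!emitted) {
    // suppressed; it is counted and reported with the next message emitted for this key
}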
@Test
public void testAsyncLogWritesToLog() throws Exception {
final File file = new File("target", "AsyncLoggerTest.log");
// System.out.println(f.getAbsolutePath());
file.delete();
ThreadContext.push("stackvalue");
ThreadContext.put("KEY", "mapvalue");
final Logger log = LogManager.getLogger("com.foo.Bar");
final String msg = "Async logger msg";
log.info(msg, new InternalError("this is not a real error"));
CoreLoggerContexts.stopLoggerContext(false, file); // stop async thread
final BufferedReader reader = new BufferedReader(new FileReader(file));
final String line1 = reader.readLine();
reader.close();
file.delete();
assertNotNull("line1", line1);
assertTrue("line1 correct", line1.contains(msg));
assertTrue("ThreadContext.map", line1.contains("mapvalue"));
assertTrue("ThreadContext.stack", line1.contains("stackvalue"));
}
public void dumpFieldAccess(final Logger logger) {
for (final FieldAccess fa : this.fieldAccesses) {
if (fa.returnType == null) {
logger.warn("missing returnType {}", fa);
} else {
logger.trace("# {}", fa);
}
}
for (final MethodCall mc : this.methodCalls) {
if (mc.returnType == null) {
logger.warn("missing returnType {}", mc);
} else {
logger.trace("# {}", mc);
}
}
}
static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings clientSettings) {
final ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
// but can force objects from every response to the old generation.
clientConfiguration.setResponseMetadataCacheSize(0);
clientConfiguration.setProtocol(clientSettings.protocol);
if (Strings.hasText(clientSettings.proxyHost)) {
// TODO: remove this leniency, these settings should exist together and be validated
clientConfiguration.setProxyHost(clientSettings.proxyHost);
clientConfiguration.setProxyPort(clientSettings.proxyPort);
clientConfiguration.setProxyUsername(clientSettings.proxyUsername);
clientConfiguration.setProxyPassword(clientSettings.proxyPassword);
}
// Increase the number of retries in case of 5xx API responses
final Random rand = Randomness.get();
final RetryPolicy retryPolicy = new RetryPolicy(
RetryPolicy.RetryCondition.NO_RETRY_CONDITION,
(originalRequest, exception, retriesAttempted) -> {
// with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000)
logger.warn("EC2 API request failed, retry again. Reason was:", exception);
return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble()));
},
10,
false);
clientConfiguration.setRetryPolicy(retryPolicy);
clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis);
return clientConfiguration;
}
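The backoff function above is exponential with jitter. A standalone sketch of the same formula with the jitter term dropped (rand = 0), just to make the delays concrete:
for (int retriesAttempted = 0; retriesAttempted <= 10; retriesAttempted++) {
    long delayMs = 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d));
    System.out.println("retry " + retriesAttempted + " -> base delay " + delayMs + " ms");
}
// retry 0 -> 10000 ms, retry 10 -> 320000 ms, matching the comment in the code above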
/**
 * Set the level of the logger. If the new level is null, the logger will inherit its level from its nearest ancestor with a non-null
* level.
*/
public static void setLevel(Logger logger, String level) {
final Level l;
if (level == null) {
l = null;
} else {
l = Level.valueOf(level);
}
setLevel(logger, l);
}
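A short usage sketch for the helper above (the logger name is invented): an explicit level string overrides the logger's level, and null hands it back to the nearest configured ancestor:
setLevel(LogManager.getLogger("org.example.service"), "DEBUG"); // explicit level
setLevel(LogManager.getLogger("org.example.service"), null);    // inherit from ancestor again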
@Test
public void narrow() throws Exception {
final Logger logger = LogManager.getLogger(OutputStreamManagerTest.class);
logger.info("test");
final List<StatusData> statusData = StatusLogger.getLogger().getStatusData();
StatusData data = statusData.get(0);
if (data.getMessage().getFormattedMessage().contains("WindowsAnsiOutputStream")) {
data = statusData.get(1);
}
assertEquals(Level.ERROR, data.getLevel());
assertEquals("Could not create plugin of type class org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender for element RollingRandomAccessFile",
data.getMessage().getFormattedMessage());
assertEquals("org.apache.logging.log4j.core.config.ConfigurationException: Configuration has multiple incompatible Appenders pointing to the same resource 'target/multiIncompatibleAppender.log'",
data.getThrowable().toString());
}
@Test
public void initTest() {
Timer timer = new Timer("Log4j Initialization");
timer.start();
Logger logger = LogManager.getLogger();
timer.stop();
long elapsed = timer.getElapsedNanoTime();
System.out.println(timer.toString());
assertTrue("Initialization time exceeded threshold; elapsed " + elapsed, elapsed < 1000000000);
}
public static Version parseAnalysisVersion(Settings indexSettings, Settings settings, Logger logger) {
// check for explicit version on the specific analyzer component
String sVersion = settings.get("version");
if (sVersion != null) {
return Lucene.parseVersion(sVersion, Version.LATEST, logger);
}
// check for explicit version on the index itself as default for all analysis components
sVersion = indexSettings.get("index.analysis.version");
if (sVersion != null) {
return Lucene.parseVersion(sVersion, Version.LATEST, logger);
}
// resolve the analysis version based on the version the index was created with
return org.elasticsearch.Version.indexCreated(indexSettings).luceneVersion;
}
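A hedged sketch of the three-step fallback above (the settings keys follow the code, the version strings are invented): a per-component "version" wins, then the index-wide "index.analysis.version", then the Lucene version the index was created with:
Settings indexSettings = Settings.builder()
        .put("index.analysis.version", "8.11.1")
        .build();
Settings analyzerSettings = Settings.builder()
        .put("version", "9.9.0")   // takes precedence over the index-wide default
        .build();
Version v = parseAnalysisVersion(indexSettings, analyzerSettings, logger);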
@Test
public void test() throws Exception {
// To ensure our custom plugin is NOT included in the log4j plugin metadata file,
// we make sure the class does not exist until after the build is finished.
// So we don't create the custom plugin class until this test is run.
final File orig = new File("target/test-classes/customplugin/FixedStringLayout.java.source");
final File f = new File(orig.getParentFile(), "FixedStringLayout.java");
assertTrue("renamed source file failed", orig.renameTo(f));
compile(f);
assertTrue("reverted source file failed", f.renameTo(orig));
// load the compiled class
Class.forName("customplugin.FixedStringLayout");
// now that the custom plugin class exists, we load the config
// with the packages element pointing to our custom plugin
ctx = Configurator.initialize("Test1", "customplugin/log4j2-741.xml");
config = ctx.getConfiguration();
listAppender = config.getAppender("List");
final Logger logger = LogManager.getLogger(PluginManagerPackagesTest.class);
logger.info("this message is ignored");
final List<String> messages = listAppender.getMessages();
assertEquals(messages.toString(), 1, messages.size());
assertEquals("abc123XYZ", messages.get(0));
}
/**
* Testing synchronization in the getLoggersInContext() method
*/
@Test
public synchronized void testGetLoggersInContextSynch() throws Exception {
final TestLoggerAdapter adapter = new TestLoggerAdapter();
final int num = 500;
final CountDownLatch startSignal = new CountDownLatch(1);
final CountDownLatch doneSignal = new CountDownLatch(num);
final RunnableThreadTest[] instances = new RunnableThreadTest[num];
LoggerContext lastUsedContext = null;
for (int i = 0; i < num; i++) {
if (i % 2 == 0) {
//every other time create a new context
lastUsedContext = new SimpleLoggerContext();
}
final RunnableThreadTest runnable = new RunnableThreadTest(i, adapter, lastUsedContext, startSignal, doneSignal);
final Thread thread = new Thread(runnable);
thread.start();
instances[i] = runnable;
}
startSignal.countDown();
doneSignal.await();
for (int i = 0; i < num; i = i + 2) {
//maps for the same context should be the same instance
final Map<String, Logger> resultMap1 = instances[i].getResultMap();
final Map<String, Logger> resultMap2 = instances[i + 1].getResultMap();
assertSame("not the same map for instances" + i + " and " + (i + 1) + ":", resultMap1, resultMap2);
assertEquals(2, resultMap1.size());
}
}
@Test
public void test() {
final Logger logger = LogManager.getLogger();
logger.info("Hello log");
try (final MongoClient mongoClient = mongoDbTestRule.getMongoClient()) {
final MongoDatabase database = mongoClient.getDatabase("testDb");
Assert.assertNotNull(database);
final MongoCollection<Document> collection = database.getCollection("testCollection");
Assert.assertNotNull(collection);
final Document first = collection.find().first();
Assert.assertNotNull(first);
Assert.assertEquals(first.toJson(), "Hello log", first.getString("message"));
}
}
@Test
public void testFlushAtEndOfBatch() throws Exception {
final File file = new File("target", "XmlFileAppenderTest.log");
// System.out.println(f.getAbsolutePath());
file.delete();
final Logger log = LogManager.getLogger("com.foo.Bar");
final String logMsg = "Message flushed with immediate flush=false";
log.info(logMsg);
CoreLoggerContexts.stopLoggerContext(false, file); // stop async thread
List<String> lines = Files.readAllLines(file.toPath(), Charset.forName("UTF8"));
file.delete();
String[] expect = {
"", // ? unsure why initial empty line...
"<Event ", //
"<Instant epochSecond=", //
logMsg, //
"</Event>", //
};
for (int i = 0; i < expect.length; i++) {
assertTrue("Expected line " + i + " to contain " + expect[i] + " but got: " + lines.get(i),
lines.get(i).contains(expect[i]));
}
final String location = "testFlushAtEndOfBatch";
assertTrue("no location", !lines.get(0).contains(location));
}
@Test
public void test() {
Logger logger = LogManager.getLogger();
logger.error("for log4j2 plugin async logger test");
Assert.assertNotNull(ThreadContext.get("PtxId"));
Assert.assertNotNull(ThreadContext.get("PspanId"));
}
@Issue("#1466")
@Test
void filterWhenRootLoggerAdditivityIsFalseShouldWork() throws IOException {
ConfigurationSource source = new ConfigurationSource(getClass().getResourceAsStream("/binder/logging/log4j2-root-logger-additivity-false.xml"));
Configurator.initialize(null, source);
Logger logger = LogManager.getLogger(Log4j2MetricsTest.class);
new Log4j2Metrics().bindTo(registry);
assertThat(registry.get("log4j2.events").tags("level", "info").counter().count()).isEqualTo(0);
logger.info("Hello, world!");
assertThat(registry.get("log4j2.events").tags("level", "info").counter().count()).isEqualTo(1);
}
public static String getNativeLogLevel() {
Logger logger = LogManager.getLogger(PythonProcess.class);
Level level = logger.getLevel();
for (Map.Entry<String, Level> entry : LOG_LEVELS.entrySet()) {
if (level.equals(entry.getValue()))
return entry.getKey().toLowerCase();
}
return null;
}
private static void evaluateIntervalsAgainstTruth(final List<SVInterval> assembledIntervals,
final SVIntervalTree<String> trueBreakpoints,
final Logger localLogger) {
final SVIntervalTree<Integer> intervals = new SVIntervalTree<>();
final int nIntervals = assembledIntervals.size();
for ( int idx = 0; idx != nIntervals; ++idx ) {
intervals.put(assembledIntervals.get(idx), idx);
}
final float falsePos = 1.f - intervals.overlapFraction(trueBreakpoints);
localLogger.info("Interval false positive rate = " + falsePos + " (" + Math.round(falsePos*nIntervals) + "/" + nIntervals + ")");
final float falseNeg = 1.f - trueBreakpoints.overlapFraction(intervals);
final int nTrue = trueBreakpoints.size();
localLogger.info("Interval false negative rate = " + falseNeg + " (" + Math.round(falseNeg*nTrue) + "/" + nTrue + ")");
}
/**
* Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable.
*
* @param cause the root throwable
* @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable
*/
public static Optional<Error> maybeError(final Throwable cause, final Logger logger) {
// early terminate if the cause is already an error
if (cause instanceof Error) {
return Optional.of((Error) cause);
}
final Queue<Throwable> queue = new LinkedList<>();
queue.add(cause);
int iterations = 0;
while (queue.isEmpty() == false) {
iterations++;
// this is a guard against deeply nested or circular chains of exceptions
if (iterations > MAX_ITERATIONS) {
logger.warn("giving up looking for fatal errors", cause);
break;
}
final Throwable current = queue.remove();
if (current instanceof Error) {
return Optional.of((Error) current);
}
Collections.addAll(queue, current.getSuppressed());
if (current.getCause() != null) {
queue.add(current.getCause());
}
}
return Optional.empty();
}
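A hypothetical caller sketch for the helper above (doWork is invented): if a fatal Error is hiding anywhere in the cause or suppressed tree, rethrow it rather than treating the whole failure as a recoverable exception:
try {
    doWork();
} catch (Exception e) {
    maybeError(e, logger).ifPresent(error -> { throw error; }); // rethrow fatal errors
    logger.warn("non-fatal failure", e);
}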