The following lists example code using org.slf4j.ILoggerFactory / org.slf4j.Logger instances; follow the link to view the full source on GitHub, or leave a comment in the panel on the right.
/**
 * Advice that logs when a matched method is entered and exited.
 *
 * @param joinPoint join point for advice; proceeds to the target method.
 * @return the result of the proceeding join point.
 * @throws Throwable whatever the underlying join point throws; an
 *                   {@link IllegalArgumentException} is logged before being rethrown.
 */
@Around("applicationPackagePointcut() && springBeanPointcut()")
public Object logAround(ProceedingJoinPoint joinPoint) throws Throwable {
    Logger log = logger(joinPoint);
    if (log.isDebugEnabled()) {
        log.debug("Enter: {}() with argument[s] = {}", joinPoint.getSignature().getName(), Arrays.toString(joinPoint.getArgs()));
    }
    try {
        Object result = joinPoint.proceed();
        if (log.isDebugEnabled()) {
            log.debug("Exit: {}() with result = {}", joinPoint.getSignature().getName(), result);
        }
        return result;
    } catch (IllegalArgumentException e) {
        // Pass the exception as the last argument so SLF4J records the stack
        // trace (the original call dropped it entirely).
        log.error("Illegal argument: {} in {}()", Arrays.toString(joinPoint.getArgs()), joinPoint.getSignature().getName(), e);
        throw e;
    }
}
/**
 * Prints the splash text file line by line through the given logger,
 * creating the file with a default placeholder line on first run.
 *
 * @param logger the logger that should print.
 * @param version version string passed to the logger as a format argument.
 * @param jdaVersion JDA version string passed as a format argument.
 * @param developers developers string passed as a format argument.
 */
public void print(Logger logger, String version, String jdaVersion, String developers) {
    try {
        if (file.createNewFile()) {
            // First run: seed the splash file with a customisation hint.
            // try-with-resources guarantees the writer is closed even if
            // write() fails (the original leaked it on exception).
            try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
                writer.write("SPLASH TEXT - CUSTOMISE IN " + file.getName() + "!");
            }
        }
        printBlank(logger);
        // Same leak fix for the reader: close it on every exit path.
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                logger.info(line, version, jdaVersion, developers);
            }
        }
        printBlank(logger);
    } catch (IOException exception) {
        logger.error("An error occurred showing the splash.", exception);
    }
}
/**
 * Creates an executor for the given property on the given class.
 * When a non-empty property name is supplied, {@code discover} is invoked
 * to resolve the accessor (discover() is defined elsewhere — its exact
 * resolution strategy is not visible here).
 *
 * @param log logger stored for later use by this executor.
 * @param introspector introspector stored for later use.
 * @param clazz class on which the property is discovered.
 * @param property property name; discovery is skipped when null or empty.
 * @param wrapArray whether array results should be wrapped (consumed elsewhere).
 * @since 1.5
 */
public PropertyExecutor(final Logger log, final Introspector introspector,
final Class clazz, final String property, final boolean wrapArray)
{
this.log = log;
this.introspector = introspector;
this.wrapArray = wrapArray;
// Don't allow passing in the empty string or null because
// it will either fail with a StringIndexOutOfBounds error
// or the introspector will get confused.
if (StringUtils.isNotEmpty(property))
{
discover(clazz, property);
}
}
/**
 * Creates the web server and pre-renders the admin-panel config as a
 * {@code window.config = ...} JavaScript snippet (UTF-8 bytes) when an
 * admin-panel configuration is present.
 *
 * @param logger logger used for construction-time errors.
 * @param config main configuration; {@code adminPanelConfig} may be null.
 */
WebServer(Logger logger, MainConfig config) {
    this.logger = logger;
    this.config = config;
    // Process the config.js file to include data from the Web-API config files
    try {
        MainConfig.APConfig cfg = config.adminPanelConfig;
        if (cfg != null) {
            ObjectMapper om = new ObjectMapper();
            String configStr = "window.config = " + om.writeValueAsString(cfg);
            // StandardCharsets avoids the runtime charset-name lookup of
            // Charset.forName("utf-8") and cannot misspell the name.
            apConfig = configStr.getBytes(java.nio.charset.StandardCharsets.UTF_8);
        }
    } catch (JsonProcessingException e) {
        // Use the injected logger instead of printStackTrace() so the failure
        // appears in the application log with the stack trace attached.
        logger.error("Could not serialize admin panel config", e);
    }
}
@Test
public void testTurningLoggingOff()
{
    // A broad INFO rule over the whole org.apache.qpid.foo package...
    Map<String, Object> broadInfoRule =
            createBrokerNameAndLevelLogInclusionRuleAttributes("fooRule",
                                                               "org.apache.qpid.foo.*",
                                                               LogLevel.INFO);
    // ...and a narrower OFF rule silencing org.apache.qpid.foo.bar.
    Map<String, Object> narrowOffRule =
            createBrokerNameAndLevelLogInclusionRuleAttributes("barRule",
                                                               "org.apache.qpid.foo.bar",
                                                               LogLevel.OFF);
    _brokerLogger.createChild(BrokerLogInclusionRule.class, broadInfoRule);
    _brokerLogger.createChild(BrokerLogInclusionRule.class, narrowOffRule);
    Logger barLogger = LoggerFactory.getLogger("org.apache.qpid.foo.bar");
    barLogger.warn("bar message");
    Logger fooLogger = LoggerFactory.getLogger("org.apache.qpid.foo.foo");
    fooLogger.warn("foo message");
    // The silenced message must not appear under either logger name;
    // the non-silenced one must be captured.
    assertLoggedEvent(_loggerAppender, false, "bar message", barLogger.getName(), Level.WARN);
    assertLoggedEvent(_loggerAppender, false, "bar message", fooLogger.getName(), Level.WARN);
    assertLoggedEvent(_loggerAppender, true, "foo message", fooLogger.getName(), Level.WARN);
}
/**
 * Parses the given bytes as BER-TLV and dumps the structure through the
 * supplied logger at TRACE level.
 *
 * @param data raw BER-TLV encoded bytes.
 * @param l target logger; all output goes through {@code l.trace(...)}.
 */
static void trace_tlv(byte[] data, Logger l) {
    BerTlvParser parser = new BerTlvParser();
    BerTlvs tlvs = parser.parse(data);
    BerTlvLogger.log("", tlvs,
            new IBerTlvLogger() {
                @Override
                public boolean isDebugEnabled() {
                    // The adapter forwards to l.trace(), so the enabled-guard
                    // must reflect the TRACE level. The original hard-coded
                    // `true`, formatting TLV output even when tracing was off.
                    return l.isTraceEnabled();
                }
                @Override
                public void debug(String s, Object... objects) {
                    l.trace(s, objects);
                }
            }
    );
}
/**
 * Creates a native RocksDB logger that forwards RocksDB's internal log
 * output into this class's SLF4J logger at DEBUG level, or returns
 * {@code null} when DEBUG logging is disabled (callers must handle null).
 *
 * <p>NOTE(review): the {@code DBOptions} instance is closed by
 * try-with-resources before the returned logger is ever used; the comment
 * below states options are only needed at construction time — confirm this
 * against the RocksDB JNI contract before restructuring.
 */
private static org.rocksdb.Logger createRocksDbNativeLogger() {
if (LOG.isDebugEnabled()) {
// options are always needed for org.rocksdb.Logger construction (no other constructor)
// the logger level gets configured from the options in native code
try (DBOptions opts = new DBOptions().setInfoLogLevel(InfoLogLevel.DEBUG_LEVEL)) {
return new org.rocksdb.Logger(opts) {
@Override
protected void log(InfoLogLevel infoLogLevel, String logMsg) {
LOG.debug("RocksDB filter native code log: " + logMsg);
}
};
}
} else {
return null;
}
}
/**
 * Constructor used by Guice. Supplier instances are taken for {@link XPath},
 * {@link Transformer}, and {@link DocumentBuilder} because the corresponding
 * factory objects are <em>not</em> thread-safe; the Guice modules creating
 * these suppliers should guarantee that each {@link Supplier} <em>is</em>
 * thread-safe.
 *
 * @param adsApiConfiguration the API configuration
 * @param libLogger the logger to use for errors
 * @param xpathSupplier a thread-safe supplier of {@link XPath} objects
 * @param transformerSupplier a thread-safe supplier of {@link Transformer} objects
 * @param documentBuilderSupplier a thread-safe supplier of {@link DocumentBuilder} objects
 */
@Inject
public PrettyPrinter(
    AdsApiConfiguration adsApiConfiguration,
    @Named("libLogger") Logger libLogger,
    Supplier<XPath> xpathSupplier,
    Supplier<Transformer> transformerSupplier,
    Supplier<DocumentBuilder> documentBuilderSupplier) {
  // Snapshot the configured sensitive XPaths; a missing (null) array is
  // treated the same as an empty one.
  String[] configuredXPaths = adsApiConfiguration.getSensitiveXPaths();
  if (configuredXPaths == null) {
    this.sensitiveXPathStrings = ImmutableList.<String>of();
  } else {
    this.sensitiveXPathStrings = ImmutableList.<String>copyOf(configuredXPaths);
  }
  this.libLogger = libLogger;
  this.xpathSupplier = xpathSupplier;
  this.transformerSupplier = transformerSupplier;
  this.documentBuilderSupplier = documentBuilderSupplier;
}
/**
 * Registers a Logstash TCP appender (wrapped in an async appender) on the
 * root logger of the given context.
 * More documentation: https://github.com/logstash/logstash-logback-encoder
 */
private void addLogstashTcpSocketAppender(LoggerContext context) {
    log.info("Initializing Logstash logging");
    // Configure the TCP appender from the JHipster logstash properties.
    LogstashTcpSocketAppender logstashAppender = new LogstashTcpSocketAppender();
    logstashAppender.setContext(context);
    logstashAppender.setName(LOGSTASH_APPENDER_NAME);
    logstashAppender.setEncoder(logstashEncoder());
    logstashAppender.addDestinations(new InetSocketAddress(this.jHipsterProperties.getLogging().getLogstash().getHost(), this.jHipsterProperties.getLogging().getLogstash().getPort()));
    logstashAppender.start();
    // Route events through an async appender so logging calls do not block
    // on the network socket.
    AsyncAppender asyncLogstashAppender = new AsyncAppender();
    asyncLogstashAppender.setContext(context);
    asyncLogstashAppender.setName(ASYNC_LOGSTASH_APPENDER_NAME);
    asyncLogstashAppender.setQueueSize(this.jHipsterProperties.getLogging().getLogstash().getQueueSize());
    asyncLogstashAppender.addAppender(logstashAppender);
    asyncLogstashAppender.start();
    context.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME).addAppender(asyncLogstashAppender);
}
/**
 * Creates a connection to the resource manager on behalf of a job manager.
 * The connection parameters are forwarded to the superclass; the job-side
 * identifiers are validated against null and stored.
 *
 * @param log logger forwarded to the base connection.
 * @param jobID id of the job this connection belongs to; must not be null.
 * @param jobManagerResourceID resource id of the job manager; must not be null.
 * @param jobManagerRpcAddress RPC address of the job manager; must not be null.
 * @param jobMasterId id of the job master; must not be null.
 * @param resourceManagerAddress address of the resource manager (handled by super).
 * @param resourceManagerId id of the resource manager (handled by super).
 * @param executor executor forwarded to the base connection.
 */
ResourceManagerConnection(
final Logger log,
final JobID jobID,
final ResourceID jobManagerResourceID,
final String jobManagerRpcAddress,
final JobMasterId jobMasterId,
final String resourceManagerAddress,
final ResourceManagerId resourceManagerId,
final Executor executor) {
super(log, resourceManagerAddress, resourceManagerId, executor);
this.jobID = checkNotNull(jobID);
this.jobManagerResourceID = checkNotNull(jobManagerResourceID);
this.jobManagerRpcAddress = checkNotNull(jobManagerRpcAddress);
this.jobMasterId = checkNotNull(jobMasterId);
}
/**
 * Wraps an existing JDBC expression in a paginated expression.
 *
 * @param expression the expression to wrap.
 * @param log logger (unused by this implementation).
 * @return the paginated expression wrapping {@code expression}.
 * @throws ServiceException if construction fails (cause preserved).
 */
@Override
public JDBCPaginatedExpression toPaginatedExpression(JDBCExpression expression, Logger log) throws NotImplementedException, ServiceException {
    try {
        return new JDBCPaginatedExpression(expression);
    } catch (Exception e) {
        // Surface any construction failure as a ServiceException.
        throw new ServiceException(e);
    }
}
/**
 * Creates a fresh JDBC expression bound to this bean's field converter.
 *
 * @param log logger (unused by this implementation).
 * @return a new {@code JDBCExpression}.
 * @throws ServiceException if construction fails (cause preserved).
 */
@Override
public JDBCExpression newExpression(Logger log) throws NotImplementedException, ServiceException {
    try {
        return new JDBCExpression(this.getPagamentoFieldConverter());
    } catch (Exception e) {
        // Surface any construction failure as a ServiceException.
        throw new ServiceException(e);
    }
}
@Test
void defaultsSuccess() throws Exception {
    // Service context for a plain GET / request.
    final ServiceRequestContext ctx = ServiceRequestContext.of(HttpRequest.of(HttpMethod.GET, "/"));
    final Logger logger = LoggingTestUtil.newMockLogger(ctx, capturedCause);
    // Build the logging decorator with default settings around the delegate.
    final LoggingService service = LoggingService.builder()
            .logger(logger)
            .newDecorator()
            .apply(delegate);
    service.serve(ctx, ctx.request());
    // With defaults the mock logger's TRACE guard is consulted exactly twice.
    verify(logger, times(2)).isTraceEnabled();
}
/**
 * Updates the row identified by {@code tableId} when it exists, otherwise
 * creates it. Falls back to table-id based identity mapping when no
 * behaviour is supplied.
 */
@Override
public void updateOrCreate(JDBCServiceManagerProperties jdbcProperties, Logger log, Connection connection, ISQLQueryObject sqlQueryObject, long tableId, TipoVersamento tipoVersamento, org.openspcoop2.generic_project.beans.IDMappingBehaviour idMappingResolutionBehaviour) throws NotImplementedException,ServiceException,Exception {
    // default behaviour (id-mapping)
    if (idMappingResolutionBehaviour == null) {
        idMappingResolutionBehaviour = org.openspcoop2.generic_project.beans.IDMappingBehaviour.valueOf("USE_TABLE_ID");
    }
    // Create when the row is missing, update otherwise.
    if (!this.exists(jdbcProperties, log, connection, sqlQueryObject, tableId)) {
        this.create(jdbcProperties, log, connection, sqlQueryObject, tipoVersamento, idMappingResolutionBehaviour);
    } else {
        this.update(jdbcProperties, log, connection, sqlQueryObject, tableId, tipoVersamento, idMappingResolutionBehaviour);
    }
}
@Test
public void testFilters() {
    // Emit one event through each named logger; presumably each logger is
    // wired to a different filter (level/threshold/evaluator) in the logback
    // test configuration — the config itself is not visible here.
    Logger levelFiltered = LoggerFactory.getLogger("levelFilterLogger");
    levelFiltered.error("Employee Information Update Failed");
    Logger thresholdFiltered = LoggerFactory.getLogger("thresholdFilterLogger");
    thresholdFiltered.trace("Employee record inserted");
    Logger evaluatorFiltered = LoggerFactory.getLogger("evaluatorFilterLogger");
    evaluatorFiltered.debug("Employee account deactivated");
}
@BeforeEach
void setup() {
    // Mock logger that reports INFO as enabled so info() calls go through.
    Logger mockLogger = mock(Logger.class);
    when(mockLogger.isInfoEnabled()).thenReturn(true);
    logger = mockLogger;
    mailContext = FakeMailContext.builder()
            .logger(logger)
            .build();
    mailet = new LogMessage(logger);
}
/**
 * Forwards a session log entry to the SLF4J logger matching the entry's
 * namespace, at the level mapped from the entry's level. Entries rejected
 * by {@code shouldLog} are dropped early.
 *
 * @param entry the session log entry to forward.
 * @throws IllegalStateException if the mapped level is not one of
 *                               TRACE/DEBUG/INFO/WARN/ERROR.
 */
@Override
public void log(final SessionLogEntry entry) {
    if (!shouldLog(entry.getLevel(), entry.getNameSpace())) {
        return;
    }
    Logger logger = getLogger(entry.getNameSpace());
    LogLevel logLevel = getLogLevel(entry.getLevel());
    // Message = supplemental detail prefix + formatted entry body.
    StringBuilder message = new StringBuilder();
    message.append(getSupplementDetailString(entry));
    message.append(formatMessage(entry));
    switch (logLevel) {
        case TRACE:
            logger.trace(message.toString());
            break;
        case DEBUG:
            logger.debug(message.toString());
            break;
        case INFO:
            logger.info(message.toString());
            break;
        case WARN:
            logger.warn(message.toString());
            break;
        case ERROR:
            logger.error(message.toString());
            break;
        default:
            // Original message lacked a separator and rendered as e.g.
            // "Not supportedALL"; include one for a readable diagnostic.
            throw new IllegalStateException("Not supported: " + logLevel);
    }
}
/**
 * Applies the given update models to the single row identified by
 * {@code tableId}, delegating to {@code JDBCUtilities.updateFields}.
 */
@Override
public void updateFields(JDBCServiceManagerProperties jdbcProperties, Logger log, Connection connection, ISQLQueryObject sqlQueryObject, long tableId, UpdateModel ... updateModels) throws NotFoundException, NotImplementedException, ServiceException, Exception {
    // The utility operates on a list of ids; wrap the single table id.
    java.util.List<Object> tableIds = new java.util.ArrayList<>();
    tableIds.add(tableId);
    JDBCUtilities.updateFields(jdbcProperties, log, connection, sqlQueryObject,
            this.getOperatoreFieldConverter().toTable(Operatore.model()),
            this._getMapTableToPKColumn(),
            tableIds,
            this.getOperatoreFieldConverter(), this, updateModels);
}
/**
 * Creates a fresh paginated expression bound to this bean's field converter.
 *
 * @param log logger (unused by this implementation).
 * @return a new {@code JDBCPaginatedExpression}.
 * @throws ServiceException if construction fails (cause preserved).
 */
@Override
public JDBCPaginatedExpression newPaginatedExpression(Logger log) throws NotImplementedException, ServiceException {
    try {
        return new JDBCPaginatedExpression(this.getStampaFieldConverter());
    } catch (Exception e) {
        // Surface any construction failure as a ServiceException.
        throw new ServiceException(e);
    }
}
/**
 * Returns the effective log4j level for the named logger as a string,
 * or null when no effective level is reported.
 */
@RequestMapping(value = "/logLevel/{loggerName}", method = RequestMethod.GET, produces = { "application/json" })
@ResponseBody
public String getLogLevel(@PathVariable(value = "loggerName") String loggerName) {
    org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
    org.apache.log4j.Level level = logger.getEffectiveLevel();
    return level == null ? null : level.toString();
}
/**
 * Creates a registered RPC connection to a job manager; all arguments are
 * forwarded unchanged to the base connection.
 *
 * @param log logger handed to the base connection.
 * @param targetAddress RPC address of the target job manager.
 * @param jobMasterId id of the target job master.
 * @param executor executor handed to the base connection.
 */
JobManagerRegisteredRpcConnection(
Logger log,
String targetAddress,
JobMasterId jobMasterId,
Executor executor) {
super(log, targetAddress, jobMasterId, executor);
}
/**
 * Logs a task status update, demoting reconciliation updates to DEBUG
 * because periodic reconciliation runs generate large volumes of no-op
 * messages.
 */
private static void logStatusUpdate(Logger logger, TaskStatus status) {
    final boolean reconciliation = status.hasReason() && status.getReason() == REASON_RECONCILIATION;
    final StringBuilder text = new StringBuilder("Received status update for task ")
            .append(status.getTaskId().getValue())
            .append(" in state ")
            .append(status.getState());
    if (status.hasSource()) {
        text.append(" from ").append(status.getSource());
    }
    if (status.hasReason()) {
        text.append(" with ").append(status.getReason());
    }
    if (status.hasMessage()) {
        // Only the first line of a multi-line status message is kept.
        final String[] messageLines = status.getMessage().split("\n");
        text.append(": ").append(messageLines[0]);
        if (messageLines.length > 1) {
            text.append(" (truncated)");
        }
    }
    if (reconciliation) {
        logger.debug(text.toString());
    } else {
        logger.info(text.toString());
    }
}
/**
 * Starts an Actor System at a specific port. Delegates to the overload that
 * additionally takes an executor configuration, using the fork-join
 * configuration derived from the Flink configuration.
 *
 * @param configuration The Flink configuration.
 * @param listeningAddress The address to listen at.
 * @param listeningPort The port to listen at.
 * @param logger the logger to output log information.
 * @return The ActorSystem which has been started.
 * @throws Exception if the delegated start-up fails.
 */
public static ActorSystem startActorSystem(
Configuration configuration,
String listeningAddress,
int listeningPort,
Logger logger) throws Exception {
return startActorSystem(
configuration,
listeningAddress,
listeningPort,
logger,
ForkJoinExecutorConfiguration.fromConfiguration(configuration));
}
/**
 * Executes a native SQL query, mapping each result row to the given column
 * class types. Pure delegation to {@code JDBCUtilities.nativeQuery}.
 *
 * @param sql the native SQL to execute.
 * @param returnClassTypes expected class of each selected column, in order.
 * @param param positional query parameters.
 * @return one list of column values per result row.
 */
@Override
public List<List<Object>> nativeQuery(JDBCServiceManagerProperties jdbcProperties, Logger log, Connection connection, ISQLQueryObject sqlQueryObject,
String sql,List<Class<?>> returnClassTypes,Object ... param) throws ServiceException,NotFoundException,NotImplementedException,Exception{
return org.openspcoop2.generic_project.dao.jdbc.utils.JDBCUtilities.nativeQuery(jdbcProperties, log, connection, sqlQueryObject,
sql,returnClassTypes,param);
}
/**
 * Evaluates the given aggregate functions over the rows matched by
 * {@code expression} and returns the single aggregate result row.
 * The function fields are registered on the query object before the select
 * and removed again in the finally block so the shared query object is
 * always restored, even on failure.
 *
 * <p>NOTE(review): {@code list.get(0)} assumes the select yields at least
 * one row; presumably {@code _select} throws NotFoundException when empty —
 * confirm before relying on it.
 */
@Override
public Map<String,Object> aggregate(JDBCServiceManagerProperties jdbcProperties, Logger log, Connection connection, ISQLQueryObject sqlQueryObject,
JDBCExpression expression, FunctionField ... functionField) throws ServiceException,NotFoundException,NotImplementedException,Exception {
org.openspcoop2.generic_project.dao.jdbc.utils.JDBCUtilities.setFields(sqlQueryObject,expression,functionField);
try{
List<Map<String,Object>> list = this._select(jdbcProperties, log, connection, sqlQueryObject, expression);
return list.get(0);
}finally{
org.openspcoop2.generic_project.dao.jdbc.utils.JDBCUtilities.removeFields(sqlQueryObject,expression,functionField);
}
}
/**
 * Marshals the payment-notice input to XML in memory and wraps it in a
 * JasperReports XML datasource rooted at the configured element name.
 *
 * @param log logger (unused by this implementation).
 * @param input payment-notice input to marshal.
 * @return an XML datasource over the marshalled input.
 */
public JRDataSource creaXmlDataSource(Logger log,AvvisoPagamentoInput input) throws UtilsException, JRException, JAXBException {
    Marshaller marshaller = jaxbContext.createMarshaller();
    // Suppress the XML declaration: the datasource only needs the element tree.
    marshaller.setProperty("com.sun.xml.bind.xmlDeclaration", Boolean.FALSE);
    JAXBElement<AvvisoPagamentoInput> root =
            new JAXBElement<AvvisoPagamentoInput>(new QName("", "input"), AvvisoPagamentoInput.class, null, input);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    marshaller.marshal(root, buffer);
    return new JRXmlDataSource(new ByteArrayInputStream(buffer.toByteArray()),
            AvvisoPagamentoCostanti.AVVISO_PAGAMENTO_ROOT_ELEMENT_NAME);
}
/**
 * Configurer bean that warns when a distributed-system id is configured,
 * since that setting has no effect on a ClientCache instance.
 */
@Bean
ClientCacheConfigurer clientCacheDistributedSystemIdConfigurer() {
    return (beanName, clientCacheFactoryBean) -> getDistributedSystemId().ifPresent(distributedSystemId -> {
        Logger log = getLogger();
        // Guard clause: skip formatting entirely when WARN is disabled.
        if (!log.isWarnEnabled()) {
            return;
        }
        log.warn("Distributed System Id [{}] was set on the ClientCache instance, which will not have any effect",
            distributedSystemId);
    });
}
/**
 * Appends a status entry for every controller service known to the flow
 * controller to {@code controllerServiceStatusList}.
 *
 * @param requestItem request whose options drive status parsing.
 * @param flowController source of the controller service nodes.
 * @param controllerServiceStatusList output list; one entry appended per service.
 * @param logger logger forwarded to the status parser.
 */
private static void handleControllerServices(RequestItem requestItem, FlowController flowController, List<ControllerServiceStatus> controllerServiceStatusList, Logger logger) {
    // Iterating an empty collection is a no-op, so the original explicit
    // isEmpty() guard was redundant and has been removed.
    for (ControllerServiceNode controllerServiceNode : flowController.getAllControllerServices()) {
        controllerServiceStatusList.add(parseControllerServiceStatusRequest(controllerServiceNode, requestItem.options, flowController, logger));
    }
}
/**
 * Updates the row identified by {@code oldId} when it exists, otherwise
 * creates it. Falls back to table-id based identity mapping when no
 * behaviour is supplied.
 */
@Override
public void updateOrCreate(JDBCServiceManagerProperties jdbcProperties, Logger log, Connection connection, ISQLQueryObject sqlQueryObject, IdPagamentoPortale oldId, PagamentoPortale pagamentoPortale, org.openspcoop2.generic_project.beans.IDMappingBehaviour idMappingResolutionBehaviour) throws NotImplementedException,ServiceException,Exception {
    // default behaviour (id-mapping)
    if (idMappingResolutionBehaviour == null) {
        idMappingResolutionBehaviour = org.openspcoop2.generic_project.beans.IDMappingBehaviour.valueOf("USE_TABLE_ID");
    }
    // Create when the row is missing, update otherwise.
    if (!this.exists(jdbcProperties, log, connection, sqlQueryObject, oldId)) {
        this.create(jdbcProperties, log, connection, sqlQueryObject, pagamentoPortale, idMappingResolutionBehaviour);
    } else {
        this.update(jdbcProperties, log, connection, sqlQueryObject, oldId, pagamentoPortale, idMappingResolutionBehaviour);
    }
}
@BeforeClass
void mockLog() {
    log = mock(Logger.class);
    // Capture the second argument of every matching info(...) call into
    // percentHolder. Lambda form of the original anonymous Answer.
    doAnswer(invocation -> {
        Integer captured = (Integer) invocation.getArguments()[1];
        percentHolder.add(captured.intValue());
        return null;
    }).when(log).info(anyString(), anyFloat(), anyInt(), anyString());
}