The following examples show how to use org.apache.commons.logging.LogFactory. You can also follow the links through to GitHub to view the original source.
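As a primer, every example below follows the same basic pattern: obtain a Log from LogFactory (usually cached in a field) and guard verbose messages with level checks. Here is a minimal, self-contained sketch of that pattern; the class MyService and its method are hypothetical, for illustration only.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class MyService { // hypothetical class, for illustration

    // Cache the logger once instead of calling LogFactory.getLog on every use.
    private static final Log log = LogFactory.getLog(MyService.class);

    public void doWork(String input) {
        // Guard debug output so the message string is only built when enabled.
        if (log.isDebugEnabled()) {
            log.debug("doWork called with input=" + input);
        }
        try {
            // ... actual work would go here ...
        } catch (RuntimeException ex) {
            log.error("doWork failed for input=" + input, ex);
            throw ex;
        }
    }
}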
@Override
public void afterPropertiesSet() {
if (this.configure == ConfigureRedisAction.NO_OP) {
return;
}
RedisConnection connection = this.connectionFactory.getConnection();
try {
this.configure.configure(connection);
}
finally {
try {
connection.close();
}
catch (Exception ex) {
LogFactory.getLog(getClass()).error("Error closing RedisConnection", ex);
}
}
}
protected void runAllGoldReportsSerial() throws Exception {
initializeTestEnvironment();
List<Throwable> errors = Collections.synchronizedList( new ArrayList<Throwable>() );
List<ExecuteReportRunner> reports = new ArrayList<ExecuteReportRunner>();
reports.addAll( collectReports( "reports", ReportProcessingMode.legacy, errors ) );
reports.addAll( collectReports( "reports", ReportProcessingMode.migration, errors ) );
reports.addAll( collectReports( "reports", ReportProcessingMode.current, errors ) );
reports.addAll( collectReports( "reports-4.0", ReportProcessingMode.migration, errors ) );
reports.addAll( collectReports( "reports-4.0", ReportProcessingMode.current, errors ) );
for ( ExecuteReportRunner report : reports ) {
report.run();
}
if ( !errors.isEmpty() ) {
Log log = LogFactory.getLog( GoldTestBase.class );
for ( Throwable throwable : errors ) {
log.error( "Failed", throwable );
}
Assert.fail();
}
System.out.println( findMarker() );
}
/** Default Constructor. */
public HarvesterHeritrix() throws HarvesterException {
super();
// Initialize the logger first so it is available in the catch block below.
log = LogFactory.getLog(HarvesterHeritrix.class);
name = "HarvesterHeritrix-" + System.currentTimeMillis();
try {
heritrix = new Heritrix(name, true);
}
catch (IOException e) {
if (log.isErrorEnabled()) {
log.error("Failed to create an instance of Heritrix " + e.getMessage(), e);
}
throw new HarvesterException("Failed to create an instance of Heritrix " + e.getMessage(), e);
}
if (log.isDebugEnabled()) {
log.debug("Created new harvester " + name);
}
status = new HarvesterStatusDTO(name);
}
/**
* Initializes the logging.
*/
private void initLogging()
{
if (_simpleLogging)
{
// For Ant, we're forcing DdlUtils to do logging via log4j to the console
Properties props = new Properties();
String level = (_verbosity == null ? Level.INFO.toString() : _verbosity.getValue()).toUpperCase();
props.setProperty("log4j.rootCategory", level + ",A");
props.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
props.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
props.setProperty("log4j.appender.A.layout.ConversionPattern", "%m%n");
// we don't want debug logging from Digester
props.setProperty("log4j.logger.org.apache.commons", "WARN");
LogManager.resetConfiguration();
PropertyConfigurator.configure(props);
}
_log = LogFactory.getLog(getClass());
}
/**
* Starts a thread that drains the given process's standard output or error
* stream so that the process does not block on a full pipe buffer. The
* stream is closed once fully read.
*/
private void logStream(final String logType, final InputStream stream, final File file) {
new Thread() {
public void run() {
Reader reader = new InputStreamReader(stream, UTF_8);
StringBuilder out = new StringBuilder();
char[] buffer = new char[1024];
try {
for (int n = reader.read(buffer); n != -1; n = reader.read(buffer))
out.append(buffer, 0, n);
} catch (IOException e) {
// ignored: whatever output was read before the failure is still logged below
} finally {
IOUtils.closeQuietly(stream);
}
String msg = out.toString();
LogFactory.getLog(TesseractOCRParser.class).debug(msg);
}
}.start();
}
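A usage sketch for the helper above, assuming a caller that launches an external binary (the tesseract command line and method name are hypothetical): both streams are drained on background threads so the child process cannot stall on a full pipe buffer before waitFor() returns.

private void runTesseract(File input) throws IOException, InterruptedException {
    // Hypothetical caller; the command line is illustrative only.
    Process process = new ProcessBuilder("tesseract", input.getPath(), "output").start();
    // Drain stdout and stderr concurrently so neither pipe fills up.
    logStream("OCR MSG", process.getInputStream(), input);
    logStream("OCR ERROR", process.getErrorStream(), input);
    process.waitFor();
}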
@Override
public void init(Properties prop, String propPrefix) {
super.init(prop, propPrefix);
loggerName = MiscUtil.getStringProperty(props, propPrefix + "."
+ PROP_LOG4J_LOGGER);
if (loggerName == null || loggerName.isEmpty()) {
loggerName = DEFAULT_LOGGER_PREFIX + "." + getName();
logger.info("Logger property " + propPrefix + "."
+ PROP_LOG4J_LOGGER + " was not set. Constructing default="
+ loggerName);
}
logger.info("Logger name for " + getName() + " is " + loggerName);
auditLogger = LogFactory.getLog(loggerName);
logger.info("Done initializing logger for audit. name=" + getName()
+ ", loggerName=" + loggerName);
}
public static byte[] readData(InputStream inStream, OutputStream outStream, Message msg) throws IOException, MessagingException {
List<String> request = new ArrayList<String>(2);
byte[] data = readHTTP(inStream, outStream, msg.getHeaders(), request);
msg.setAttribute(MA_HTTP_REQ_TYPE, request.get(0));
msg.setAttribute(MA_HTTP_REQ_URL, request.get(1));
if (data == null) {
String healthCheckUri = Properties.getProperty("health_check_uri", "healthcheck");
if ("GET".equalsIgnoreCase(request.get(0)) && request.get(1).matches("^[/]{0,1}" + healthCheckUri + "*")) {
if (outStream != null) {
HTTPUtil.sendHTTPResponse(outStream, HttpURLConnection.HTTP_OK, null);
msg.setAttribute("isHealthCheck", "true"); // provide means for caller to know what happened
}
return null;
} else {
HTTPUtil.sendHTTPResponse(outStream, HttpURLConnection.HTTP_LENGTH_REQUIRED, null);
Log logger = LogFactory.getLog(HTTPUtil.class.getSimpleName());
logger.error("Inbound HTTP request does not provide means to determine data length: " + request.get(0) + " " + request.get(1) + "\n\tHeaders: " + printHeaders(msg.getHeaders().getAllHeaders(), "==", ";;"));
throw new IOException("Content-Length missing and no \"Transfer-Encoding\" header found to determine how to read message body.");
}
}
cleanIdHeaders(msg.getHeaders());
return data;
}
private static TaskID parseTaskIdFromTaskAttemptId(String taskAttemptId) {
// Tez in particular uses an incorrect String task1244XXX instead of task_1244, which makes the parsing fail;
// this method tries to cope with such issues by looking at the numbers where possible
if (taskAttemptId.startsWith("task")) {
taskAttemptId = taskAttemptId.substring(4);
}
if (taskAttemptId.startsWith("_")) {
taskAttemptId = taskAttemptId.substring(1);
}
List<String> tokenize = StringUtils.tokenize(taskAttemptId, "_");
// need at least 4 entries from 123123123123_0001_r_0000_4
if (tokenize.size() < 4) {
LogFactory.getLog(HadoopCfgUtils.class).warn("Cannot parse task attempt (too few tokens) " + taskAttemptId);
return null;
}
// parse straight away; if the id uses a newer format, the exception below catches it
try {
return new TaskID(tokenize.get(0), Integer.parseInt(tokenize.get(1)), tokenize.get(2).startsWith("m"), Integer.parseInt(tokenize.get(3)));
} catch (Exception ex) {
LogFactory.getLog(HadoopCfgUtils.class).warn("Cannot parse task attempt " + taskAttemptId);
return null;
}
}
protected VersionInfo(String component) {
info = new Properties();
String versionInfoFile = component + "-version-info.properties";
InputStream is = null;
try {
is = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(versionInfoFile);
if (is == null) {
throw new IOException("Resource not found");
}
info.load(is);
} catch (IOException ex) {
LogFactory.getLog(getClass()).warn("Could not read '" +
versionInfoFile + "', " + ex.toString(), ex);
} finally {
IOUtils.closeStream(is);
}
}
/**
* Handle the supplied annotation introspection exception.
* <p>If the supplied exception is an {@link AnnotationConfigurationException},
* it will simply be thrown, allowing it to propagate to the caller, and
* nothing will be logged.
* <p>Otherwise, this method logs an introspection failure (in particular
* {@code TypeNotPresentExceptions}) before moving on, assuming nested
* Class values were not resolvable within annotation attributes and
* thereby effectively pretending there were no annotations on the specified
* element.
* @param element the element that we tried to introspect annotations on
* @param ex the exception that we encountered
* @see #rethrowAnnotationConfigurationException
*/
static void handleIntrospectionFailure(AnnotatedElement element, Exception ex) {
rethrowAnnotationConfigurationException(ex);
Log loggerToUse = logger;
if (loggerToUse == null) {
loggerToUse = LogFactory.getLog(AnnotationUtils.class);
logger = loggerToUse;
}
if (element instanceof Class && Annotation.class.isAssignableFrom((Class<?>) element)) {
// Meta-annotation lookup on an annotation type
if (loggerToUse.isDebugEnabled()) {
loggerToUse.debug("Failed to introspect meta-annotations on [" + element + "]: " + ex);
}
}
else {
// Direct annotation lookup on regular Class, Method, Field
if (loggerToUse.isInfoEnabled()) {
loggerToUse.info("Failed to introspect annotations on [" + element + "]: " + ex);
}
}
}
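A sketch of the calling pattern this utility is written for (the wrapper method is hypothetical): attempt the introspection, delegate any failure, and fall back to behaving as if the element were unannotated.

// Hypothetical caller following the contract described in the Javadoc above.
static Annotation[] getAnnotationsSafely(AnnotatedElement element) {
    try {
        return element.getAnnotations();
    }
    catch (Exception ex) {
        // AnnotationConfigurationException is rethrown inside; anything else
        // is logged and swallowed.
        handleIntrospectionFailure(element, ex);
        return new Annotation[0]; // pretend there were no annotations
    }
}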
private void checkCompositeIdentifier() {
if ( getIdentifier() instanceof Component ) {
Component id = (Component) getIdentifier();
if ( !id.isDynamic() ) {
Class idClass = id.getComponentClass();
if ( idClass != null && !ReflectHelper.overridesEquals( idClass ) ) {
LogFactory.getLog(RootClass.class)
.warn( "composite-id class does not override equals(): "
+ id.getComponentClass().getName() );
}
if ( !ReflectHelper.overridesHashCode( idClass ) ) {
LogFactory.getLog(RootClass.class)
.warn( "composite-id class does not override hashCode(): "
+ id.getComponentClass().getName() );
}
if ( !Serializable.class.isAssignableFrom( idClass ) ) {
throw new MappingException( "composite-id class must implement Serializable: "
+ id.getComponentClass().getName() );
}
}
}
}
/**
* Resolves the client IP address for the given request, checking the common
* proxy headers before falling back to the remote address.
*
* @param request HttpServletRequest
* @return String the resolved IP address
*/
public static final String getRequestIpAddress(HttpServletRequest request) {
String ipAddress = request.getHeader("x-forwarded-for");
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getHeader("Proxy-Client-IP");
}
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getHeader("WL-Proxy-Client-IP");
}
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getRemoteAddr();
}
LogFactory.getLog(WebContext.class).debug(
"getRequestIpAddress() RequestIpAddress:" + ipAddress);
return ipAddress;
}
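A brief usage sketch (the filter is hypothetical). Note that when several proxies are chained, x-forwarded-for can carry a comma-separated list of addresses, in which case the first entry is conventionally the originating client; the method above returns the header value as-is.

// Hypothetical servlet filter logging the resolved client address.
public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
        throws IOException, ServletException {
    String clientIp = WebContext.getRequestIpAddress((HttpServletRequest) req);
    LogFactory.getLog(getClass()).info("request from " + clientIp);
    chain.doFilter(req, res);
}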
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
// Rely on default serialization; just initialize state after deserialization.
ois.defaultReadObject();
// Initialize transient fields.
this.logger = LogFactory.getLog(getClass());
}
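For context, the companion field would plausibly be declared as below (it is not shown in the snippet): the logger is transient because Log implementations are not guaranteed to be Serializable, which is exactly why readObject has to restore it.

// Plausible companion declaration, assuming the pattern above:
private transient Log logger = LogFactory.getLog(getClass());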
private long mmap(Emulator<?> emulator) {
Arm64RegisterContext context = emulator.getContext();
UnicornPointer addr = context.getXPointer(0);
int length = context.getXInt(1);
int prot = context.getXInt(2);
int flags = context.getXInt(3);
int fd = context.getXInt(4);
long offset = context.getXLong(5);
int tag = fd >>> 24;
if (tag != 0) {
fd = -1;
}
boolean warning = length >= 0x10000000;
long base = emulator.getMemory().mmap2(addr == null ? 0 : addr.peer, length, prot, flags, fd, (int) offset);
String msg = "mmap addr=" + addr + ", base=0x" + Long.toHexString(base) + ", length=" + length + ", prot=0x" + Integer.toHexString(prot) + ", flags=0x" + Integer.toHexString(flags) + ", fd=" + fd + ", offset=" + offset + ", tag=" + tag + ", LR=" + context.getLRPointer();
if (log.isDebugEnabled() || warning) {
if (warning) {
log.warn(msg);
} else {
log.debug(msg);
}
} else if (LogFactory.getLog("com.github.unidbg.ios.malloc").isDebugEnabled()) {
log.debug(msg);
}
return base;
}
private RegexImpl compile(String pattern)
{
RegexImpl impl = null;
int regexMethod = determineBestRegexMethod(pattern);
switch (regexMethod)
{
case 0:
impl = new CompiledRegex(Pattern.compile(pattern));
break;
case 1:
try
{
impl = new CompiledAutomaton(pattern);
}
catch (IllegalArgumentException e)
{
Log log = LogFactory.getLog(getClass());
log.debug("Got an IllegalArgumentException for Pattern: " + pattern);
log.debug(e.getMessage());
log.debug("Switching to java.util.regex");
impl = new CompiledRegex(Pattern.compile(pattern));
}
break;
default:
break;
}
return impl;
}
private static Future<?> quietlyCallListener(final ProgressListener listener,
final ProgressEvent event) {
try {
listener.progressChanged(event);
} catch(Throwable t) {
// That's right, we need to suppress all errors so as to be on par
// with the async mode where all failures will be ignored.
LogFactory.getLog(SDKProgressPublisher.class)
.debug("Failure from the event listener", t);
}
return null;
}
public HtmlParser(String baseUri, String content) {
long beginTime = System.currentTimeMillis();
log = LogFactory.getLog(HtmlParser.class);
this.baseUri = baseUri;
if (isTable(content)) {
this.document = Jsoup.parse(content, baseUri, Parser.xmlParser());
} else {
this.document = Jsoup.parse(content, baseUri);
}
long endTime = System.currentTimeMillis();
if (log.isTraceEnabled()) {
log.trace("init html parser : " + (endTime - beginTime) + "ms");
}
}
/**
* Closes anything {@link Closeable}; any throwable raised while closing is
* caught and logged as an error.
*/
public static void close(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (Throwable t) {
Log log = LogFactory.getLog(Closer.class);
log.error("Error closing object of type " + closeable.getClass().getName(), t);
}
}
}
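A short usage sketch, assuming a caller that wants a close() failure logged rather than allowed to mask an exception thrown by the read itself (the method and file name are hypothetical):

private void readData() throws IOException {
    InputStream in = null;
    try {
        in = new FileInputStream("data.bin"); // hypothetical resource
        // ... read from the stream ...
    } finally {
        Closer.close(in); // null-safe; close() failures are logged, not rethrown
    }
}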
public void nullSafeSet(PreparedStatement st, Object value, int index) throws HibernateException, SQLException {
Log log = LogFactory.getLog(getClass());
if (value == null || defaultValue.equals(value)) {
log.trace("binding null to parameter: " + index);
st.setNull(index, Types.INTEGER);
} else {
log.trace("binding " + value + " to parameter: " + index);
st.setInt(index, ((Integer) value).intValue());
}
}
@Override
public void afterPropertiesSet() {
try {
super.afterPropertiesSet();
}
catch (ValidationException ex) {
LogFactory.getLog(getClass()).debug("Failed to set up a Bean Validation provider", ex);
}
}
public Log(String inName) {
lm = LogManager.getLogManager();
lm.addRequestors(inName);
clazzname = inName;
// Set initial log level
// Used to be: set default log level to ERROR
// IMHO it should be lower, but at least info ( costin ).
setLevel(Log.LOG_LEVEL_INFO);
// Set log level from properties
String lvl = getStringProperty(systemPrefix + "log." + clazzname);
int i = String.valueOf(inName).lastIndexOf(".");
while (null == lvl && i > -1) {
inName = inName.substring(0, i);
lvl = getStringProperty(systemPrefix + "log." + inName);
i = inName.lastIndexOf(".");
}
if (null == lvl) {
lvl = getStringProperty(systemPrefix + "defaultlog");
}
/* Still not found so use the commons one if there is one */
if (null == lvl) {
lvl = (String) LogFactory.getFactory().getAttribute("level");
}
if (null != lvl) {
setLevel(getIntLogLevel(lvl));
}
configuredLogLevel = getLevel();
}
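For illustration, given a hypothetical logger named org.example.db.Pool, the constructor above resolves the level in this order: systemPrefix + "log.org.example.db.Pool", then systemPrefix + "log.org.example.db", then systemPrefix + "log.org.example", then systemPrefix + "log.org", then systemPrefix + "defaultlog", and finally the "level" attribute of the commons-logging LogFactory.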
@Test
public void run() throws OpenEJBException, NamingException, IOException {
final File tempWar = new File("target/AppDataSourceTest");
tempWar.mkdirs();
new File(tempWar, "WEB-INF").mkdirs();
IO.writeString(new File(tempWar, "WEB-INF/resources.xml"),
"<resources>\n" +
"<Resource id=\"java:app/gace/MyDS\" type=\"DataSource\">\n" +
"DataSourceCreator=dbcp\n" +
"</Resource>\n" +
"</resources>\n");
final Collection<LogRecord> records = new ArrayList<>();
try (final Container c = new Container(new Configuration().randomHttpPort())) {
Jdk14Logger.class.cast(LogFactory.getLog(BasicDataSource.class)).getLogger().addHandler(new Handler() {
@Override
public void publish(final LogRecord record) {
if (record.getLevel() == Level.SEVERE || record.getLevel() == Level.WARNING) {
records.add(record);
}
}
@Override
public void flush() {
// no-op
}
@Override
public void close() throws SecurityException {
// no-op
}
});
c.deploy(null, tempWar);
}
// if we have the JMX bug of dbcp2 integration (in 7.0.0) then we have a WARNING record from BasicDataSource.close()
// saying:
// Failed to unregister the JMX name:
// Tomcat:type=DataSource,host=localhost,context=/AppDataSourceTest,class=javax.sql.DataSource,name="openejb/Resource/AppDataSourceTest/app/gace/MyDS"
assertTrue(records.isEmpty());
}
public CommandExecutorImpl(Object service, Method method) {
this.service = service;
this.method = method;
String[] paramTypes = new String[method.getParameterTypes().length];
for (int i = 0; i < method.getParameterTypes().length; i++) {
paramTypes[i] = method.getParameterTypes()[i].getCanonicalName();
}
CommandDefinition definition = new CommandDefinition(
method.getDeclaringClass().getName(), method.getName(), method.getReturnType().getCanonicalName(), paramTypes);
this.setCommandDefinition(definition);
this.executorLogger = LogFactory.getLog(method.getDeclaringClass().getCanonicalName());
}
@Override
public void shutdown(DataSource dataSource, String databaseName) {
try {
new EmbeddedDriver().connect(
String.format(URL_TEMPLATE, databaseName, "drop=true"), new Properties());
}
catch (SQLException ex) {
// SQL state 08006 indicates that Derby shut down successfully; anything else is a real failure
if (!"08006".equals(ex.getSQLState())) {
LogFactory.getLog(getClass()).warn("Could not shut down embedded Derby database", ex);
}
}
}
@Override
public void init(Properties properties) {
String loggerName = properties.getProperty(CONF_LOGGER_NAME);
String loggerClassName = properties.getProperty(CONF_LOGGER_CLASS);
Class loggerClass;
if (loggerClassName != null) {
try {
loggerClass = Class.forName(loggerClassName);
} catch (ClassNotFoundException cnfe) {
// Could not find class name
throw new EsHadoopIllegalArgumentException("Could not locate logger class [" + loggerClassName + "].", cnfe);
}
} else {
loggerClass = null;
}
if (loggerName != null && loggerClass != null) {
throw new EsHadoopIllegalArgumentException("Both logger name and logger class provided for drop and log handler. Provide only one. Bailing out...");
}
if (loggerName != null) {
logger = LogFactory.getLog(loggerName);
} else if (loggerClass != null) {
logger = LogFactory.getLog(loggerClass);
} else {
throw new EsHadoopIllegalArgumentException("No logger name or logger class provided for drop and log handler. Provide one. Bailing out...");
}
String rawLoggerLevel = properties.getProperty(CONF_LOGGER_LEVEL, LogLevel.WARN.name());
if (!LogLevel.names.contains(rawLoggerLevel)) {
throw new EsHadoopIllegalArgumentException("Invalid logger level [" + rawLoggerLevel + "] given. Available logging levels: " + LogLevel.names.toString());
}
loggerLevel = LogLevel.valueOf(rawLoggerLevel);
}
@Test
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
String jar = JarFinder.getJar(LogFactory.class);
Assert.assertTrue(new File(jar).exists());
}
public Method getWriteMethodForActualAccess() {
Assert.state(this.writeMethod != null, "No write method available");
Set<Method> ambiguousCandidates = this.ambiguousWriteMethods;
if (ambiguousCandidates != null) {
this.ambiguousWriteMethods = null;
LogFactory.getLog(GenericTypeAwarePropertyDescriptor.class).warn("Invalid JavaBean property '" +
getName() + "' being accessed! Ambiguous write methods found next to actually used [" +
this.writeMethod + "]: " + ambiguousCandidates);
}
return this.writeMethod;
}
@Bean
Job job(JobBuilderFactory jobBuilderFactory,
StepBuilderFactory stepBuilderFactory, JdbcTemplate template,
ItemReader<Contact> fileReader,
ItemProcessor<Contact, Contact> emailProcessor,
ItemWriter<Contact> jdbcWriter) {
Step setup = stepBuilderFactory.get("clean-contact-table")
.tasklet((contribution, chunkContext) -> {
template.update("delete from CONTACT");
return RepeatStatus.FINISHED;
}).build();
Step fileToJdbc = stepBuilderFactory.get("file-to-jdbc-fileToJdbc")
.<Contact, Contact>chunk(5)
// <1>
.reader(fileReader).processor(emailProcessor).writer(jdbcWriter)
.faultTolerant().skip(InvalidEmailException.class)
// <2>
.skipPolicy((Throwable t, int skipCount) -> {
LogFactory.getLog(getClass()).info("skipping ");
return t.getClass().isAssignableFrom(InvalidEmailException.class);
}).retry(HttpStatusCodeException.class) // <3>
.retryLimit(2).build();
return jobBuilderFactory.get("etl") // <4>
.start(setup).next(fileToJdbc).build();
}
@Test
public void testServerSaslNoClientSasl() throws Exception {
HdfsConfiguration clusterConf = createSecureConfig(
"authentication,integrity,privacy");
// Set short retry timeouts so this test runs faster
clusterConf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 10);
startCluster(clusterConf);
HdfsConfiguration clientConf = new HdfsConfiguration(clusterConf);
clientConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "");
LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(
LogFactory.getLog(DataNode.class));
try {
doTest(clientConf);
Assert.fail("Should fail if SASL data transfer protection is not " +
"configured or not supported in client");
} catch (IOException e) {
GenericTestUtils.assertMatches(e.getMessage(),
"could only be replicated to 0 nodes");
} finally {
logs.stopCapturing();
}
GenericTestUtils.assertMatches(logs.getOutput(),
"Failed to read expected SASL data transfer protection " +
"handshake from client at");
}
private static void initLoggers() {
((Log4JLogger) NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
((Log4JLogger) LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
((Log4JLogger) DataNode.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) TestFiPipelines.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) FiTestUtil.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) BlockReceiverAspects.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) DFSClientAspects.LOG).getLogger().setLevel(Level.ALL);
}