org.apache.log4j.Logger#setLevel() Source Code Examples

Listed below are code examples of org.apache.log4j.Logger#setLevel(), collected from open-source projects; follow each project link to view the full source on GitHub.
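
Before the per-project examples, here is a minimal, self-contained sketch of the pattern most of them share: look up a logger, remember its current level, change it, and restore it afterwards. This sketch is not taken from any of the projects below, and the logger name "com.example.service" is a placeholder.

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class SetLevelSketch {
    public static void main(String[] args) {
        // Attach a simple console appender so the demo output is visible.
        BasicConfigurator.configure();

        // "com.example.service" is a placeholder logger name.
        Logger logger = Logger.getLogger("com.example.service");

        // Remember the current level; it may be null if the level is
        // inherited from a parent logger.
        Level originalLevel = logger.getLevel();

        logger.setLevel(Level.DEBUG);
        try {
            logger.debug("DEBUG logging is now enabled for this logger.");
        } finally {
            // Restore the previous level; setLevel(null) re-enables
            // inheritance from the parent logger.
            logger.setLevel(originalLevel);
        }
    }
}

Several of the examples below (for instance 13, 16, and 17) apply this same save-and-restore discipline so that a test cannot leak its log level into later tests.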

Example 1  Project: hyperjaxb3   File: Hyperjaxb3Mojo.java
/**
 * Sets up the verbose and debug mode depending on mvn logging level, and
 * sets up hyperjaxb logging.
 */
protected void setupLogging() {
	super.setupLogging();

	final Logger rootLogger = LogManager.getRootLogger();
	rootLogger.addAppender(new NullAppender());
	final Logger logger = LogManager.getLogger("org.jvnet.hyperjaxb3");

	final Log log = getLog();
	logger.addAppender(new Appender(getLog(), new PatternLayout(
			"%m%n        %c%n")));

	if (this.getDebug()) {
		log.debug("Logger level set to [debug].");
		logger.setLevel(Level.DEBUG);
	} else if (this.getVerbose())
		logger.setLevel(Level.INFO);
	else if (log.isWarnEnabled())
		logger.setLevel(Level.WARN);
	else
		logger.setLevel(Level.ERROR);
}
 
Example 2  Project: incubator-retired-blur   File: TableAdmin.java
@Override
public void logging(String classNameOrLoggerName, Level level) throws BlurException, TException {
  Logger logger;
  if (classNameOrLoggerName == null) {
    logger = LogManager.getRootLogger();
  } else {
    logger = LogManager.getLogger(classNameOrLoggerName);
  }

  if (logger == null) {
    throw new BException("Logger [{0}] not found.", classNameOrLoggerName);
  }
  org.apache.log4j.Level current = logger.getLevel();
  org.apache.log4j.Level newLevel = getLevel(level);
  LOG.info("Changing Logger [{0}] from logging level [{1}] to [{2}]", logger.getName(), current, newLevel);
  logger.setLevel(newLevel);
}
 
Example 3
@SuppressWarnings("rawtypes")
public static Log getLog(Class className) {
	
	Log log = LogFactory.getLog(className);
	
	final Logger logger = Logger.getLogger(className);
	try {
		logger.addAppender(
		    new FileAppender(new PatternLayout("%p - %C{1}.%M(%L) |%d{ISO8601}| %m%n"), inizLogFilePath, true));
		logger.setLevel(Level.ALL);
	}
	catch (IOException e) {
		log.error("The custom logger could not be setup, defaulting on the usual logging mechanism.", e);
	}
	
	return log;
}
 
Example 4  Project: imhotep   File: TestLocalImhotepServiceCore.java
@BeforeClass
public static void initLog4j() {
    BasicConfigurator.resetConfiguration();
    BasicConfigurator.configure();

    final Layout LAYOUT = new PatternLayout("[ %d{ISO8601} %-5p ] [%c{1}] %m%n");

    LevelRangeFilter ERROR_FILTER = new LevelRangeFilter();
    ERROR_FILTER.setLevelMin(Level.ERROR);
    ERROR_FILTER.setLevelMax(Level.FATAL);

    // everything including ERROR
    final Appender STDOUT = new ConsoleAppender(LAYOUT, ConsoleAppender.SYSTEM_OUT);

    // only ERROR and above (the filter range is ERROR..FATAL)
    final Appender STDERR = new ConsoleAppender(LAYOUT, ConsoleAppender.SYSTEM_ERR);
    STDERR.addFilter(ERROR_FILTER);

    final Logger ROOT_LOGGER = Logger.getRootLogger();

    ROOT_LOGGER.removeAllAppenders();

    ROOT_LOGGER.setLevel(Level.WARN); // suppress anything below WARN (DEBUG, INFO)

    ROOT_LOGGER.addAppender(STDOUT);
    ROOT_LOGGER.addAppender(STDERR);
}
 
Example 5  Project: big-c   File: TestFsck.java
/** Sets up the log4j logger for audit logs. */
private void setupAuditLogs() throws IOException {
  File file = new File(auditLogFile);
  if (file.exists()) {
    file.delete();
  }
  Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  logger.setLevel(Level.INFO);
  PatternLayout layout = new PatternLayout("%m%n");
  RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
  logger.addAppender(appender);
}
 
Example 6  Project: Bats   File: LoggerUtil.java
@Override
public Logger makeNewLoggerInstance(String name)
{
  Logger logger = new DefaultLogger(name);
  Level level = getLevelFor(name);
  if (level != null) {
    logger.setLevel(level);
  }
  return logger;
}
 
Example 7  Project: ignite   File: GridAbstractTest.java
/**
 * Sets the log level for root logger ({@link #log}) to {@link Level#DEBUG}. The log level will be reset to
 * default in {@link #afterTest()}.
 */
protected final void setRootLoggerDebugLevel() {
    Logger logger = Logger.getRootLogger();

    assertNull(logger + " level: " + Level.DEBUG, changedLevels.put(logger, logger.getLevel()));

    logger.setLevel(Level.DEBUG);
}
 
Example 8  Project: hadoop   File: TestYarnClient.java
@Test(timeout = 30000)
public void testApplicationType() throws Exception {
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);
  MockRM rm = new MockRM();
  rm.start();
  RMApp app = rm.submitApp(2000);
  RMApp app1 =
      rm.submitApp(200, "name", "user",
        new HashMap<ApplicationAccessType, String>(), false, "default", -1,
        null, "MAPREDUCE");
  Assert.assertEquals("YARN", app.getApplicationType());
  Assert.assertEquals("MAPREDUCE", app1.getApplicationType());
  rm.stop();
}
 
Example 9  Project: unitime   File: SolverServerImplementation.java
@Override
public void setLoggingLevel(String name, Integer level) {
	sLog.info("Set logging level for " + (name == null ? "root" : name) + " to " + (level == null ? "null" : Level.toLevel(level)));
	Logger logger = (name == null ? Logger.getRootLogger() : Logger.getLogger(name));
	if (level == null)
		logger.setLevel(null);
	else
		logger.setLevel(Level.toLevel(level));
}
 
Example 10  Project: hadoop   File: TestAuditLogs.java
private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
    throws IOException {
  // Turn off the logs
  Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  logger.setLevel(Level.OFF);

  // Close the appenders and force all logs to be flushed
  Enumeration<?> appenders = logger.getAllAppenders();
  while (appenders.hasMoreElements()) {
    Appender appender = (Appender)appenders.nextElement();
    appender.close();
  }

  BufferedReader reader = new BufferedReader(new FileReader(auditLogFile));
  String line = null;
  boolean ret = true;

  try {
    for (int i = 0; i < ndupe; i++) {
      line = reader.readLine();
      assertNotNull(line);
      assertTrue("Expected audit event not found in audit log",
          auditPattern.matcher(line).matches());
      ret &= successPattern.matcher(line).matches();
    }
    assertNull("Unexpected event in audit log", reader.readLine());
    assertTrue("Expected success=" + expectSuccess, ret == expectSuccess);
  } finally {
    reader.close();
  }
}
 
Example 11  Project: datawave   File: MetadataTableConfigHelperTest.java
@Before
public void setup() {
    Level desiredLevel = Level.ALL;
    
    Logger log = Logger.getLogger(MetadataTableConfigHelperTest.class);
    MetadataTableConfigHelperTest.testDriverLevel = log.getLevel();
    log.setLevel(desiredLevel);
}
 
Example 12  Project: tinkerpop   File: ContextTest.java
@Before
public void addRecordingAppender() {
    final Logger rootLogger = Logger.getRootLogger();
    rootLogger.addAppender(recordingAppender);
    originalLogLevel = rootLogger.getLevel();
    rootLogger.setLevel(Level.ALL);
}
 
Example 13  Project: ignite   File: GridAbstractTest.java
/**
 * Called after execution of every test method in class or if {@link #beforeTest()} failed without test method
 * execution.
 * <p>
 * Do not annotate with {@link After} in overriding methods.</p>
 *
 * @throws Exception If failed.
 */
protected void afterTest() throws Exception {
    try {
        for (Logger logger : changedLevels.keySet())
            logger.setLevel(changedLevels.get(logger));
    }
    finally {
        changedLevels.clear();
    }
}
 
Example 14  Project: datawave   File: AbstractTableConfigHelperTest.java
@BeforeClass
public static void adjustLogLevels() {
    
    Level desiredLevel = Level.ALL;
    
    Logger log = Logger.getLogger(AbstractTableConfigHelperTest.class);
    AbstractTableConfigHelperTest.testDriverLevel = log.getLevel();
    log.setLevel(desiredLevel);
}
 
Example 15
public static void main(String[] args) {
	System.setProperty("hadoop.home.dir", "E:\\sumitK\\Hadoop");
	Logger rootLogger = LogManager.getRootLogger();
	rootLogger.setLevel(Level.WARN);

	SparkSession sparkSession = SparkSession
			.builder()
			.master("local")
			.config("spark.sql.warehouse.dir", "file:///E:/sumitK/Hadoop/warehouse")
			.appName("JavaALSExample")
			.getOrCreate();

	RDD<String> textFile = sparkSession.sparkContext().textFile("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_json.json", 2);

	JavaRDD<PersonDetails> mapParser = textFile.toJavaRDD().map(v1 -> new ObjectMapper().readValue(v1, PersonDetails.class));

	mapParser.foreach(t -> System.out.println(t));

	Dataset<Row> anotherPeople = sparkSession.read().json(textFile);

	anotherPeople.printSchema();
	anotherPeople.show();

	Dataset<Row> json_rec = sparkSession.read().json("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_json.json");
	json_rec.printSchema();
	json_rec.show();

	StructType schema = new StructType(new StructField[] {
			DataTypes.createStructField("cid", DataTypes.IntegerType, true),
			DataTypes.createStructField("county", DataTypes.StringType, true),
			DataTypes.createStructField("firstName", DataTypes.StringType, true),
			DataTypes.createStructField("sex", DataTypes.StringType, true),
			DataTypes.createStructField("year", DataTypes.StringType, true),
			DataTypes.createStructField("dateOfBirth", DataTypes.TimestampType, true) });

	/* StructType pep = new StructType(new StructField[] {
			new StructField("Count", DataTypes.StringType, true, Metadata.empty()),
			new StructField("County", DataTypes.StringType, true, Metadata.empty()),
			new StructField("First Name", DataTypes.StringType, true, Metadata.empty()),
			new StructField("Sex", DataTypes.StringType, true, Metadata.empty()),
			new StructField("Year", DataTypes.StringType, true, Metadata.empty()),
			new StructField("timestamp", DataTypes.TimestampType, true, Metadata.empty()) }); */

	Dataset<Row> person_mod = sparkSession.read().schema(schema).json(textFile);

	person_mod.printSchema();
	person_mod.show();

	person_mod.write().format("json").mode("overwrite").save("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_out.json");
}
 
Example 16  Project: fluo   File: LogIT.java
@Test
public void testGetMethods() {

  Column c1 = new Column("f1", "q1");
  Column c2 = new Column("f1", "q2");

  try (Transaction tx = client.newTransaction()) {
    tx.set("r1", c1, "v1");
    tx.set("r1", c2, "v2");
    tx.set("r2", c1, "v3");
    tx.set("r2", c2, "v4");
    tx.commit();
  }

  Logger logger = Logger.getLogger("fluo.tx");

  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (Snapshot snap = client.newSnapshot()) {
      Map<RowColumn, String> ret1 =
          snap.gets(Arrays.asList(new RowColumn("r1", c1), new RowColumn("r2", c2)));
      Assert.assertEquals(
          ImmutableMap.of(new RowColumn("r1", c1), "v1", new RowColumn("r2", c2), "v4"), ret1);
      Map<String, Map<Column, String>> ret2 =
          snap.gets(Arrays.asList("r1", "r2"), ImmutableSet.of(c1));
      Assert.assertEquals(
          ImmutableMap.of("r1", ImmutableMap.of(c1, "v1"), "r2", ImmutableMap.of(c1, "v3")),
          ret2);
      Map<Column, String> ret3 = snap.gets("r1", ImmutableSet.of(c1, c2));
      Assert.assertEquals(ImmutableMap.of(c1, "v1", c2, "v2"), ret3);
      Assert.assertEquals("v1", snap.gets("r1", c1));
    }

    miniFluo.waitForObservers();
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
  pattern += ".*txid: \\1 \\Qget([r1 f1 q1 , r2 f1 q2 ]) -> [r2 f1 q2 =v4, r1 f1 q1 =v1]\\E";
  pattern += ".*txid: \\1 \\Qget([r1, r2], [f1 q1 ]) -> [r1=[f1 q1 =v1], r2=[f1 q1 =v3]]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, [f1 q1 , f1 q2 ]) -> [f1 q1 =v1, f1 q2 =v2]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, f1 q1 ) -> v1\\E";
  pattern += ".*txid: \\1 close\\(\\).*";

  String origLogMsgs = writer.toString();
  String logMsgs = origLogMsgs.replace('\n', ' ');
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example 17  Project: fluo   File: LogIT.java
@Test
public void testCollisionLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx.collisions");

  StringWriter writer = new StringWriter();
  WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();
  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      for (int i = 0; i < 20; i++) {
        le.execute(new SimpleBinaryLoader());
        le.execute(new TriggerLoader(i));
      }
    }

    miniFluo.waitForObservers();
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String logMsgs = writer.toString();
  logMsgs = logMsgs.replace('\n', ' ');

  Assert.assertFalse(logMsgs.contains("TriggerLoader"));

  String pattern;

  pattern = ".*txid: (\\d+) class: org.apache.fluo.integration.log.LogIT\\$SimpleBinaryLoader";
  pattern += ".*txid: \\1 collisions: \\Q[r1\\x0d=[a \\x00\\x09 ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));

  pattern = ".*txid: (\\d+) trigger: \\d+ stat count  \\d+";
  pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$TestObserver";
  pattern += ".*txid: \\1 collisions: \\Q[all=[stat count ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example 18  Project: olca-app   File: LoggerConfig.java
public static void setLevel(Level level) {
	Logger logger = Objects.equal(level, Level.ALL) ? Logger
			.getLogger("org.openlca") : Logger.getRootLogger();
	logger.setLevel(level);
	logger.info("Log-level=" + level);
}
 
Example 19  Project: siddhi   File: CacheLFUTestCase.java
@Test(description = "cacheLFUTestCase8") // 2 primary keys & LFU & update or add func with update
public void cacheLFUTestCase8() throws InterruptedException, SQLException {
    final TestAppenderToValidateLogsForCachingTests appender = new TestAppenderToValidateLogsForCachingTests();
    final Logger logger = Logger.getRootLogger();
    logger.setLevel(Level.DEBUG);
    logger.addAppender(appender);
    SiddhiManager siddhiManager = new SiddhiManager();
    String streams = "" +
            "define stream StockStream (symbol string, price float, volume long); " +
            "define stream UpdateStockStream (symbol string, price float, volume long); " +
            "@Store(type=\"testStoreForCacheMiss\", @Cache(size=\"2\", cache.policy=\"LFU\"))\n" +
            "@PrimaryKey(\'symbol\', \'price\') " +
            "define table StockTable (symbol string, price float, volume long); ";
    String query = "" +
            "@info(name = 'query1') " +
            "from StockStream " +
            "insert into StockTable ;" +
            "" +
            "@info(name = 'query2') " +
            "from UpdateStockStream " +
            "update or insert into StockTable " +
            "   on (StockTable.symbol == symbol AND StockTable.price == price);";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
    siddhiAppRuntime.addCallback("query2", new QueryCallback() {
        @Override
        public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
            EventPrinter.print(timestamp, inEvents, removeEvents);
            if (inEvents != null) {
                for (Event event : inEvents) {
                    inEventCount++;
                    switch (inEventCount) {
                        case 1:
                            Assert.assertEquals(event.getData(), new Object[]{"WSO2", 55.6f, 3L});
                            break;
                    }
                }
                eventArrived = true;
            }
        }

    });
    InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
    InputHandler updateStockStream = siddhiAppRuntime.getInputHandler("UpdateStockStream");
    siddhiAppRuntime.start();

    stockStream.send(new Object[]{"WSO2", 55.6f, 1L});
    Thread.sleep(10);
    stockStream.send(new Object[]{"APPLE", 75.6f, 2L});
    Thread.sleep(10);
    updateStockStream.send(new Object[]{"WSO2", 55.6f, 3L});
    Thread.sleep(10);
    stockStream.send(new Object[]{"CISCO", 86.6f, 5L});

    Event[] events = siddhiAppRuntime.query("" +
            "from StockTable " +
            "on symbol == \"APPLE\" AND price == 75.6f ");
    EventPrinter.print(events);
    AssertJUnit.assertEquals(1, events.length);

    final List<LoggingEvent> log = appender.getLog();
    List<String> logMessages = new ArrayList<>();
    for (LoggingEvent logEvent : log) {
        String message = String.valueOf(logEvent.getMessage());
        if (message.contains(":")) {
            message = message.split(": ")[1];
        }
        logMessages.add(message);
    }
    Assert.assertEquals(logMessages.
            contains("store table size is smaller than max cache. Sending results from cache"), false);
    Assert.assertEquals(logMessages.contains("store table size is bigger than cache."), true);
    Assert.assertEquals(Collections.frequency(logMessages, "store table size is bigger than cache."), 1);
    Assert.assertEquals(logMessages.contains("cache constraints satisfied. Checking cache"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "cache constraints satisfied. Checking cache"), 1);
    Assert.assertEquals(logMessages.contains("cache hit. Sending results from cache"), false);
    Assert.assertEquals(logMessages.contains("cache miss. Loading from store"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "cache miss. Loading from store"), 1);
    Assert.assertEquals(logMessages.contains("store also miss. sending null"), false);
    Assert.assertEquals(logMessages.contains("sending results from cache after loading from store"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "sending results from cache after loading from store"),
            1);
    Assert.assertEquals(logMessages.contains("sending results from store"), false);

    siddhiAppRuntime.shutdown();
}
 
Example 20  Project: uyuni   File: RhnBaseTestCase.java
/**
 * Util for turning on the spew from the l10n service for
 * test cases that make calls with dummy string IDs.
 */
public static void enableLocalizationServiceLogging() {
    Logger log = Logger.getLogger(LocalizationService.class);
    log.setLevel(Level.ERROR);
}