Java Code Examples for org.apache.log4j.Logger#setLevel()

The following examples show how to use org.apache.log4j.Logger#setLevel(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: InitializerLogFactory.java    From openmrs-module-initializer with MIT License 6 votes vote down vote up
/**
 * Returns the Commons Logging {@link Log} for the given class and, as a side
 * effect, attaches a {@link FileAppender} (logging everything, {@link Level#ALL})
 * to the matching Log4j logger so module output is also written to
 * {@code inizLogFilePath}.
 *
 * @param className the class whose logger should be configured
 * @return the Commons Logging log for {@code className}
 */
@SuppressWarnings("rawtypes")
public static Log getLog(Class className) {
	
	Log log = LogFactory.getLog(className);
	
	final Logger logger = Logger.getLogger(className);
	try {
		logger.addAppender(
		    new FileAppender(new PatternLayout("%p - %C{1}.%M(%L) |%d{ISO8601}| %m%n"), inizLogFilePath, true));
		// Level.ALL is already a Level — the original "(Level) Level.ALL" cast was redundant.
		logger.setLevel(Level.ALL);
	}
	catch (IOException e) {
		// Best-effort: keep the default logging mechanism if the file appender fails.
		log.error("The custom logger could not be setup, defaulting on the usual logging mechanism.", e);
	}
	
	return log;
}
 
Example 2
Source File: Hyperjaxb3Mojo.java    From hyperjaxb3 with BSD 2-Clause "Simplified" License 6 votes vote down vote up
/**
 * Sets up the verbose and debug mode depending on mvn logging level, and
 * sets up hyperjaxb logging.
 */
protected void setupLogging() {
	super.setupLogging();

	// Silence the root logger; all hyperjaxb output goes through our own appender below.
	final Logger rootLogger = LogManager.getRootLogger();
	rootLogger.addAppender(new NullAppender());
	final Logger logger = LogManager.getLogger("org.jvnet.hyperjaxb3");

	final Log log = getLog();
	// Reuse the already-fetched Maven log instead of calling getLog() a second time.
	logger.addAppender(new Appender(log, new PatternLayout(
			"%m%n        %c%n")));

	// Map the Maven log level onto the hyperjaxb logger, most verbose first.
	// All branches are braced for consistency (the original braced only the first).
	if (this.getDebug()) {
		log.debug("Logger level set to [debug].");
		logger.setLevel(Level.DEBUG);
	} else if (this.getVerbose()) {
		logger.setLevel(Level.INFO);
	} else if (log.isWarnEnabled()) {
		logger.setLevel(Level.WARN);
	} else {
		logger.setLevel(Level.ERROR);
	}
}
 
Example 3
Source File: TableAdmin.java    From incubator-retired-blur with Apache License 2.0 6 votes vote down vote up
/**
 * Adjusts the logging level of the named logger — or the root logger when the
 * name is null — to the requested Blur level.
 *
 * @param classNameOrLoggerName logger (or class) name; null targets the root logger
 * @param level the Blur-level to apply
 * @throws BlurException if no logger exists for the given name
 */
@Override
public void logging(String classNameOrLoggerName, Level level) throws BlurException, TException {
  Logger logger = classNameOrLoggerName == null
      ? LogManager.getRootLogger()
      : LogManager.getLogger(classNameOrLoggerName);

  if (logger == null) {
    throw new BException("Logger [{0}] not found.", classNameOrLoggerName);
  }
  org.apache.log4j.Level previousLevel = logger.getLevel();
  org.apache.log4j.Level requestedLevel = getLevel(level);
  LOG.info("Changing Logger [{0}] from logging level [{1}] to [{2}]", logger.getName(), previousLevel, requestedLevel);
  logger.setLevel(requestedLevel);
}
 
Example 4
Source File: TestFsck.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Sets up the log4j logger for audit logs: removes any stale audit log file,
 * then routes the namesystem audit logger at INFO into a rolling file appender.
 */
private void setupAuditLogs() throws IOException {
  // Start from a clean audit log file; deletion is best-effort, as in the original.
  File auditFile = new File(auditLogFile);
  if (auditFile.exists()) {
    auditFile.delete();
  }
  Logger auditLogger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  auditLogger.setLevel(Level.INFO);
  auditLogger.addAppender(
      new RollingFileAppender(new PatternLayout("%m%n"), auditLogFile));
}
 
Example 5
Source File: AbstractTableConfigHelperTest.java    From datawave with Apache License 2.0 5 votes vote down vote up
/**
 * Remembers this test logger's current level in {@code testDriverLevel} and
 * raises the logger to ALL so every message is captured during the run.
 */
@BeforeClass
public static void adjustLogLevels() {
    
    Logger testLogger = Logger.getLogger(AbstractTableConfigHelperTest.class);
    AbstractTableConfigHelperTest.testDriverLevel = testLogger.getLevel();
    testLogger.setLevel(Level.ALL);
}
 
Example 6
Source File: GridAbstractTest.java    From ignite with Apache License 2.0 5 votes vote down vote up
/**
 * Called after execution of every test method in class or if {@link #beforeTest()} failed without test method
 * execution.
 * <p>
 * Do not annotate with {@link After} in overriding methods.</p>
 *
 * @throws Exception If failed.
 */
protected void afterTest() throws Exception {
    try {
        // Restore each logger's saved level. Map.forEach avoids the original's
        // keySet()+get() pattern, which performed a second lookup per logger.
        changedLevels.forEach(Logger::setLevel);
    }
    finally {
        // Always reset the bookkeeping, even if restoring a level throws.
        changedLevels.clear();
    }
}
 
Example 7
Source File: ContextTest.java    From tinkerpop with Apache License 2.0 5 votes vote down vote up
/**
 * Installs the recording appender on the root logger, remembering the original
 * level before opening the root logger up to ALL (restored elsewhere).
 */
@Before
public void addRecordingAppender() {
    final Logger root = Logger.getRootLogger();
    originalLogLevel = root.getLevel();
    root.addAppender(recordingAppender);
    root.setLevel(Level.ALL);
}
 
Example 8
Source File: MetadataTableConfigHelperTest.java    From datawave with Apache License 2.0 5 votes vote down vote up
/**
 * Saves this test logger's current level in {@code testDriverLevel} and opens
 * the logger up to ALL so every log statement is visible while the test runs.
 */
@Before
public void setup() {
    Logger testLog = Logger.getLogger(MetadataTableConfigHelperTest.class);
    MetadataTableConfigHelperTest.testDriverLevel = testLog.getLevel();
    testLog.setLevel(Level.ALL);
}
 
Example 9
Source File: TestAuditLogs.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that the audit log contains exactly {@code ndupe} audit events,
 * each matching {@code auditPattern}, and that every event's success flag
 * matches {@code expectSuccess}. Logging is switched off and appenders are
 * closed first so the file is fully flushed before reading.
 *
 * @param expectSuccess expected success flag of every audited event
 * @param ndupe number of duplicate audit events expected in the log
 * @throws IOException if the audit log cannot be read
 */
private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
    throws IOException {
  // Turn off the logs
  Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  logger.setLevel(Level.OFF);

  // Close the appenders and force all logs to be flushed
  Enumeration<?> appenders = logger.getAllAppenders();
  while (appenders.hasMoreElements()) {
    Appender appender = (Appender)appenders.nextElement();
    appender.close();
  }

  // try-with-resources replaces the original's manual finally/close block.
  try (BufferedReader reader = new BufferedReader(new FileReader(auditLogFile))) {
    boolean ret = true;
    for (int i = 0; i < ndupe; i++) {
      String line = reader.readLine();
      assertNotNull(line);
      assertTrue("Expected audit event not found in audit log",
          auditPattern.matcher(line).matches());
      ret &= successPattern.matcher(line).matches();
    }
    assertNull("Unexpected event in audit log", reader.readLine());
    assertTrue("Expected success=" + expectSuccess, ret == expectSuccess);
  }
}
 
Example 10
Source File: SolverServerImplementation.java    From unitime with Apache License 2.0 5 votes vote down vote up
/**
 * Sets the level of the named logger (root logger when {@code name} is null).
 * A null {@code level} clears the logger's explicit level so it inherits from
 * its parent; otherwise the integer is converted via {@code Level.toLevel}.
 */
@Override
public void setLoggingLevel(String name, Integer level) {
	sLog.info("Set logging level for " + (name == null ? "root" : name) + " to " + (level == null ? "null" : Level.toLevel(level)));
	Logger logger = (name == null ? Logger.getRootLogger() : Logger.getLogger(name));
	logger.setLevel(level == null ? null : Level.toLevel(level));
}
 
Example 11
Source File: TestYarnClient.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Checks that an application's type defaults to "YARN" when unspecified and
 * that an explicitly supplied type ("MAPREDUCE") is preserved on submission.
 */
@Test(timeout = 30000)
public void testApplicationType() throws Exception {
  // Run at DEBUG so submission details show up in the test output.
  LogManager.getRootLogger().setLevel(Level.DEBUG);
  MockRM rm = new MockRM();
  rm.start();
  RMApp defaultTypeApp = rm.submitApp(2000);
  RMApp mapReduceApp =
      rm.submitApp(200, "name", "user",
        new HashMap<ApplicationAccessType, String>(), false, "default", -1,
        null, "MAPREDUCE");
  Assert.assertEquals("YARN", defaultTypeApp.getApplicationType());
  Assert.assertEquals("MAPREDUCE", mapReduceApp.getApplicationType());
  rm.stop();
}
 
Example 12
Source File: GridAbstractTest.java    From ignite with Apache License 2.0 5 votes vote down vote up
/**
 * Sets the log level for root logger ({@link #log}) to {@link Level#DEBUG}. The log level will be resetted to
 * default in {@link #afterTest()}.
 */
protected final void setRootLoggerDebugLevel() {
    Logger rootLogger = Logger.getRootLogger();

    // Record the previous level for afterTest(); fail if it was already recorded
    // (i.e. this method was called twice without an intervening reset).
    Level previous = changedLevels.put(rootLogger, rootLogger.getLevel());
    assertNull(rootLogger + " level: " + Level.DEBUG, previous);

    rootLogger.setLevel(Level.DEBUG);
}
 
Example 13
Source File: LoggerUtil.java    From Bats with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a {@link DefaultLogger} for the given name, applying a configured
 * level when one is registered for that name (otherwise the level is inherited).
 */
@Override
public Logger makeNewLoggerInstance(String name)
{
  Logger newLogger = new DefaultLogger(name);
  Level configuredLevel = getLevelFor(name);
  if (configuredLevel != null) {
    newLogger.setLevel(configuredLevel);
  }
  return newLogger;
}
 
Example 14
Source File: TestLocalImhotepServiceCore.java    From imhotep with Apache License 2.0 5 votes vote down vote up
/**
 * Resets log4j and routes WARN-and-above to stdout, with ERROR and FATAL
 * additionally duplicated to stderr.
 */
@BeforeClass
public static void initLog4j() {
    BasicConfigurator.resetConfiguration();
    BasicConfigurator.configure();

    final Layout LAYOUT = new PatternLayout("[ %d{ISO8601} %-5p ] [%c{1}] %m%n");

    // Range filter accepting only events between ERROR and FATAL inclusive.
    LevelRangeFilter ERROR_FILTER = new LevelRangeFilter();
    ERROR_FILTER.setLevelMin(Level.ERROR);
    ERROR_FILTER.setLevelMax(Level.FATAL);

    // stdout: unfiltered — receives every event the root logger lets through
    final Appender STDOUT = new ConsoleAppender(LAYOUT, ConsoleAppender.SYSTEM_OUT);

    // stderr: only ERROR and FATAL (the range filter above; the original
    // comment "just things <= ERROR" described the filter backwards)
    final Appender STDERR = new ConsoleAppender(LAYOUT, ConsoleAppender.SYSTEM_ERR);
    STDERR.addFilter(ERROR_FILTER);

    final Logger ROOT_LOGGER = Logger.getRootLogger();

    ROOT_LOGGER.removeAllAppenders();

    ROOT_LOGGER.setLevel(Level.WARN); // suppress anything below WARN (DEBUG/INFO)

    ROOT_LOGGER.addAppender(STDOUT);
    ROOT_LOGGER.addAppender(STDERR);
}
 
Example 15
Source File: LoggerConfig.java    From olca-app with Mozilla Public License 2.0 4 votes vote down vote up
/**
 * Applies the given level: {@code Level.ALL} targets only the "org.openlca"
 * logger, any other level is applied to the root logger. The change itself is
 * then logged at INFO (and is therefore suppressed when the new level filters
 * INFO out).
 */
public static void setLevel(Level level) {
	Logger target;
	if (Objects.equal(level, Level.ALL)) {
		target = Logger.getLogger("org.openlca");
	} else {
		target = Logger.getRootLogger();
	}
	target.setLevel(level);
	target.info("Log-level=" + level);
}
 
Example 16
Source File: CacheLFUTestCase.java    From siddhi with Apache License 2.0 4 votes vote down vote up
/**
 * With a two-column primary key (symbol, price) and an LFU cache of size 2,
 * verifies that "update or insert" routes through the cache and that a later
 * on-demand query for a row produces exactly one cache miss followed by a
 * load from the store. The cache behavior is asserted by inspecting DEBUG
 * log messages captured by a recording appender on the root logger.
 */
@Test(description = "cacheLFUTestCase8") // 2 primary keys & LFU & update or add func with update
public void cacheLFUTestCase8() throws InterruptedException, SQLException {
    // Capture root-logger DEBUG output so cache decisions can be asserted below.
    final TestAppenderToValidateLogsForCachingTests appender = new TestAppenderToValidateLogsForCachingTests();
    final Logger logger = Logger.getRootLogger();
    logger.setLevel(Level.DEBUG);
    logger.addAppender(appender);
    SiddhiManager siddhiManager = new SiddhiManager();
    // Table backed by "testStoreForCacheMiss" with an LFU cache of max 2 rows.
    String streams = "" +
            "define stream StockStream (symbol string, price float, volume long); " +
            "define stream UpdateStockStream (symbol string, price float, volume long); " +
            "@Store(type=\"testStoreForCacheMiss\", @Cache(size=\"2\", cache.policy=\"LFU\"))\n" +
            "@PrimaryKey(\'symbol\', \'price\') " +
            "define table StockTable (symbol string, price float, volume long); ";
    String query = "" +
            "@info(name = 'query1') " +
            "from StockStream " +
            "insert into StockTable ;" +
            "" +
            "@info(name = 'query2') " +
            "from UpdateStockStream " +
            "update or insert into StockTable " +
            "   on (StockTable.symbol == symbol AND StockTable.price == price);";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
    siddhiAppRuntime.addCallback("query2", new QueryCallback() {
        @Override
        public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
            EventPrinter.print(timestamp, inEvents, removeEvents);
            if (inEvents != null) {
                for (Event event : inEvents) {
                    inEventCount++;
                    switch (inEventCount) {
                        case 1:
                            // The update must carry the new volume (3L) for the WSO2/55.6 key.
                            Assert.assertEquals(event.getData(), new Object[]{"WSO2", 55.6f, 3L});
                            break;
                    }
                }
                eventArrived = true;
            }
        }

    });
    InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
    InputHandler updateStockStream = siddhiAppRuntime.getInputHandler("UpdateStockStream");
    siddhiAppRuntime.start();

    // Insert two rows, update the first, then insert a third to exceed the cache size (2).
    stockStream.send(new Object[]{"WSO2", 55.6f, 1L});
    Thread.sleep(10);
    stockStream.send(new Object[]{"APPLE", 75.6f, 2L});
    Thread.sleep(10);
    updateStockStream.send(new Object[]{"WSO2", 55.6f, 3L});
    Thread.sleep(10);
    stockStream.send(new Object[]{"CISCO", 86.6f, 5L});

    // On-demand query on the full primary key; expected to hit store after a cache miss.
    Event[] events = siddhiAppRuntime.query("" +
            "from StockTable " +
            "on symbol == \"APPLE\" AND price == 75.6f ");
    EventPrinter.print(events);
    AssertJUnit.assertEquals(1, events.length);

    // Collect the captured log messages, stripping any "prefix: " before the payload.
    final List<LoggingEvent> log = appender.getLog();
    List<String> logMessages = new ArrayList<>();
    for (LoggingEvent logEvent : log) {
        String message = String.valueOf(logEvent.getMessage());
        if (message.contains(":")) {
            message = message.split(": ")[1];
        }
        logMessages.add(message);
    }
    // Exactly one miss-then-load-from-store sequence is expected; no direct
    // store reads and no cache hits for this query.
    Assert.assertEquals(logMessages.
            contains("store table size is smaller than max cache. Sending results from cache"), false);
    Assert.assertEquals(logMessages.contains("store table size is bigger than cache."), true);
    Assert.assertEquals(Collections.frequency(logMessages, "store table size is bigger than cache."), 1);
    Assert.assertEquals(logMessages.contains("cache constraints satisfied. Checking cache"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "cache constraints satisfied. Checking cache"), 1);
    Assert.assertEquals(logMessages.contains("cache hit. Sending results from cache"), false);
    Assert.assertEquals(logMessages.contains("cache miss. Loading from store"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "cache miss. Loading from store"), 1);
    Assert.assertEquals(logMessages.contains("store also miss. sending null"), false);
    Assert.assertEquals(logMessages.contains("sending results from cache after loading from store"), true);
    Assert.assertEquals(Collections.frequency(logMessages, "sending results from cache after loading from store"),
            1);
    Assert.assertEquals(logMessages.contains("sending results from store"), false);

    siddhiAppRuntime.shutdown();
}
 
Example 17
Source File: RhnBaseTestCase.java    From uyuni with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Util for turning on the spew from the l10n service for
 * test cases that make calls with dummy string IDs.
 *
 * NOTE(review): the javadoc above says this "turns on" logging, but
 * Level.ERROR actually restricts the logger to ERROR and above — confirm
 * whether Level.DEBUG/ALL was intended here.
 */
public static void enableLocalizationServiceLogging() {
    Logger log = Logger.getLogger(LocalizationService.class);
    log.setLevel(Level.ERROR);
}
 
Example 18
Source File: LogIT.java    From fluo with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies that transaction collisions are logged on the "fluo.tx.collisions"
 * logger: runs interleaved loaders that collide, captures the logger's TRACE
 * output in a StringWriter, and asserts the expected collision messages (and
 * the absence of TriggerLoader collisions) via regex matching.
 */
@Test
public void testCollisionLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx.collisions");

  // Capture this logger's output in memory so it can be pattern-matched below.
  StringWriter writer = new StringWriter();
  WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);

  // Save logger state so it can be restored exactly in the finally block.
  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();
  try {
    logger.setLevel(Level.TRACE);
    // Disable additivity so messages do not also propagate to parent appenders.
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      for (int i = 0; i < 20; i++) {
        le.execute(new SimpleBinaryLoader());
        le.execute(new TriggerLoader(i));
      }
    }

    miniFluo.waitForObservers();
  } finally {
    // Restore the logger to its pre-test configuration.
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  // Flatten to one line so ".*" patterns can span what were separate log lines.
  String logMsgs = writer.toString();
  logMsgs = logMsgs.replace('\n', ' ');

  Assert.assertFalse(logMsgs.contains("TriggerLoader"));

  String pattern;

  // A SimpleBinaryLoader transaction must report its collision under the same txid.
  pattern = ".*txid: (\\d+) class: org.apache.fluo.integration.log.LogIT\\$SimpleBinaryLoader";
  pattern += ".*txid: \\1 collisions: \\Q[r1\\x0d=[a \\x00\\x09 ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));

  // An observer transaction must likewise report its stat-count collision.
  pattern = ".*txid: (\\d+) trigger: \\d+ stat count  \\d+";
  pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$TestObserver";
  pattern += ".*txid: \\1 collisions: \\Q[all=[stat count ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example 19
Source File: LogIT.java    From fluo with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies that each snapshot get() variant is traced on the "fluo.tx" logger:
 * seeds four cells, performs the four get flavors inside a snapshot while the
 * logger's TRACE output is captured, then asserts the expected begin/get/close
 * sequence (all under one txid) with a single regex.
 */
@Test
public void testGetMethods() {

  Column c1 = new Column("f1", "q1");
  Column c2 = new Column("f1", "q2");

  // Seed the four cells the gets below will read.
  try (Transaction tx = client.newTransaction()) {
    tx.set("r1", c1, "v1");
    tx.set("r1", c2, "v2");
    tx.set("r2", c1, "v3");
    tx.set("r2", c2, "v4");
    tx.commit();
  }

  Logger logger = Logger.getLogger("fluo.tx");

  // Capture this logger's output in memory so it can be pattern-matched below.
  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  // Save logger state so it can be restored exactly in the finally block.
  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    // Disable additivity so messages do not also propagate to parent appenders.
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (Snapshot snap = client.newSnapshot()) {
      // Each get() flavor below is expected to emit one TRACE line.
      Map<RowColumn, String> ret1 =
          snap.gets(Arrays.asList(new RowColumn("r1", c1), new RowColumn("r2", c2)));
      Assert.assertEquals(
          ImmutableMap.of(new RowColumn("r1", c1), "v1", new RowColumn("r2", c2), "v4"), ret1);
      Map<String, Map<Column, String>> ret2 =
          snap.gets(Arrays.asList("r1", "r2"), ImmutableSet.of(c1));
      Assert.assertEquals(
          ImmutableMap.of("r1", ImmutableMap.of(c1, "v1"), "r2", ImmutableMap.of(c1, "v3")),
          ret2);
      Map<Column, String> ret3 = snap.gets("r1", ImmutableSet.of(c1, c2));
      Assert.assertEquals(ImmutableMap.of(c1, "v1", c2, "v2"), ret3);
      Assert.assertEquals("v1", snap.gets("r1", c1));
    }

    miniFluo.waitForObservers();
  } finally {
    // Restore the logger to its pre-test configuration.
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  // One txid must tie together begin, all four gets, and close, in order.
  String pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
  pattern += ".*txid: \\1 \\Qget([r1 f1 q1 , r2 f1 q2 ]) -> [r2 f1 q2 =v4, r1 f1 q1 =v1]\\E";
  pattern += ".*txid: \\1 \\Qget([r1, r2], [f1 q1 ]) -> [r1=[f1 q1 =v1], r2=[f1 q1 =v3]]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, [f1 q1 , f1 q2 ]) -> [f1 q1 =v1, f1 q2 =v2]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, f1 q1 ) -> v1\\E";
  pattern += ".*txid: \\1 close\\(\\).*";

  // Flatten to one line so ".*" patterns can span what were separate log lines.
  String origLogMsgs = writer.toString();
  String logMsgs = origLogMsgs.replace('\n', ' ');
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example 20
Source File: JsonFileOperations.java    From Apache-Spark-2x-for-Java-Developers with MIT License 4 votes vote down vote up
/**
 * Demonstrates reading JSON with Spark: schema inference from an RDD of JSON
 * strings and from a file, applying an explicit schema, and writing the
 * result back out as JSON.
 */
public static void main(String[] args) {
	System.setProperty("hadoop.home.dir", "E:\\sumitK\\Hadoop");

	// Keep Spark's console output quiet during the demo.
	Logger rootLogger = LogManager.getRootLogger();
	rootLogger.setLevel(Level.WARN);

	SparkSession sparkSession = SparkSession
			.builder()
			.master("local")
			.config("spark.sql.warehouse.dir", "file:///E:/sumitK/Hadoop/warehouse")
			.appName("JavaALSExample")
			.getOrCreate();

	RDD<String> textFile = sparkSession.sparkContext().textFile("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_json.json", 2);

	// Parse each JSON line into a bean with Jackson and print it.
	JavaRDD<PersonDetails> mapParser = textFile.toJavaRDD()
			.map(v1 -> new ObjectMapper().readValue(v1, PersonDetails.class));
	mapParser.foreach(t -> System.out.println(t));

	// Let Spark infer the schema from the RDD of JSON strings.
	Dataset<Row> anotherPeople = sparkSession.read().json(textFile);
	anotherPeople.printSchema();
	anotherPeople.show();

	// Same data read directly from the file path.
	Dataset<Row> json_rec = sparkSession.read().json("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_json.json");
	json_rec.printSchema();
	json_rec.show();

	// Explicit schema instead of inference (timestamps parsed as TimestampType).
	StructType schema = new StructType(new StructField[] {
			DataTypes.createStructField("cid", DataTypes.IntegerType, true),
			DataTypes.createStructField("county", DataTypes.StringType, true),
			DataTypes.createStructField("firstName", DataTypes.StringType, true),
			DataTypes.createStructField("sex", DataTypes.StringType, true),
			DataTypes.createStructField("year", DataTypes.StringType, true),
			DataTypes.createStructField("dateOfBirth", DataTypes.TimestampType, true) });

	Dataset<Row> person_mod = sparkSession.read().schema(schema).json(textFile);
	person_mod.printSchema();
	person_mod.show();

	person_mod.write().format("json").mode("overwrite").save("C:/Users/sumit.kumar/git/learning/src/main/resources/pep_out.json");

	// Release Spark resources — the original never stopped the session.
	sparkSession.stop();
}