org.apache.log4j.WriterAppender Java Examples

The following examples show how to use org.apache.log4j.WriterAppender. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
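Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical WriterAppender lifecycle: attach an appender backed by an in-memory Writer, log, inspect the captured output, then detach. The class and logger names are placeholders chosen for illustration.

import java.io.StringWriter;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.WriterAppender;

public class WriterAppenderSketch {
  public static void main(String[] args) {
    // Capture log output in memory instead of sending it to a file or console.
    StringWriter writer = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout("%p - %m%n"), writer);

    Logger logger = Logger.getLogger("example");
    logger.setLevel(Level.INFO);
    logger.addAppender(appender);
    try {
      logger.info("hello");
      // WriterAppender flushes immediately by default, so the message is already in the writer.
      System.out.println(writer.toString()); // prints: INFO - hello
    } finally {
      // Detach the appender so later code does not keep writing to this writer.
      logger.removeAppender(appender);
    }
  }
}

Most of the examples below follow this same attach/log/inspect/detach pattern, usually inside tests or servlets.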
Example #1
Source File: LogStreamServlet.java    From hadoop-ozone with Apache License 2.0
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
    throws IOException {

  // Stream live log output straight to the HTTP response.
  WriterAppender appender =
      new WriterAppender(new PatternLayout(PATTERN), resp.getWriter());
  appender.setThreshold(Level.TRACE);

  try {
    Logger.getRootLogger().addAppender(appender);
    try {
      // Keep the response open; the appender writes each event until the request is aborted.
      Thread.sleep(Integer.MAX_VALUE);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  } finally {
    Logger.getRootLogger().removeAppender(appender);
  }
}
 
Example #2
Source File: HeliumBundleFactory.java    From zeppelin with Apache License 2.0
private synchronized void configureLogger() {
  org.apache.log4j.Logger npmLogger = org.apache.log4j.Logger.getLogger(
      "com.github.eirslett.maven.plugins.frontend.lib.DefaultYarnRunner");
  Enumeration appenders = org.apache.log4j.Logger.getRootLogger().getAllAppenders();

  if (appenders != null) {
    while (appenders.hasMoreElements()) {
      Appender appender = (Appender) appenders.nextElement();
      appender.addFilter(new Filter() {

        @Override
        public int decide(LoggingEvent loggingEvent) {
          if (loggingEvent.getLoggerName().contains("DefaultYarnRunner")) {
            return DENY;
          } else {
            return NEUTRAL;
          }
        }
      });
    }
  }
  npmLogger.addAppender(new WriterAppender(
      new PatternLayout("%m%n"),
      out
  ));
}
 
Example #3
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document with the netty wire logging category set to INFO and validates that no wire-level messages are captured.
 * 
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithInfoLevel() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).setLevel(Level.INFO);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).isEmpty();
    } finally {
        safeClose(client);
    }
}
 
Example #4
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document with the netty wire logging category set to ERROR and validates that no wire-level messages are captured.
 * 
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithErrorClient() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).setLevel(Level.ERROR);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).isEmpty();
    } finally {
        safeClose(client);
    }
}
 
Example #5
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithDebugLevelAtRoot() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(COSMOS_DB_LOGGING_CATEGORY).setLevel(Level.DEBUG);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).isEmpty();
    } finally {
        safeClose(client);
    }
}
 
Example #6
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithTraceLevelAtRoot() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(COSMOS_DB_LOGGING_CATEGORY).setLevel(Level.TRACE);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4);
    } finally {
        safeClose(client);
    }
}
 
Example #7
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document with the netty wire logging category set to WARN and validates that no wire-level messages are captured.
 * 
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithWarningLevel() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).setLevel(Level.WARN);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).isEmpty();
    } finally {
        safeClose(client);
    }
}
 
Example #8
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document with the netty wire logging category set to DEBUG and validates that no wire-level messages are captured.
 * 
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithDebugLevel() throws Exception {
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).setLevel(Level.DEBUG);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).isEmpty();

    } finally {
        safeClose(client);
    }
}
 
Example #9
Source File: HtmlLogFile.java    From olca-app with Mozilla Public License 2.0
private static WriterAppender createAppender(File logFile)
		throws IOException {
	HTMLLayout layout = new HTMLLayout();
	RollingFileAppender app = new RollingFileAppender(layout,
			logFile.getAbsolutePath(), true);
	app.setMaxFileSize("3MB");
	app.setMaxBackupIndex(3);
	return app;
}
 
Example #10
Source File: GenericTestUtils.java    From hadoop-ozone with Apache License 2.0
private LogCapturer(Logger logger) {
  this.logger = logger;
  Appender defaultAppender = Logger.getRootLogger().getAppender("stdout");
  if (defaultAppender == null) {
    defaultAppender = Logger.getRootLogger().getAppender("console");
  }
  final Layout layout = (defaultAppender == null) ? new PatternLayout() :
      defaultAppender.getLayout();
  this.appender = new WriterAppender(layout, sw);
  logger.addAppender(this.appender);
}
 
Example #11
Source File: HtmlLogFile.java    From olca-app with Mozilla Public License 2.0
static void create(Logger logger) {
	try {
		File logFile = createLogFile();
		WriterAppender appender = createAppender(logFile);
		logger.addAppender(appender);
	} catch (Exception e) {
		logger.log(Level.ERROR, e.getMessage(), e);
	}
}
 
Example #12
Source File: WriterAppenderManager.java    From pentaho-hadoop-shims with Apache License 2.0
public WriterAppenderManager( LogChannelInterface logChannelInterface, LogLevel logLevel, String name,
                              LogWriter logWriter ) {
  // Set up an appender that will send all pig log messages to Kettle's log
  // via logBasic().
  KettleLoggingPrintWriter klps = new KettleLoggingPrintWriter( logChannelInterface );
  pigToKettleAppender = new WriterAppender( new Log4jKettleLayout( true ), klps );

  Logger pigLogger = Logger.getLogger( "org.apache.pig" );
  Level log4jLevel = getLog4jLevel( logLevel );
  pigLogger.setLevel( log4jLevel );
  String logFileName = "pdi-" + name; //$NON-NLS-1$
  Log4jFileAppender appender = null;
  this.logWriter = logWriter;
  try {
    appender = LogWriter.createFileAppender( logFileName, true, false );
    logWriter.addAppender( appender );
    logChannelInterface.setLogLevel( logLevel );
    if ( pigLogger != null ) {
      pigLogger.addAppender( pigToKettleAppender );
    }
  } catch ( Exception e ) {
    logChannelInterface.logError( BaseMessages
      .getString( PKG, "JobEntryPigScriptExecutor.FailedToOpenLogFile", logFileName, e.toString() ) ); //$NON-NLS-1$
    logChannelInterface.logError( Const.getStackTracker( e ) );
  }
  this.appender = appender;
}
 
Example #13
Source File: TestRegionServerReportForDuty.java    From hbase with Apache License 2.0
LogCapturer(org.apache.log4j.Logger logger) {
  this.logger = logger;
  Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
  if (defaultAppender == null) {
    defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
  }
  final Layout layout = (defaultAppender == null) ? new PatternLayout() :
      defaultAppender.getLayout();
  this.appender = new WriterAppender(layout, sw);
  this.logger.addAppender(this.appender);
}
 
Example #14
Source File: FbLog4jTest.java    From takes with MIT License
/**
 * Helper method to set up stream.
 * @return ByteArrayOutputStream for logging
 */
private ByteArrayOutputStream setUpLoggerStream() {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final WriterAppender appender = new WriterAppender(
        new SimpleLayout(),
        baos
    );
    appender.setThreshold(Level.ERROR);
    appender.activateOptions();
    Logger.getRootLogger().addAppender(appender);
    return baos;
}
 
Example #15
Source File: PurgeUtilityLog.java    From pentaho-kettle with Apache License 2.0
private void init() {
  logName = "PurgeUtilityLog." + getThreadName();
  logger = Logger.getLogger( logName );
  logger.setLevel( logLevel );
  IPurgeUtilityLayout layout;
  if ( layoutClass == PurgeUtilityHTMLLayout.class ) {
    layout = new PurgeUtilityHTMLLayout( logLevel );
  } else {
    layout = new PurgeUtilityTextLayout( logLevel );
  }
  layout.setTitle( "Purge Utility Log" );
  writeAppender =
      new WriterAppender( (Layout) layout, new OutputStreamWriter( outputStream, Charset.forName( "utf-8" ) ) );
  logger.addAppender( writeAppender );
}
 
Example #16
Source File: TestLog4Json.java    From big-c with Apache License 2.0
public Logger createLogger(Writer writer) {
  TestLoggerRepository repo = new TestLoggerRepository();
  Logger logger = repo.getLogger("test");
  Log4Json layout = new Log4Json();
  WriterAppender appender = new WriterAppender(layout, writer);
  logger.addAppender(appender);
  return logger;
}
 
Example #17
Source File: TestYARNRunner.java    From big-c with Apache License 2.0
@Test(timeout=20000)
public void testWarnCommandOpts() throws Exception {
  Logger logger = Logger.getLogger(YARNRunner.class);
  
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  Layout layout = new SimpleLayout();
  Appender appender = new WriterAppender(layout, bout);
  logger.addAppender(appender);
  
  JobConf jobConf = new JobConf();
  
  jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true -Djava.library.path=foo");
  jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m -Djava.library.path=bar");
  
  YARNRunner yarnRunner = new YARNRunner(jobConf);
  
  @SuppressWarnings("unused")
  ApplicationSubmissionContext submissionContext =
      buildSubmitContext(yarnRunner, jobConf);
 
  String logMsg = bout.toString();
  assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + 
  		"yarn.app.mapreduce.am.admin-command-opts can cause programs to no " +
      "longer function if hadoop native libraries are used. These values " + 
  		"should be set as part of the LD_LIBRARY_PATH in the app master JVM " +
      "env using yarn.app.mapreduce.am.admin.user.env config settings."));
  assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + 
      "yarn.app.mapreduce.am.command-opts can cause programs to no longer " +
      "function if hadoop native libraries are used. These values should " +
      "be set as part of the LD_LIBRARY_PATH in the app master JVM env " +
      "using yarn.app.mapreduce.am.env config settings."));
}
 
Example #18
Source File: ListAppender.java    From ambari-logsearch with Apache License 2.0
@Override
protected void append(LoggingEvent event) {
  StringWriter stringWriter = new StringWriter();
  WriterAppender writerAppender = new WriterAppender(layout, stringWriter);
  writerAppender.append(event);
  logList.add(stringWriter.toString());
}
 
Example #19
Source File: LogLevelTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document with the netty wire logging category set to TRACE and validates that the expected wire-level log patterns are captured.
 * 
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithTraceLevel() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(NETWORK_LOGGING_CATEGORY).setLevel(Level.TRACE);
    StringWriter consoleWriter = new StringWriter();
    WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
    Logger.getLogger(NETWORK_LOGGING_CATEGORY).addAppender(appender);

    AsyncDocumentClient client = this.clientBuilder().build();
    try {
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = client
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3);
        assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4);

    } finally {
        safeClose(client);
    }
}
 
Example #20
Source File: ProxyHostTest.java    From azure-cosmosdb-java with MIT License
/**
 * This test creates a document through an HTTP proxy server with netty wire TRACE logging enabled and validates that the expected wire-level log patterns are captured.
 *
 * @throws Exception
 */
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void createDocumentWithValidHttpProxyWithNettyWireLogging() throws Exception {
    LogManager.getRootLogger().setLevel(Level.INFO);
    LogManager.getLogger(LogLevelTest.NETWORK_LOGGING_CATEGORY).setLevel(Level.TRACE);
    AsyncDocumentClient clientWithRightProxy = null;
    try {
        StringWriter consoleWriter = new StringWriter();
        WriterAppender appender = new WriterAppender(new PatternLayout(), consoleWriter);
        Logger.getLogger(LogLevelTest.NETWORK_LOGGING_CATEGORY).addAppender(appender);

        ConnectionPolicy connectionPolicy = new ConnectionPolicy();
        connectionPolicy.setProxy(PROXY_HOST, PROXY_PORT);
        clientWithRightProxy = new Builder().withServiceEndpoint(TestConfigurations.HOST)
                .withMasterKeyOrResourceToken(TestConfigurations.MASTER_KEY)
                .withConnectionPolicy(connectionPolicy)
                .withConsistencyLevel(ConsistencyLevel.Session).build();
        Document docDefinition = getDocumentDefinition();
        Observable<ResourceResponse<Document>> createObservable = clientWithRightProxy
                .createDocument(getCollectionLink(), docDefinition, null, false);
        ResourceResponseValidator<Document> validator = new ResourceResponseValidator.Builder<Document>()
                .withId(docDefinition.getId())
                .build();
        validateSuccess(createObservable, validator);

        assertThat(consoleWriter.toString()).contains(LogLevelTest.LOG_PATTERN_1);
        assertThat(consoleWriter.toString()).contains(LogLevelTest.LOG_PATTERN_2);
        assertThat(consoleWriter.toString()).contains(LogLevelTest.LOG_PATTERN_3);
    } finally {
        safeClose(clientWithRightProxy);
    }
}
 
Example #21
Source File: TestLog4Json.java    From hadoop with Apache License 2.0
public Logger createLogger(Writer writer) {
  TestLoggerRepository repo = new TestLoggerRepository();
  Logger logger = repo.getLogger("test");
  Log4Json layout = new Log4Json();
  WriterAppender appender = new WriterAppender(layout, writer);
  logger.addAppender(appender);
  return logger;
}
 
Example #22
Source File: TestYARNRunner.java    From hadoop with Apache License 2.0
@Test(timeout=20000)
public void testWarnCommandOpts() throws Exception {
  Logger logger = Logger.getLogger(YARNRunner.class);
  
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  Layout layout = new SimpleLayout();
  Appender appender = new WriterAppender(layout, bout);
  logger.addAppender(appender);
  
  JobConf jobConf = new JobConf();
  
  jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true -Djava.library.path=foo");
  jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m -Djava.library.path=bar");
  
  YARNRunner yarnRunner = new YARNRunner(jobConf);
  
  @SuppressWarnings("unused")
  ApplicationSubmissionContext submissionContext =
      buildSubmitContext(yarnRunner, jobConf);
 
  String logMsg = bout.toString();
  assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + 
  		"yarn.app.mapreduce.am.admin-command-opts can cause programs to no " +
      "longer function if hadoop native libraries are used. These values " + 
  		"should be set as part of the LD_LIBRARY_PATH in the app master JVM " +
      "env using yarn.app.mapreduce.am.admin.user.env config settings."));
  assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + 
      "yarn.app.mapreduce.am.command-opts can cause programs to no longer " +
      "function if hadoop native libraries are used. These values should " +
      "be set as part of the LD_LIBRARY_PATH in the app master JVM env " +
      "using yarn.app.mapreduce.am.env config settings."));
}
 
Example #23
Source File: Util.java    From spork with Apache License 2.0
public static void createLogAppender(Class clazz, String appenderName, Writer writer) {
    Logger logger = Logger.getLogger(clazz);
    WriterAppender writerAppender = new WriterAppender(new PatternLayout("%d [%t] %-5p %c %x - %m%n"), writer);
    writerAppender.setName(appenderName);
    logger.addAppender(writerAppender);
}
 
Example #24
Source File: LogIT.java    From fluo with Apache License 2.0
@Test
public void testReadLocks() {
  Column c1 = new Column("f1", "q1");
  Column c2 = new Column("f1", "q2");

  try (Transaction tx = client.newTransaction()) {
    tx.set("r1", c1, "v1");
    tx.set("r1", c2, "v2");
    tx.set("r2", c1, "v3");
    tx.set("r2", c2, "v4");
    tx.commit();
  }

  Logger logger = Logger.getLogger("fluo.tx");

  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (Transaction tx = client.newTransaction()) {
      Assert.assertEquals("v1", tx.withReadLock().gets("r1", c1));
      Assert.assertEquals(ImmutableMap.of(c1, "v3", c2, "v4"),
          tx.withReadLock().gets("r2", c1, c2));
      Assert.assertEquals(ImmutableMap.of(new RowColumn("r1", c2), "v2"),
          tx.withReadLock().gets(Arrays.asList(new RowColumn("r1", c2))));
      Map<String, Map<Column, String>> expected = new HashMap<>();
      expected.computeIfAbsent("r1", k -> new HashMap<>()).put(c1, "v1");
      expected.computeIfAbsent("r2", k -> new HashMap<>()).put(c1, "v3");
      Map<String, Map<Column, String>> actual =
          tx.withReadLock().gets(Arrays.asList("r1", "r2"), ImmutableSet.of(c1));
      Assert.assertEquals(expected, actual);
      tx.set("r3", c1, "345");
      tx.commit();
    }

  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String pattern = "";
  pattern += ".*txid: (\\d+) begin\\(\\) thread: \\d+";
  pattern += ".*txid: \\1 \\QwithReadLock().get(r1, f1 q1 ) -> v1\\E";
  pattern +=
      ".*txid: \\1 \\QwithReadLock().get(r2, [f1 q1 , f1 q2 ]) -> [f1 q1 =v3, f1 q2 =v4]\\E";
  pattern += ".*txid: \\1 \\QwithReadLock().get([r1 f1 q2 ]) -> [r1 f1 q2 =v2]\\E";
  pattern += ".*txid: \\1 \\QwithReadLock().get([r1, r2], [f1 q1 ]) -> "
      + "[r1=[f1 q1 =v1], r2=[f1 q1 =v3]]\\E";
  pattern += ".*txid: \\1 \\Qset(r3, f1 q1 , 345)\\E";
  pattern += ".*txid: \\1 \\Qcommit()\\E -> SUCCESSFUL commitTs: \\d+";
  pattern += ".*txid: \\1 \\Qclose()\\E.*";

  String origLogMsgs = writer.toString();
  String logMsgs = origLogMsgs.replace('\n', ' ');
  Assert.assertTrue(origLogMsgs, logMsgs.matches(pattern));
}
 
Example #25
Source File: LogIT.java    From fluo with Apache License 2.0
@Test
public void testBinaryLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx");

  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      le.execute(new BinaryLoader1());
    }

    miniFluo.waitForObservers();

    String pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$BinaryLoader1";
    pattern += ".*txid: \\1 \\Qdelete(r\\x051, c\\x021 c\\xf51 )\\E";
    pattern += ".*txid: \\1 \\Qget(r\\x062, c\\x021 c\\xf51 ) -> null\\E";
    pattern += ".*txid: \\1 \\Qget(r\\x062, [c\\x021 c\\xf51 , c\\x092 c\\xe52 ]) -> []\\E";
    pattern +=
        ".*txid: \\1 \\Qget([r\\x051, r\\x062], [c\\x021 c\\xf51 , c\\x092 c\\xe52 ]) -> []\\E";
    pattern += ".*txid: \\1 \\Qset(r\\x051, c\\x092 c\\xe52 , v\\x992)\\E";
    pattern += ".*txid: \\1 \\Qset(r\\x062, c\\x092 c\\xe52 , v\\xd91)\\E";
    pattern += ".*txid: \\1 \\QsetWeakNotification(r\\x062, c\\x092 c\\xe52 )\\E";
    pattern += ".*txid: \\1 \\Qcommit()\\E -> SUCCESSFUL commitTs: \\d+";
    pattern += ".*";

    String origLogMsgs = writer.toString();
    String logMsgs = origLogMsgs.replace('\n', ' ');
    Assert.assertTrue(origLogMsgs, logMsgs.matches(pattern));
    waitForClose(writer, pattern);

    final String v1 = "\\Qr\\x051=[c\\x092 c\\xe52 =v\\x992]\\E";
    final String v2 = "\\Qr\\x062=[c\\x092 c\\xe52 =v\\xd91]\\E";

    pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 \\Qtrigger: r\\x062 c\\x092 c\\xe52  4\\E";
    pattern += ".*txid: \\1 \\Qclass: org.apache.fluo.integration.log.LogIT$BinaryObserver\\E";
    pattern += ".*txid: \\1 \\Qget(r\\x051, c\\x092 c\\xe52 ) -> v\\x992\\E";
    pattern += ".*txid: \\1 \\Qget(r\\x062, [c\\x021 c\\xf51 , c\\x092 c\\xe52 ]) -> "
        + "[c\\x092 c\\xe52 =v\\xd91]\\E";
    pattern += ".*txid: \\1 \\Qget([r\\x051, r\\x062], [c\\x021 c\\xf51 , c\\x092 c\\xe52 ]) -> "
        + "[\\E(" + v1 + "|" + v2 + ")\\, (" + v1 + "|" + v2 + ")\\]";
    pattern += ".*txid: \\1 \\Qcommit() -> SUCCESSFUL commitTs: -1\\E";
    pattern += ".*";
    Assert.assertTrue(origLogMsgs, logMsgs.matches(pattern));
    waitForClose(writer, pattern);

  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }
}
 
Example #26
Source File: LogIT.java    From fluo with Apache License 2.0
@Test
public void testGetMethods() {

  Column c1 = new Column("f1", "q1");
  Column c2 = new Column("f1", "q2");

  try (Transaction tx = client.newTransaction()) {
    tx.set("r1", c1, "v1");
    tx.set("r1", c2, "v2");
    tx.set("r2", c1, "v3");
    tx.set("r2", c2, "v4");
    tx.commit();
  }

  Logger logger = Logger.getLogger("fluo.tx");

  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (Snapshot snap = client.newSnapshot()) {
      Map<RowColumn, String> ret1 =
          snap.gets(Arrays.asList(new RowColumn("r1", c1), new RowColumn("r2", c2)));
      Assert.assertEquals(
          ImmutableMap.of(new RowColumn("r1", c1), "v1", new RowColumn("r2", c2), "v4"), ret1);
      Map<String, Map<Column, String>> ret2 =
          snap.gets(Arrays.asList("r1", "r2"), ImmutableSet.of(c1));
      Assert.assertEquals(
          ImmutableMap.of("r1", ImmutableMap.of(c1, "v1"), "r2", ImmutableMap.of(c1, "v3")),
          ret2);
      Map<Column, String> ret3 = snap.gets("r1", ImmutableSet.of(c1, c2));
      Assert.assertEquals(ImmutableMap.of(c1, "v1", c2, "v2"), ret3);
      Assert.assertEquals("v1", snap.gets("r1", c1));
    }

    miniFluo.waitForObservers();
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
  pattern += ".*txid: \\1 \\Qget([r1 f1 q1 , r2 f1 q2 ]) -> [r2 f1 q2 =v4, r1 f1 q1 =v1]\\E";
  pattern += ".*txid: \\1 \\Qget([r1, r2], [f1 q1 ]) -> [r1=[f1 q1 =v1], r2=[f1 q1 =v3]]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, [f1 q1 , f1 q2 ]) -> [f1 q1 =v1, f1 q2 =v2]\\E";
  pattern += ".*txid: \\1 \\Qget(r1, f1 q1 ) -> v1\\E";
  pattern += ".*txid: \\1 close\\(\\).*";

  String origLogMsgs = writer.toString();
  String logMsgs = origLogMsgs.replace('\n', ' ');
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example #27
Source File: LogIT.java    From fluo with Apache License 2.0
@Test
public void testAllLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx");

  StringWriter writer = new StringWriter();
  WriterAppender appender =
      new WriterAppender(new PatternLayout("%d{ISO8601} [%-8c{2}] %-5p: %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      le.execute(new SimpleLoader());
    }

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      le.execute(new TriggerLoader(0));
    }
    miniFluo.waitForObservers();

    try (Snapshot snap = client.newSnapshot()) {
      Assert.assertTrue(Integer.parseInt(snap.gets("all", STAT_COUNT)) >= 1);
      Assert.assertEquals("1", snap.gets("r1", new Column("a", "b")));
    }

    String logMsgs = writer.toString();
    logMsgs = logMsgs.replace('\n', ' ');

    String pattern;

    // simple loader should cause this pattern in logs
    pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$SimpleLoader";
    pattern += ".*txid: \\1 get\\(r1, a b \\) -> null";
    pattern += ".*txid: \\1 set\\(r1, a b , 1\\)";
    pattern += ".*txid: \\1 commit\\(\\) -> SUCCESSFUL commitTs: \\d+";
    pattern += ".*";
    Assert.assertTrue(logMsgs.matches(pattern));
    waitForClose(writer, pattern);

    // trigger loader should cause this pattern in logs
    pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$TriggerLoader";
    pattern += ".*txid: \\1 set\\(0, stat count , 1\\)";
    pattern += ".*txid: \\1 setWeakNotification\\(0, stat count \\)";
    pattern += ".*txid: \\1 commit\\(\\) -> SUCCESSFUL commitTs: \\d+";
    pattern += ".*";
    Assert.assertTrue(logMsgs.matches(pattern));
    waitForClose(writer, pattern);

    // observer should cause this pattern in logs
    pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 trigger: 0 stat count  \\d+";
    pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$TestObserver";
    pattern += ".*txid: \\1 get\\(0, stat count \\) -> 1";
    pattern += ".*txid: \\1 get\\(all, stat count \\) -> null";
    pattern += ".*txid: \\1 set\\(all, stat count , 1\\)";
    pattern += ".*txid: \\1 commit\\(\\) -> SUCCESSFUL commitTs: \\d+";
    pattern += ".*";
    Assert.assertTrue(logMsgs.matches(pattern));
    waitForClose(writer, pattern);

    // two gets done by snapshot should cause this pattern
    pattern = ".*txid: (\\d+) begin\\(\\) thread: \\d+";
    pattern += ".*txid: \\1 get\\(all, stat count \\) -> 1";
    pattern += ".*txid: \\1 get\\(r1, a b \\) -> 1";
    pattern += ".*txid: \\1 close\\(\\).*";
    Assert.assertTrue(logMsgs.matches(pattern));
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }
}
 
Example #28
Source File: LogIT.java    From fluo with Apache License 2.0
@Test
public void testSummaryLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx.summary");

  StringWriter writer = new StringWriter();
  WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();

  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      for (int i = 0; i < 20; i++) {
        le.execute(new SimpleLoader());
        le.execute(new TriggerLoader(i));
      }
    }

    miniFluo.waitForObservers();
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String logMsgs = writer.toString();
  logMsgs = logMsgs.replace('\n', ' ');

  Assert.assertTrue(logMsgs.matches(".*txid: \\d+ thread : \\d+ "
      + "time: \\d+ \\(\\d+ \\d+\\) #ret: 0 #set: 1 #collisions: 0 waitTime: \\d+ "
      + "committed: true class: TriggerLoader.*"));
  Assert.assertTrue(logMsgs.matches(".*txid: \\d+ thread : \\d+ "
      + "time: \\d+ \\(\\d+ \\d+\\) #ret: 1 #set: 1 #collisions: 0 waitTime: \\d+ "
      + "committed: true class: SimpleLoader.*"));
  Assert.assertTrue(logMsgs.matches(".*txid: \\d+ thread : \\d+ "
      + "time: \\d+ \\(\\d+ \\d+\\) #ret: 1 #set: 1 #collisions: 1 waitTime: \\d+ "
      + "committed: false class: SimpleLoader.*"));
  Assert.assertTrue(logMsgs.matches(".*txid: \\d+ thread : \\d+ "
      + "time: \\d+ \\(\\d+ \\d+\\) #ret: 2 #set: 1 #collisions: 0 waitTime: \\d+ "
      + "committed: true class: TestObserver.*"));
  Assert.assertTrue(logMsgs.matches(".*txid: \\d+ thread : \\d+ "
      + "time: \\d+ \\(\\d+ \\d+\\) #ret: 2 #set: 1 #collisions: 1 waitTime: \\d+ "
      + "committed: false class: TestObserver.*"));
}
 
Example #29
Source File: LogIT.java    From fluo with Apache License 2.0 4 votes vote down vote up
@Test
public void testCollisionLogging() throws Exception {
  Logger logger = Logger.getLogger("fluo.tx.collisions");

  StringWriter writer = new StringWriter();
  WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);

  Level level = logger.getLevel();
  boolean additivity = logger.getAdditivity();
  try {
    logger.setLevel(Level.TRACE);
    logger.setAdditivity(false);
    logger.addAppender(appender);

    try (LoaderExecutor le = client.newLoaderExecutor()) {
      for (int i = 0; i < 20; i++) {
        le.execute(new SimpleBinaryLoader());
        le.execute(new TriggerLoader(i));
      }
    }

    miniFluo.waitForObservers();
  } finally {
    logger.removeAppender(appender);
    logger.setAdditivity(additivity);
    logger.setLevel(level);
  }

  String logMsgs = writer.toString();
  logMsgs = logMsgs.replace('\n', ' ');

  Assert.assertFalse(logMsgs.contains("TriggerLoader"));

  String pattern;

  pattern = ".*txid: (\\d+) class: org.apache.fluo.integration.log.LogIT\\$SimpleBinaryLoader";
  pattern += ".*txid: \\1 collisions: \\Q[r1\\x0d=[a \\x00\\x09 ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));

  pattern = ".*txid: (\\d+) trigger: \\d+ stat count  \\d+";
  pattern += ".*txid: \\1 class: org.apache.fluo.integration.log.LogIT\\$TestObserver";
  pattern += ".*txid: \\1 collisions: \\Q[all=[stat count ]]\\E.*";
  Assert.assertTrue(logMsgs.matches(pattern));
}
 
Example #30
Source File: SQLJoinIT.java    From pentaho-metadata with GNU Lesser General Public License v2.1
/**
 * Since the legacy join compare logic is non-deterministic, it is not safe to build any expected test results on the
 * generated SQL. The fallback is to validate that the legacy code path is traversed when the "legacy_join_order"
 * boolean is set to true in the model. To do this, the test verifies that the logging output is as expected.
 *
 * @throws PentahoMetadataException
 */
@Test
public void testLegacyJoinOrderLogic() throws PentahoMetadataException {
  Logger logger = Logger.getLogger( SQLJoin.class.getName() );
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Appender appender = new WriterAppender( new SimpleLayout(), out );
  logger.addAppender( appender );

  try {
    RelationshipType[] typesToTest = new RelationshipType[] { RelationshipType._0_N, RelationshipType._1_1 };
    for ( RelationshipType firstRel : typesToTest ) {
      for ( RelationshipType secondRel : typesToTest ) {
        final LogicalModel model = new LogicalModel();
        model.setId( "model_01" );
        Category mainCat = new Category();
        mainCat.setId( "cat_01" );
        model.getCategories().add( mainCat );

        LogicalTable[] tables = getTablesWithRelationships( firstRel, secondRel, mainCat, model );
        DatabaseMeta databaseMeta = new DatabaseMeta( "", "ORACLE", "Native", "", "", "", "", "" );
        Query myTest = new Query( null, model );
        myTest.getSelections().add( new Selection( null, tables[ 0 ].getLogicalColumns().get( 0 ), null ) );
        myTest.getSelections().add( new Selection( null, tables[ 1 ].getLogicalColumns().get( 0 ), null ) );
        myTest.getSelections().add( new Selection( null, tables[ 2 ].getLogicalColumns().get( 0 ), null ) );

        SqlGenerator generator = new SqlGenerator();

        // first verify the legacy logic is not used if the property is not set
        generator.generateSql( myTest, "en_US", null, databaseMeta );
        Assert.assertTrue( "Did not expect to use the legacy SQLJoin.compareTo() logic.", !out.toString().contains(
          "Using legacy SQLJoin compare." ) );

        // set the property and make sure the legacy logic is used
        model.setProperty( "legacy_join_order", true );
        generator.generateSql( myTest, "en_US", null, databaseMeta );
        Assert.assertTrue( "Should have used legacy SQLJoin.compareTo() logic.", out.toString().contains(
          "Using legacy SQLJoin compare." ) );
        out.reset(); // clear out accumulated logs for next run
      }
    }
  } finally {
    logger.removeAppender( appender );
  }

}