ch.qos.logback.classic.Level Scala Examples

The following examples show how to use ch.qos.logback.classic.Level. Each example is extracted from an open source project; the source file name, project, and license are noted in the header above it.
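As a quick orientation, here is a minimal, self-contained sketch of the pattern most of these examples share: obtain a logger through SLF4J, cast it to Logback's ch.qos.logback.classic.Logger, and adjust its Level at runtime. The logger name "com.example" is only a placeholder.

import ch.qos.logback.classic.{Level, Logger}
import org.slf4j.LoggerFactory

object LevelBasics extends App {
  // The SLF4J API exposes no setLevel; cast to the Logback implementation class.
  val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
  root.setLevel(Level.WARN)

  // Level.toLevel parses a level name, falling back to the supplied default
  // (or to DEBUG in the single-argument overload) when the name is unknown.
  val parsed = Level.toLevel("info", Level.ERROR)
  LoggerFactory.getLogger("com.example").asInstanceOf[Logger].setLevel(parsed)
}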
Example 1
Source File: ControlInterface.scala    From changestream   with MIT License
package changestream.actors

import akka.util.Timeout
import spray.httpx.SprayJsonSupport._
import spray.routing._
import akka.actor._
import ch.qos.logback.classic.Level
import changestream.{ChangeStream, ChangeStreamEventDeserializer, ChangeStreamEventListener}
import org.slf4j.LoggerFactory
import ch.qos.logback.classic.Logger
import spray.http.StatusCodes
import spray.routing.HttpService
import spray.json.DefaultJsonProtocol

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps

class ControlInterfaceActor extends Actor with ControlInterface {
  def actorRefFactory = context
  def receive = runRoute(controlRoutes)
}

trait ControlInterface extends HttpService with DefaultJsonProtocol {
  import ControlActor._

  protected val log = LoggerFactory.getLogger(getClass)

  // yes this is backward on purpose
  implicit val memoryInfoFormat = jsonFormat3(MemoryInfo)
  implicit val statusFormat = jsonFormat7(Status)
  implicit def executionContext = actorRefFactory.dispatcher
  implicit val timeout = Timeout(10 seconds)

  def controlRoutes: Route = {
    get {
      pathSingleSlash {
        detach() {
          complete(getStatus)
        }
      } ~
      path("status") {
        detach() {
          complete(getStatus)
        }
      } ~
      path("logs") {
        parameter('level) { level => setLogLevel(level) }
      }
    }
  }

  def setLogLevel(level: String) = {
    level.toLowerCase match {
      case "all" | "trace" | "debug" | "info" | "warn" | "error" | "off" =>
        val rootLogger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
        rootLogger.setLevel(Level.toLevel(level))
        complete("ChangeStream logging level has been set to {}.", level)
      case _ =>
        log.error("ControlActor received invalid log level {}.", level)
        complete(StatusCodes.BadRequest, s"Invalid log level: ${level}")
    }
  }

  def getStatus = {
    val storedPosition = Await.result(ChangeStreamEventListener.getStoredPosition, 60 seconds)

    Status(
      server = ChangeStream.serverName,
      clientId = ChangeStream.clientId,
      isConnected = ChangeStream.isConnected,
      binlogClientPosition = ChangeStreamEventListener.getCurrentPosition,
      lastStoredPosition = storedPosition.getOrElse(""),
      binlogClientSequenceNumber = ChangeStreamEventDeserializer.getCurrentSequenceNumber,
      memoryInfo = MemoryInfo(
        Runtime.getRuntime().totalMemory(),
        Runtime.getRuntime().maxMemory(),
        Runtime.getRuntime().freeMemory()
      )
    )
  }
}

object ControlActor {
  case class Status(
                     server: String,
                     clientId: Long,
                     isConnected: Boolean,
                     binlogClientPosition: String,
                     lastStoredPosition: String,
                     binlogClientSequenceNumber: Long,
                     memoryInfo: MemoryInfo
                   )

  case class MemoryInfo(heapSize: Long, maxHeap: Long, freeHeap: Long)
} 
Example 2
Source File: BackOfficeController.scala    From izanami   with Apache License 2.0
package controllers

import ch.qos.logback.classic.{Level, LoggerContext}
import controllers.actions.SecuredAuthContext
import domains.user.User
import org.slf4j.LoggerFactory
import play.api.libs.json.{JsArray, Json}
import play.api.mvc.{AbstractController, ActionBuilder, AnyContent, ControllerComponents}

class BackOfficeController(AuthAction: ActionBuilder[SecuredAuthContext, AnyContent], cc: ControllerComponents)
    extends AbstractController(cc) {

  def changeLogLevel(name: String, newLevel: Option[String]) = AuthAction { ctx =>
    if (isAdmin(ctx)) {
      val loggerContext =
        LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
      val _logger = loggerContext.getLogger(name)
      val oldLevel =
        Option(_logger.getLevel).map(_.levelStr).getOrElse(Level.OFF.levelStr)
      _logger.setLevel(newLevel.map(v => Level.valueOf(v)).getOrElse(Level.ERROR))
      Ok(Json.obj("name" -> name, "oldLevel" -> oldLevel, "newLevel" -> _logger.getLevel.levelStr))
    } else {
      Unauthorized
    }
  }

  def getLogLevel(name: String) = AuthAction { ctx =>
    if (isAdmin(ctx)) {
      val loggerContext =
        LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
      val _logger = loggerContext.getLogger(name)
      Ok(Json.obj("name" -> name, "level" -> _logger.getLevel.levelStr))
    } else {
      Unauthorized
    }
  }

  def getAllLoggers() = AuthAction { ctx =>
    if (isAdmin(ctx)) {
      import scala.jdk.CollectionConverters._
      val loggerContext =
        LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
      val rawLoggers = loggerContext.getLoggerList.asScala.toIndexedSeq
      val loggers = JsArray(rawLoggers.map(logger => {
        val level: String =
          Option(logger.getLevel).map(_.levelStr).getOrElse("OFF")
        Json.obj("name" -> logger.getName, "level" -> level)
      }))
      Ok(loggers)
    } else {
      Unauthorized
    }
  }

  private def isAdmin(ctx: SecuredAuthContext[AnyContent]) =
    ctx.auth.exists {
      case u: User => u.admin
      case _       => false
    }

} 
Example 3
Source File: Logger.scala    From shapenet-viewer   with MIT License
package edu.stanford.graphics.shapenet.util

import org.slf4j.LoggerFactory
import java.io.File

import org.slf4j.bridge.SLF4JBridgeHandler
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J


object Logger {
  // Signature reconstructed from the parameter uses in the method body (the
  // original declaration is truncated in this listing); the method name and
  // the default pattern are assumptions.
  def attachFileAppender(filename: String,
                         loggerName: String = org.slf4j.Logger.ROOT_LOGGER_NAME,
                         pattern: String = "%d %-5level %logger - %msg%n",
                         additive: Boolean = false) = {
    import ch.qos.logback.classic.spi.ILoggingEvent
    import ch.qos.logback.classic.Level
    import ch.qos.logback.classic.LoggerContext
    import ch.qos.logback.classic.encoder.PatternLayoutEncoder
    import ch.qos.logback.core.FileAppender

    // Make sure log directory is created
    val file: File = new File(filename)
    val parent: File = file.getParentFile
    if (parent != null) parent.mkdirs

    val loggerContext = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]
    val logger = loggerContext.getLogger(loggerName)

    // Setup pattern
    val patternLayoutEncoder = new PatternLayoutEncoder()
    patternLayoutEncoder.setPattern(pattern)
    patternLayoutEncoder.setContext(loggerContext)
    patternLayoutEncoder.start()

    // Setup appender
    val fileAppender = new FileAppender[ILoggingEvent]()
    fileAppender.setFile(filename)
    fileAppender.setEncoder(patternLayoutEncoder)
    fileAppender.setContext(loggerContext)
    fileAppender.start()

    // Attach appender to logger
    logger.addAppender(fileAppender)
    //logger.setLevel(Level.DEBUG)
    logger.setAdditive(additive)

    fileAppender.getName
  }

  def detachAppender(appenderName: String, loggerName: String = org.slf4j.Logger.ROOT_LOGGER_NAME): Unit = {
    import ch.qos.logback.classic.LoggerContext

    val loggerContext = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]
    val logger = loggerContext.getLogger(loggerName)
    logger.detachAppender(appenderName)
  }

  def getLogger(clazz: Class[_]): org.slf4j.Logger = {
    LoggerFactory.getLogger(clazz)
  }

  def getLogger(name: String): org.slf4j.Logger = {
    LoggerFactory.getLogger(name)
  }
}

trait Loggable {
  lazy val logger = Logger.getLogger(this.getClass)

  def startTrack(name: String): Unit = {
    logger.debug("Starting " + name)
  }

  def endTrack(name: String): Unit = {
    logger.debug("Finished " + name)
  }
} 
Example 4
Source File: LogPublisherHub.scala    From vamp   with Apache License 2.0
package io.vamp.common.akka

import akka.actor.{ ActorRef, ActorSystem }
import ch.qos.logback.classic.filter.ThresholdFilter
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.classic.{ Level, LoggerContext, Logger ⇒ LogbackLogger }
import ch.qos.logback.core.AppenderBase
import io.vamp.common.Namespace
import org.slf4j.{ Logger, LoggerFactory }

import scala.collection.mutable

object LogPublisherHub {

  private val logger = LoggerFactory.getLogger(LogPublisherHub.getClass)

  private val context = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
  private val rootLogger = context.getLogger(Logger.ROOT_LOGGER_NAME)

  private val sessions: mutable.Map[String, LogPublisher] = new mutable.HashMap()

  def subscribe(to: ActorRef, level: String, loggerName: Option[String], encoder: (ILoggingEvent) ⇒ AnyRef)(implicit actorSystem: ActorSystem, namespace: Namespace): Unit = {
    val appenderLevel = Level.toLevel(level, Level.INFO)
    val appenderLogger = loggerName.map(context.getLogger).getOrElse(rootLogger)

    val exists = sessions.get(to.toString).exists { publisher ⇒
      publisher.level == appenderLevel && publisher.logger.getName == appenderLogger.getName
    }

    if (!exists) {
      unsubscribe(to)
      if (appenderLevel != Level.OFF) {
        logger.info(s"Starting log publisher [${appenderLevel.levelStr}] '${appenderLogger.getName}': $to")
        val publisher = LogPublisher(to, appenderLogger, appenderLevel, encoder)
        publisher.start()
        sessions.put(to.toString, publisher)
      }
    }
  }

  def unsubscribe(to: ActorRef): Unit = {
    sessions.remove(to.toString).foreach { publisher ⇒
      logger.info(s"Stopping log publisher: $to")
      publisher.stop()
    }
  }
}

private case class LogPublisher(to: ActorRef, logger: LogbackLogger, level: Level, encoder: (ILoggingEvent) ⇒ AnyRef)(implicit actorSystem: ActorSystem, namespace: Namespace) {

  private val filter = new ThresholdFilter()
  filter.setLevel(level.levelStr)

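  // Appender that forwards each logging event to the subscriber actor; the
  // threshold filter attached below drops events beneath the requested level.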
  private val appender = new AppenderBase[ILoggingEvent] {
    override def append(loggingEvent: ILoggingEvent) = to ! encoder(loggingEvent)
  }

  appender.addFilter(filter)
  appender.setName(to.toString)

  def start() = {
    val context = logger.getLoggerContext
    filter.setContext(context)
    appender.setContext(context)
    filter.start()
    appender.start()
    logger.addAppender(appender)
  }

  def stop() = {
    appender.stop()
    filter.stop()
    logger.detachAppender(appender)
  }
} 
Example 5
Source File: LogbackAppender.scala    From rollbar-scala   with MIT License
package com.storecove.rollbar.appenders

import ch.qos.logback.classic.Level
import ch.qos.logback.classic.spi.{ILoggingEvent, ThrowableProxy}
import ch.qos.logback.core.UnsynchronizedAppenderBase
import org.apache.log4j.helpers.LogLog


class LogbackAppender extends UnsynchronizedAppenderBase[ILoggingEvent] with AbstractAppender {

    override def append(event: ILoggingEvent): Unit = {
        if (enabled) {
            try {
                if (event.getLevel.isGreaterOrEqual(notifyLevel)) {
                    val hasThrowable = event.getThrowableProxy != null
                    if (!onlyThrowable || hasThrowable) {
                        rollbarNotifier.notify(event.getLevel.toString, event.getMessage, getThrowable(event), getMDCContext)
                    }
                }
            } catch {
                case e: Exception => LogLog.error("Error sending error notification! error=" + e.getClass.getName + " with message=" + e.getMessage)
            }
        }
    }

    override def start(): Unit = {
        if (this.apiKey == null || this.apiKey.isEmpty) {
            this.addError("No apiKey set for the appender named [" + getName + "].")
        } else if (this.environment == null || this.environment.isEmpty) {
            this.addError("No environment set for the appender named [" + getName + "].")
        } else {
            super.start()
        }
    }

    protected def getThrowable(event: ILoggingEvent): Option[Throwable] = {
        event.getThrowableProxy match {
            case throwableProxy: ThrowableProxy => Some(throwableProxy.getThrowable)
            case _ => None
        }
    }

    override def notifyLevel: Level = Level.toLevel(notifyLevelString)

    def setNotifyLevel(notifyLevel: String): Unit = notifyLevelString = notifyLevel

} 
Example 6
Source File: LoggerUtils.scala    From mimir   with Apache License 2.0
package mimir.util

import org.slf4j.LoggerFactory
import ch.qos.logback.classic.{Level, Logger}

object LoggerUtils {

  val TRACE = Level.TRACE
  val DEBUG = Level.DEBUG
  val INFO  = Level.INFO
  val WARN  = Level.WARN
  val ERROR = Level.ERROR

  def trace[A]()(body: => A) = body
  def trace[A](loggerName: String*)(body: => A): A =
    enhance(loggerName, Level.TRACE){ body } 

  def debug[A]()(body: => A) = body
  def debug[A](loggerName: String*)(body: => A): A =
    enhance(loggerName, Level.DEBUG){ body }

  def error[A]()(body: => A) = body
  def error[A](loggerName: String*)(body: => A): A =
    enhance(loggerName, Level.ERROR){ body }

  def enhance[A](loggerName: String, level: Level)(body: => A): A =
  {
    val loggerBase = LoggerFactory.getLogger(loggerName)
    if(loggerBase.isInstanceOf[Logger]){
      val logger = loggerBase.asInstanceOf[Logger]
      val originalLevel = logger.getLevel();
      logger.setLevel(level)
      val ret = body
      logger.setLevel(originalLevel)
      ret
    } else {
      loggerBase.warn(s"Unable to set logger is instance of ${loggerBase.getClass}")
      body
    }
  }

  def enhance[A](loggerName: Seq[String], level: Level)(body: => A): A =
  {
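    // Nest one single-logger enhance per name so that each logger's original
    // level is restored as the nested calls unwind.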
    if(loggerName.isEmpty){ body }
    else { 
      enhance(loggerName.tail, level){ 
        enhance(loggerName.head, level)(body) 
      }
    }
  }


} 
Example 7
Source File: LogColors.scala    From graphql-gateway   with Apache License 2.0
package sangria.gateway.util

import ch.qos.logback.classic.Level
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.pattern.color.ForegroundCompositeConverterBase
import ch.qos.logback.core.pattern.color.ANSIConstants._

class LogColors extends ForegroundCompositeConverterBase[ILoggingEvent] {
  override def getForegroundColorCode(event: ILoggingEvent): String =
    event.getLevel.toInt match {
      case Level.ERROR_INT ⇒
        BOLD + RED_FG
      case Level.WARN_INT ⇒
        YELLOW_FG
      case Level.INFO_INT ⇒
        GREEN_FG
      case Level.DEBUG_INT ⇒
        CYAN_FG
      case _ ⇒
        DEFAULT_FG
    }
} 
Example 8
Source File: JsonEncoderSpec.scala    From logback-json-logger   with Apache License 2.0
package uk.gov.hmrc.play.logging
import java.io.{PrintWriter, StringWriter}
import java.net.InetAddress

import ch.qos.logback.classic.Level
import ch.qos.logback.classic.spi.{ILoggingEvent, ThrowableProxy}
import ch.qos.logback.core.ContextBase
import org.apache.commons.lang3.time.FastDateFormat
import org.mockito.Mockito.when
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.mockito.MockitoSugar
import play.api.libs.json.{JsLookupResult, Json}

import scala.collection.JavaConverters._

class JsonEncoderSpec extends AnyWordSpec with Matchers with MockitoSugar {

  "Json-encoded message" should {
    "contain all required fields" in {

      val jsonEncoder = new JsonEncoder()
      val event       = mock[ILoggingEvent]

      when(event.getTimeStamp).thenReturn(1)
      when(event.getLevel).thenReturn(Level.INFO)
      when(event.getThreadName).thenReturn("my-thread")
      when(event.getFormattedMessage).thenReturn("my-message")
      when(event.getLoggerName).thenReturn("logger-name")
      when(event.getMDCPropertyMap).thenReturn(Map("myMdcProperty" -> "myMdcValue").asJava)

      val testException = new Exception("test-exception")
      val stringWriter  = new StringWriter()
      testException.printStackTrace(new PrintWriter(stringWriter))
      when(event.getThrowableProxy).thenReturn(new ThrowableProxy(testException))

      jsonEncoder.setContext {
        val ctx = new ContextBase()
        ctx.putProperty("myKey", "myValue")
        ctx
      }

      val result       = new String(jsonEncoder.encode(event), "UTF-8")
      val resultAsJson = Json.parse(result)

      (resultAsJson \ "app").asString           shouldBe "my-app-name"
      (resultAsJson \ "hostname").asString      shouldBe InetAddress.getLocalHost.getHostName
      (resultAsJson \ "timestamp").asString     shouldBe FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss.SSSZZ").format(1)
      (resultAsJson \ "message").asString       shouldBe "my-message"
      (resultAsJson \ "exception").asString     should include("test-exception")
      (resultAsJson \ "exception").asString     should include("java.lang.Exception")
      (resultAsJson \ "exception").asString     should include(stringWriter.toString)
      (resultAsJson \ "logger").asString        shouldBe "logger-name"
      (resultAsJson \ "thread").asString        shouldBe "my-thread"
      (resultAsJson \ "level").asString         shouldBe "INFO"
      (resultAsJson \ "mykey").asString         shouldBe "myValue"
      (resultAsJson \ "mymdcproperty").asString shouldBe "myMdcValue"

    }
  }

  implicit class JsLookupResultOps(jsLookupResult: JsLookupResult) {
    def asString: String = jsLookupResult.get.as[String]
  }

} 
Example 9
Source File: MonitorActor.scala    From CM-Well   with Apache License 2.0
package k.grid.monitoring

import akka.actor.{Actor, Cancellable}
import ch.qos.logback.classic.{Level, Logger}
import com.typesafe.scalalogging.LazyLogging
import org.slf4j.LoggerFactory
import akka.pattern.pipe
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object MonitorActor {
  val name = "MonitorActor"
}

object SetNodeLogLevel {
  val lvlMappings = Map(
    "OFF" -> Level.OFF,
    "ERROR" -> Level.ERROR,
    "WARN" -> Level.WARN,
    "INFO" -> Level.INFO,
    "DEBUG" -> Level.DEBUG,
    "TRACE" -> Level.TRACE,
    "ALL" -> Level.ALL
  )

  def levelTranslator(lvl: String): Option[Level] = {
    lvlMappings.get(lvl.toUpperCase)
  }
}

case object GetNodeLogLevel
case class NodeLogLevel(lvl: String)
case class SetNodeLogLevel(level: Level, levelDuration: Option[Int] = Some(10))

class MonitorActor extends Actor with LazyLogging {
  private[this] var originalLogLevel: Level = _
  private val editableLogger = "ROOT"
  private[this] var scheduledLogLevelReset: Cancellable = _

  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    //FIXME: what if logger is not logback? use jmx or write more defensive code
    originalLogLevel = {
      val l = LoggerFactory.getLogger(editableLogger)
      val f = l.getClass.getProtectionDomain.getCodeSource.getLocation.getFile
      l.info("logger is loaded from: " + f)
      l.asInstanceOf[ch.qos.logback.classic.Logger].getLevel
    }
  }

  override def receive: Receive = {
    case PingChildren =>
      MonitorUtil.pingChildren.pipeTo(sender)

    case SetNodeLogLevel(lvl, duration) =>
      if (scheduledLogLevelReset != null) {
        scheduledLogLevelReset.cancel()
        scheduledLogLevelReset = null
      }

      logger.info(s"Setting $editableLogger to log level $lvl")
      duration.foreach { d =>
        logger.info(s"Scheduling $editableLogger to be in level $originalLogLevel in $d minutes")
        scheduledLogLevelReset =
          context.system.scheduler.scheduleOnce(d.minutes, self, SetNodeLogLevel(originalLogLevel, None))
      }

      LoggerFactory.getLogger(editableLogger).asInstanceOf[ch.qos.logback.classic.Logger].setLevel(lvl)
      //change also the log level of the akka logger
      val akkaLoggerName = "akka"
      LoggerFactory.getLogger(akkaLoggerName) match {
        case akkaLogger: Logger =>
          if (akkaLogger != null)
            akkaLogger.setLevel(lvl)
        case _ =>
      }
    case GetNodeLogLevel =>
      val lvl = LoggerFactory.getLogger(editableLogger).asInstanceOf[ch.qos.logback.classic.Logger].getLevel
      sender ! NodeLogLevel(lvl.toString)
  }
} 
Example 10
Source File: NaomiTest.scala    From Raphtory   with Apache License 2.0
package com.raphtory.tests

import akka.actor.{ActorSystem, Props}
import ch.qos.logback.classic.Level
import com.raphtory.core.analysis.{AnalysisManager, AnalysisRestApi}
import com.raphtory.core.components.ClusterManagement.{RaphtoryReplicator, WatchDog, WatermarkManager}
import kamon.Kamon
import org.slf4j.LoggerFactory

object NaomiTest extends App{

  Kamon.init()

  val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
  root.setLevel(Level.ERROR)

  val partitionNumber = 1
  val minimumRouters  = 1

  var Analyser = "com.raphtory.core.analysis.Algorithms.ConnectedComponents"
  Analyser = "com.raphtory.core.analysis.Algorithms.DegreeBasic"

  val start = 1470837600000L
  val end =   31525368897000L

  val jump =    3600000
  var SpoutName = "com.raphtory.examples.stackex.SXSpout"
  var routerClassName = "com.raphtory.examples.stackex.SXRouter"

  val system = ActorSystem("Single-Node-test")

  system.actorOf(Props(new WatermarkManager(managerCount = 1)),"WatermarkManager")
  system.actorOf(Props(new WatchDog(partitionNumber, minimumRouters)), "WatchDog")
  system.actorOf(Props(RaphtoryReplicator("Router", 1, routerClassName)), s"Routers")
  system.actorOf(Props(RaphtoryReplicator("Partition Manager", 1)), s"PartitionManager")
  system.actorOf(Props(Class.forName(SpoutName)), "Spout")
  val analysisManager = system.actorOf(Props[AnalysisManager], s"AnalysisManager")
  AnalysisRestApi(system)


} 
Example 11
Source File: GabRawSpout.scala    From Raphtory   with Apache License 2.0
package com.raphtory.examples.gab.actors

import akka.actor.Cancellable
import ch.qos.logback.classic.Level
import com.mongodb.casbah.Imports.MongoConnection
import com.mongodb.casbah.Imports._
import com.raphtory.core.components.Spout.SpoutTrait
import org.slf4j.LoggerFactory

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps

final class GabRawSpout extends SpoutTrait {

  //private val redis    = new RedisClient("moe", 6379)
  //private val redisKey = "gab-posts"
  private var sched: Cancellable = null

  //val options: MongoClientOptions = MongoClientOptions.builder.addCommandListener(new LoggingClusterListener).build()
  //ddClusterListener(new LoggingClusterListener).build
  private val mongoConn = MongoConnection("138.37.32.67", 27017)
  private val mongoColl = mongoConn("gab")("posts")
  private var window    = 1000
  private var postMin   = 0
  private var postMax   = 1001

  val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
  root.setLevel(Level.ERROR)
  // private val mongoLogger = Logger.getLogger("org.mongodb.driver.cluster")
  // mongoLogger.setLevel(Level.OFF)

  override protected def ProcessSpoutTask(message: Any): Unit = message match {
    case StartSpout  => AllocateSpoutTask(Duration(1, MILLISECONDS), "parsePost")
    case "parsePost" => running()
  }

  def running(): Unit = {
    val count = getNextPosts()
    postMin += window
    postMax += window
    println(s"Current min post is $postMin, max post is $postMax, last call retrieved $count posts")
    AllocateSpoutTask(Duration(10, MILLISECONDS), "parsePost")

  }

  private def getNextPosts(): Int = {
    var count = 0
    for (x <- mongoColl.find("_id" $lt postMax $gt postMin))
      try {
        val data = x.get("data").toString.drop(2).dropRight(1).replaceAll("""\\"""", "").replaceAll("""\\""", "")
        count += 1
        //println(data)
        sendTuple(data)
      } catch {
        case e: Throwable =>
          println("Cannot parse record")
      }
    return count
  }

}
//redis-server --dir /home/moe/ben/gab --dbfilename gab.rdb --daemonize yes 
Example 12
Source File: LogbackConfigurator.scala    From apalache   with Apache License 2.0
package at.forsyte.apalache.infra.log

import ch.qos.logback.classic.filter.ThresholdFilter
import ch.qos.logback.classic.spi.{Configurator, ILoggingEvent}
import ch.qos.logback.classic.{Level, LoggerContext, PatternLayout}
import ch.qos.logback.core.encoder.LayoutWrappingEncoder
import ch.qos.logback.core.spi.ContextAwareBase
import ch.qos.logback.core.{ConsoleAppender, FileAppender}
import org.slf4j.LoggerFactory


class LogbackConfigurator extends ContextAwareBase with Configurator {
  def configureDefaultContext(): Unit = {
    val loggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    setContext(loggerContext)
    configure(loggerContext)
  }

  override def configure(loggerContext: LoggerContext): Unit = {
    addInfo("Setting up a logback configuration")
    loggerContext.reset() // forget everything that was configured automagically
    val rootLogger = loggerContext.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME)
    val consoleAppender = mkConsoleAppender(loggerContext)
    // only warnings at the root level
    rootLogger.setLevel(Level.WARN)
    rootLogger.addAppender(mkFileAppender(loggerContext))
    rootLogger.addAppender(consoleAppender)
    // debug messages at the apalache level
    val apalacheLogger = loggerContext.getLogger("at.forsyte.apalache")
    apalacheLogger.setLevel(Level.DEBUG)
  }

  private def mkConsoleAppender(loggerContext: LoggerContext): ConsoleAppender[ILoggingEvent] = {
    // set up ConsoleAppender
    val app = new ConsoleAppender[ILoggingEvent]()
    app.setContext(loggerContext)
    app.setName("console")
    val filter = new ThresholdFilter()
    filter.setContext(loggerContext)
    filter.setLevel(Level.INFO.levelStr)
    filter.start()
    app.addFilter(filter)
    val layout = new PatternLayout()
    layout.setPattern("%-65msg %.-1level@%d{HH:mm:ss.SSS}%n")
    layout.setContext(loggerContext)
    layout.start()
    val encoder = new LayoutWrappingEncoder[ILoggingEvent]()
    encoder.setContext(loggerContext)
    encoder.setLayout(layout)
    app.setEncoder(encoder)
    app.start()
    app
  }

  private def mkFileAppender(loggerContext: LoggerContext): FileAppender[ILoggingEvent] = {
    // set up FileAppender
    val app = new FileAppender[ILoggingEvent]()
    app.setContext(loggerContext)
    app.setName("file")
    app.setFile("detailed.log")
    val encoder = new LayoutWrappingEncoder[ILoggingEvent]()
    encoder.setContext(loggerContext)
    val layout = new PatternLayout()
    layout.setPattern("%d{HH:mm:ss.SSS} [%thread] %-5level %logger{12} - %msg%n")
    layout.setContext(loggerContext)
    layout.start()
    encoder.setLayout(layout)
    app.setEncoder(encoder)
    val filter = new ThresholdFilter()
    filter.setLevel(Level.DEBUG.levelStr)
    filter.setContext(loggerContext)
    filter.start()
    app.addFilter(filter)
    app.start()
    app
  }
} 
Example 13
Source File: TraceTokenMDCLoggingTest.scala    From akka-http-extensions   with Apache License 2.0
package com.lonelyplanet.akka.http.extensions.logging

import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.classic.{Level, Logger, LoggerContext}
import ch.qos.logback.core.AppenderBase
import com.lonelyplanet.akka.http.extensions.tracing.{MaybeTraceTokenHolder, TraceToken}
import org.scalatest.{FlatSpec, Matchers}
import org.slf4j.LoggerFactory

import scala.util.Random

class TraceTokenMDCLoggingSpec extends FlatSpec with Matchers {
  it should "log trace token if one is present" in {
    withInMemoryAppender { appender =>
      val traceToken = TraceToken.random
      val loggingTester = new LoggingTester(Some(traceToken))
      val message = randomMessage

      loggingTester.doLog(message)

      appender.output should not be empty
      appender.output.lines.foreach({ line =>
        line.contains(message) shouldBe true
        line.contains(traceToken.toString) shouldBe true
      })
    }
  }

  private def withInMemoryAppender(f: (InMemoryLoggingAppender) => Unit) = {
    val loggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    val appender = new InMemoryLoggingAppender
    appender.setContext(loggerContext)

    val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
    logger.setLevel(Level.ALL)
    logger.detachAndStopAllAppenders()

    logger.addAppender(appender)
    appender.start()
    f(appender)
    logger.detachAppender(appender)
    appender.stop()
  }

  private def randomMessage = Random.alphanumeric.take(20).mkString("")
}

private class InMemoryLoggingAppender extends AppenderBase[ILoggingEvent] {
  private val builder = new StringBuilder

  override def append(event: ILoggingEvent): Unit = {
    builder.append(event.getMessage)
    builder.append(" ")
    if (event.getMDCPropertyMap.containsKey(TraceToken.MDCKey)) {
      builder.append(event.getMDCPropertyMap.get(TraceToken.MDCKey))
    }
    builder.append("\n")
  }

  def output: String = builder.toString()
  def clear(): Unit = builder.clear()
}

private class LoggingTester(maybeTraceTokenFunc: => Option[TraceToken]) extends TraceTokenMDCLogging with MaybeTraceTokenHolder {
  override def maybeTraceToken: Option[TraceToken] = maybeTraceTokenFunc
  def doLog(message: String): Unit = {
    logger.trace(message)
    logger.debug(message)
    logger.info(message)
    logger.warn(message)
    logger.error(message)
  }
} 
Example 14
Source File: ChainAppConfigTest.scala    From bitcoin-s   with MIT License
package org.bitcoins.chain.config

import java.nio.file.Files

import akka.actor.ActorSystem
import ch.qos.logback.classic.Level
import com.typesafe.config.ConfigFactory
import org.bitcoins.core.config.{MainNet, RegTest, TestNet3}
import org.bitcoins.testkit.chain.ChainUnitTest
import org.bitcoins.testkit.util.FileUtil
import org.scalatest.FutureOutcome

class ChainAppConfigTest extends ChainUnitTest {
  val tempDir = Files.createTempDirectory("bitcoin-s")
  val config = ChainAppConfig(directory = tempDir, useLogbackConf = false)

  //if we don't turn off logging here, isInitF a few lines down will
  //produce some nasty error logs since we are testing initialization
  //of the chain project
  val chainAppConfig = appConfig.withOverrides(
    ConfigFactory.parseString("bitcoin-s.logging.level=OFF"))

  behavior of "ChainAppConfig"

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
    withChainFixture(test)

  it must "initialize our chain project" in { _ =>
    val isInitF = chainAppConfig.isInitialized()

    for {
      isInit <- isInitF
      _ = assert(!isInit)
      _ <- chainAppConfig.initialize()
      isInitAgain <- chainAppConfig.isInitialized()
    } yield assert(isInitAgain)
  }

  it must "be overridable" in { _ =>
    assert(config.network == RegTest)

    val otherConf = ConfigFactory.parseString("bitcoin-s.network = testnet3")
    val withOther: ChainAppConfig = config.withOverrides(otherConf)
    assert(withOther.network == TestNet3)

    val mainnetConf = ConfigFactory.parseString("bitcoin-s.network = mainnet")
    val mainnet: ChainAppConfig = withOther.withOverrides(mainnetConf)
    assert(mainnet.network == MainNet)
  }

  it must "be overridable with multiple levels" in { _ =>
    val testnet = ConfigFactory.parseString("bitcoin-s.network = testnet3")
    val mainnet = ConfigFactory.parseString("bitcoin-s.network = mainnet")
    val overriden: ChainAppConfig = config.withOverrides(testnet, mainnet)
    assert(overriden.network == MainNet)

  }

  it must "have user data directory configuration take precedence" in { _ =>
    val tempDir = Files.createTempDirectory("bitcoin-s")
    val tempFile = Files.createFile(tempDir.resolve("bitcoin-s.conf"))
    val confStr = """
                    | bitcoin-s {
                    |   network = testnet3
                    |   
                    |   logging {
                    |     level = off
                    |
                    |     p2p = warn
                    |   }
                    | }
    """.stripMargin
    val _ = Files.write(tempFile, confStr.getBytes())

    val appConfig = ChainAppConfig(directory = tempDir, useLogbackConf = false)

    assert(appConfig.datadir == tempDir.resolve("testnet3"))
    assert(appConfig.network == TestNet3)
    assert(appConfig.logLevel == Level.OFF)
    assert(appConfig.p2pLogLevel == Level.WARN)
  }

  override def afterAll: Unit = {

    FileUtil.deleteTmpDir(chainAppConfig.baseDatadir)
  }
} 
Example 15
Source File: NodeAppConfigTest.scala    From bitcoin-s   with MIT License
package org.bitcoins.node

import org.bitcoins.testkit.util.{BitcoinSAsyncTest, BitcoinSUnitTest}
import org.bitcoins.node.config.NodeAppConfig
import org.bitcoins.core.config.TestNet3
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import org.bitcoins.core.config.RegTest
import org.bitcoins.core.config.MainNet
import ch.qos.logback.classic.Level
import java.nio.file.Files

class NodeAppConfigTest extends BitcoinSAsyncTest {
  val tempDir = Files.createTempDirectory("bitcoin-s")

  val config: NodeAppConfig =
    NodeAppConfig(directory = tempDir, useLogbackConf = false)

  it must "be overridable" in {
    assert(config.network == RegTest)

    val otherConf = ConfigFactory.parseString("bitcoin-s.network = testnet3")
    val withOther: NodeAppConfig = config.withOverrides(otherConf)
    assert(withOther.network == TestNet3)

    val mainnetConf = ConfigFactory.parseString("bitcoin-s.network = mainnet")
    val mainnet: NodeAppConfig = withOther.withOverrides(mainnetConf)
    assert(mainnet.network == MainNet)
  }

  it must "be overridable with multiple levels" in {
    val testnet = ConfigFactory.parseString("bitcoin-s.network = testnet3")
    val mainnet = ConfigFactory.parseString("bitcoin-s.network = mainnet")
    val overriden: NodeAppConfig = config.withOverrides(testnet, mainnet)
    assert(overriden.network == MainNet)

  }

  it must "have user data directory configuration take precedence" in {

    val tempDir = Files.createTempDirectory("bitcoin-s")
    val tempFile = Files.createFile(tempDir.resolve("bitcoin-s.conf"))
    val confStr = """
                    | bitcoin-s {
                    |   network = testnet3
                    |   
                    |   logging {
                    |     level = off
                    |
                    |     p2p = warn
                    |   }
                    | }
    """.stripMargin
    val _ = Files.write(tempFile, confStr.getBytes())

    val appConfig = NodeAppConfig(directory = tempDir, useLogbackConf = false)

    assert(appConfig.datadir == tempDir.resolve("testnet3"))
    assert(appConfig.network == TestNet3)
    assert(appConfig.logLevel == Level.OFF)
    assert(appConfig.p2pLogLevel == Level.WARN)
  }
} 
Example 16
Source File: LogCollector.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.testing

import ch.qos.logback.classic.Level
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.AppenderBase

import scala.beans.BeanProperty
import scala.collection.concurrent.TrieMap
import scala.collection.mutable
import scala.reflect.ClassTag

object LogCollector {

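  // Collected log entries, keyed first by test class name and then by logger
  // name; each leaf accumulates (level, message) pairs.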
  private val log =
    TrieMap
      .empty[String, TrieMap[String, mutable.Builder[(Level, String), Vector[(Level, String)]]]]

  def read[Test, Logger](
      implicit test: ClassTag[Test],
      logger: ClassTag[Logger]): IndexedSeq[(Level, String)] =
    log
      .get(test.runtimeClass.getName)
      .flatMap(_.get(logger.runtimeClass.getName))
      .fold(IndexedSeq.empty[(Level, String)])(_.result())

  def clear[Test](implicit test: ClassTag[Test]): Unit = {
    log.remove(test.runtimeClass.getName)
    ()
  }

}

final class LogCollector extends AppenderBase[ILoggingEvent] {

  @BeanProperty
  var test: String = _

  override def append(e: ILoggingEvent): Unit = {
    if (test == null) {
      addError("Test identifier undefined, skipping logging")
    } else {
      val log = LogCollector.log
        .getOrElseUpdate(test, TrieMap.empty)
        .getOrElseUpdate(e.getLoggerName, Vector.newBuilder)
      val _ = log.synchronized { log += e.getLevel -> e.getMessage }
    }
  }
} 
Example 17
Source File: Logger.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0
package com.codacy.analysis.core.utils

import ch.qos.logback.classic
import ch.qos.logback.classic.Level
import org.slf4j
import org.slf4j.LoggerFactory

object Logger {

  def setLevel(debug: Boolean = false): Unit = {
    val level = if (debug) Level.DEBUG else Level.OFF
    val root = LoggerFactory.getLogger(slf4j.Logger.ROOT_LOGGER_NAME)
    root match {
      case classicLogger: classic.Logger =>
        classicLogger.setLevel(level)
      case _ =>
    }
  }

} 
Example 18
Source File: LoggerHandlerWithIdSpec.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.provider

import ch.qos.logback.classic.{ Level, Logger }
import com.ing.wbaa.rokku.proxy.data.RequestId
import com.ing.wbaa.rokku.proxy.handler.LoggerHandlerWithId
import org.scalatest.BeforeAndAfter
import org.scalatest.diagrams.Diagrams
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class LoggerHandlerWithIdSpec extends AnyWordSpec with Matchers with Diagrams with BeforeAndAfter {

  private val logger = new LoggerHandlerWithId
  implicit val id: RequestId = RequestId("1")

  private val logRoot: Logger = org.slf4j.LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
  private val currentLogLevel = logRoot.getLevel
  private val val1 = 1
  private val val2 = 2
  before(logRoot.setLevel(Level.DEBUG))
  after(logRoot.setLevel(currentLogLevel))

  "Logger" should {
    "work" in {

      noException should be thrownBy {

        logger.debug("test debug {}", val1)
        logger.debug("test debug {} {}", val1, val2)
        logger.debug("test debug {}", new RuntimeException("RTE").getMessage)

        logger.info("test info {}", val1)
        logger.info("test info {} {}", val1, val2)
        logger.info("test info {}", new RuntimeException("RTE").getMessage)

        logger.warn("test warn {}", val1)
        logger.warn("test warn {} {}", val1, val2)
        logger.warn("test warn {}", new RuntimeException("RTE").getMessage)

        logger.error("test error {}", val1)
        logger.error("test error {} {}", val1, val2)
        logger.error("test error {}", new RuntimeException("RTE").getMessage)
      }
    }
  }
} 
Example 19
Source File: LoggingTest.scala    From incubator-retired-iota   with Apache License 2.0

package org.apache.iota.fey

import ch.qos.logback.classic.{Level, Logger}
import org.scalatest.matchers.{MatchResult, Matcher}
import org.slf4j.helpers.SubstituteLogger
import org.slf4j.{LoggerFactory, Logger => SLF4JLogger}

trait LoggingTest{

  protected val logAppenderName = "inMemory"
  private val appender = findInMemoryAppender(logAppenderName)

  def beLoggedAt(logLevel: Level): Matcher[String] = new Matcher[String] {
    def apply(left: String) = {
      val containsAtLevel = appender.containsAtLevel(left, logLevel)
      MatchResult(containsAtLevel,
        s" '$left' was not found at log level",
        s" '$left' was found at log level")
    }
  }

  def resetCapturedLogs(): Unit = appender.reset()

  def dumpCapturedLogsToSysOut(): Unit = appender.dumpLogs()

  private def findInMemoryAppender(s: String): InMemoryAppender = {
    LoggerFactory.getLogger(SLF4JLogger.ROOT_LOGGER_NAME) match {
      case logger: Logger => logger.getAppender(s) match {
        case inMemoryAppender: InMemoryAppender => inMemoryAppender
        case _ => throw new IllegalStateException(s"Is the InMemoryAppender registered with logback in its configuration file with the name $s?")
      }
      case sub: SubstituteLogger => throw new IllegalStateException("SLF4J is probably still initializing. Is LoggingTest part of the outermost class wrapping your tests?")
      case _ => throw new IllegalStateException("Are you using LogBack logging?")
    }
  }
} 
Example 20
Source File: WatchServiceReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

import akka.testkit.{EventFilter, TestProbe}

import scala.concurrent.duration.{DurationInt, FiniteDuration}
import java.io.File

import ch.qos.logback.classic.Level

class WatchServiceReceiverSpec extends BaseAkkaSpec{

  val watcherTB = TestProbe("WATCH-SERVICE")
  var watchFileTask:WatchServiceReceiver = _
  val watchTestDir = s"${CONFIG.JSON_REPOSITORY}/watchtest"

  "Creating WatchServiceReceiver" should {
    "process initial files in the JSON repository" in {
      CONFIG.JSON_EXTENSION = "json.not"
      watchFileTask = new WatchServiceReceiver(watcherTB.ref)
      watcherTB.expectMsgAllClassOf(classOf[JsonReceiverActor.JSON_RECEIVED])
      CONFIG.JSON_EXTENSION = "json.test"
    }
  }

  var watchThread: Thread = _
  "Start a Thread with WatchServiceReceiver" should {
    "Start Thread" in {
      watchThread = new Thread(watchFileTask, "TESTING-WATCHER-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-WATCHER-IN-THREAD") should be(true)
    }
  }

  "Start watching directory" should {
    "Starting receiving CREATED event" taggedAs(SlowTest) in {
      watchFileTask.watch(Paths.get(watchTestDir))
      Files.write(Paths.get(s"$watchTestDir/watched.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
    "Starting receiving UPDATE event" taggedAs(SlowTest) in {
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.delete_json_test.getBytes(StandardCharsets.UTF_8))
      Thread.sleep(200)
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
  }

  "processJson" should {
    "log to warn level when json has invalid schema" in {
      Files.write(Paths.get(s"$watchTestDir/watched-invalid.json.test"), Utils_JSONTest.test_json_schema_invalid.getBytes(StandardCharsets.UTF_8))
      watchFileTask.processJson(s"$watchTestDir/watched-invalid.json.test",new File(s"$watchTestDir/watched-invalid.json.test"))
      s"File $watchTestDir/watched-invalid.json.test not processed. Incorrect JSON schema" should beLoggedAt(Level.WARN)
    }
  }

  "interrupt watchservice" should{
    "interrupt thread" in {
      watchThread.interrupt()
    }
  }

} 
Example 21
Source File: JsonReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey

import java.nio.file.{Files, Paths}

import akka.actor.ActorRef
import akka.testkit.{EventFilter, TestProbe}
import ch.qos.logback.classic.Level
import scala.concurrent.duration.{DurationInt, FiniteDuration}

class JsonReceiverSpec extends BaseAkkaSpec with LoggingTest{


  class ReceiverTest(verifyActor: ActorRef) extends JsonReceiver{

    override def execute(): Unit = {
      verifyActor ! "EXECUTED"
      Thread.sleep(500)
    }

    override def exceptionOnRun(e: Exception): Unit = {
      verifyActor ! "INTERRUPTED"
    }

  }

  val verifyTB = TestProbe("RECEIVER-TEST")
  val receiver = new ReceiverTest(verifyTB.ref)

  "Executing validJson in JsonReceiver" should {
    "return false when json schema is not right" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid)) should be(false)
    }
    "log message to Error" in {
      ("Incorrect JSON schema \n/ensembles/0 \n\tErrors: Property command missing") should beLoggedAt(Level.ERROR)
    }
    "return true when Json schema is valid" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.create_json_test)) should be(true)
    }
  }

  "Executing checkForLocation in JsonReceiver" should {
    "log message at Debug level" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid))
      "Location not defined in JSON" should beLoggedAt(Level.DEBUG)
    }
    "download jar dynamically from URL" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.location_test))
      Files.exists(Paths.get(s"${CONFIG.DYNAMIC_JAR_REPO}/fey-stream.jar")) should be(true)
    }
  }

  var watchThread: Thread = _
  "Start a Thread with the JSON receiver" should {
    "Start Thread" in {
      watchThread = new Thread(receiver, "TESTING-RECEIVERS-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-RECEIVERS-IN-THREAD") should be(true)
    }
    "execute execute() method inside run" in {
      verifyTB.expectMsgAllOf(600.milliseconds,"EXECUTED","EXECUTED")
    }
  }

  "Interrupting the receiver Thread" should {
    "Throw Interrupted exception" in {
      EventFilter[InterruptedException]() intercept {
        watchThread.interrupt()
        watchThread.join()
      }
    }
    "execute exceptionOnRun method" in {
      verifyTB.receiveWhile(1200.milliseconds) {
        case "EXECUTED" =>
      }
      verifyTB.expectMsg("INTERRUPTED")
    }
  }


} 
Example 22
Source File: BaseSpecTest.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.service.test

import akka.actor.ActorSystem
import ch.qos.logback.classic.Level
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.component.Component
import com.webtrends.harness.service.Service
import org.specs2.mutable.SpecificationLike
import org.scalatest.{MustMatchers, WordSpecLike}

import scala.concurrent.duration._

trait BaseWookieeTest {
  def config:Config = ConfigFactory.empty()
  def componentMap:Option[Map[String, Class[_<:Component]]] = None
  def servicesMap:Option[Map[String, Class[_<:Service]]] = None
  def logLevel: Level = Level.INFO
  def startupWait: FiniteDuration = 15 seconds

  TestHarness(config, servicesMap, componentMap, logLevel, startupWait)
  Thread.sleep(1000)
  implicit val system: ActorSystem = TestHarness.system.get
}

trait BaseWookieeSpecTest extends BaseWookieeTest with SpecificationLike
trait BaseWookieeScalaTest extends BaseWookieeTest with WordSpecLike with MustMatchers 
Example 23
Source File: LoggerSpec.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.logging

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import ch.qos.logback.classic.Level
import com.webtrends.harness.TestKitSpecificationWithJUnit
import org.slf4j.LoggerFactory

class LoggerSpec extends TestKitSpecificationWithJUnit(ActorSystem("harness")) with LoggingAdapter {

  val probe = new TestProbe(system)
  val appender = setupAppender()
  sequential

  "logging" should {
    "allow for logging that is received by a mediator actor using Scala string interpolation" in {
      Logger.registerMediator(probe.ref)
      val logger = Logger("test")
      val x = 0
      logger.trace(s"testing ${x}123...")

      val msg = Trace(LoggerFactory getLogger "test", "testing 0123...", None, None, Nil, None)
      Logger.unregisterMediator(probe.ref)
      probe.expectMsgClass(classOf[Trace]) must be equalTo msg
    }

    "allow for logging that is received by a mediator actor using Java string interpolation" in {
      Logger.registerMediator(probe.ref)
      val logger = Logger("test")
      logger.debug("testing {}123...", 0)

      val msg = Debug(LoggerFactory getLogger "test", "testing {}123...", None, None, Seq(0), None)
      Logger.unregisterMediator(probe.ref)
      probe.expectMsgClass(classOf[Debug]) must be equalTo msg
    }

    "allow for logging that is handle directly by the underlying logging framework using Scala string interpolation" in {
      val logger = Logger("test")
      val x = 0
      logger.info(s"testing ${x}123...")
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "allow for logging that is handle directly by the underlying logging framework using Java string interpolation" in {
      val logger = Logger("test")
      logger.warn("testing {}123...", 0)
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "allow for logging that is handle directly by the underlying logging framework using Scala string interpolation and handles a Throwable" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "don't log if try succeeds" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      tryAndLogError({ true })
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "do log if try fails" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      tryAndLogError({ 5 / 0 })
      appender.lastMessage.get must be equalTo "/ by zero"
    }
  }

  step {
    TestKit.shutdownActorSystem(system)
  }

  private def setupAppender(): TestingAppender = {
    val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
    root.setLevel(Level.ALL)
    val appender = new TestingAppender()
    appender.start()
    root.addAppender(appender)
    appender
  }
} 
Example 24
Source File: Slf4jLogging.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.logging

import ch.qos.logback.classic.Level
import org.slf4j.LoggerFactory

private[harness] trait Slf4jLogging extends LogProcessor with AkkaLogProcessor {

  protected def getRootLevel: Level = {
    val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
    root.getLevel
  }

  def setLogLevel(level:Level) = {
    val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
    root.setLevel(level)
  }
} 
Example 25
Source File: DexExtensionGrpcConnector.scala    From matcher   with MIT License
package com.wavesplatform.dex.tool.connectors

import cats.instances.future._
import cats.instances.list._
import cats.syntax.either._
import cats.syntax.traverse._
import ch.qos.logback.classic.{Level, Logger}
import com.wavesplatform.dex.cli.ErrorOr
import com.wavesplatform.dex.domain.account.Address
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.asset.Asset.{IssuedAsset, Waves}
import com.wavesplatform.dex.grpc.integration.WavesBlockchainClientBuilder
import com.wavesplatform.dex.grpc.integration.clients.WavesBlockchainClient
import com.wavesplatform.dex.grpc.integration.dto.BriefAssetDescription
import com.wavesplatform.dex.grpc.integration.settings.GrpcClientSettings.ChannelOptionsSettings
import com.wavesplatform.dex.grpc.integration.settings.{GrpcClientSettings, WavesBlockchainClientSettings}
import monix.execution.Scheduler.Implicits.{global => monixScheduler}
import org.slf4j.LoggerFactory

import scala.concurrent.ExecutionContext.Implicits.{global => executionContext}
import scala.concurrent.duration._
import scala.concurrent.{Await, Awaitable, Future}
import scala.util.Try

case class DexExtensionGrpcConnector private (target: String, grpcAsyncClient: WavesBlockchainClient[Future]) extends Connector {

  import DexExtensionGrpcConnector._

  private def sync[A](f: Awaitable[A]): A = Await.result(f, requestTimeout)

  private def getDetailedBalance(asset: Asset, balance: Long): Future[(Asset, (BriefAssetDescription, Long))] = asset match {
    case Waves           => Future.successful(asset -> (BriefAssetDescription.wavesDescription -> balance))
    case ia: IssuedAsset => grpcAsyncClient.assetDescription(ia).map(maybeDesc => ia -> (maybeDesc.get -> balance))
  }

  def matcherBalanceAsync(address: Address): Future[DetailedBalance] =
    for {
      balances                <- grpcAsyncClient.allAssetsSpendableBalance(address)
      balancesWithDescription <- balances.toList.traverse { case (a, b) => getDetailedBalance(a, b) }
    } yield balancesWithDescription.toMap

  def matcherBalanceSync(address: Address): DetailedBalance = sync { matcherBalanceAsync(address) }

  override def close(): Unit = Await.result(grpcAsyncClient.close(), 3.seconds)
}

object DexExtensionGrpcConnector {

  val requestTimeout: FiniteDuration = 10.seconds

  type DetailedBalance = Map[Asset, (BriefAssetDescription, Long)]

  def create(target: String): ErrorOr[DexExtensionGrpcConnector] =
    Try {
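      // Turn the root Logback logger off before building the gRPC client.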
      LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger].setLevel(Level.OFF)
      val grpcSettings   = GrpcClientSettings(target, 5, 5, true, 2.seconds, 5.seconds, 1.minute, ChannelOptionsSettings(5.seconds))
      val clientSettings = WavesBlockchainClientSettings(grpcSettings, 100.milliseconds, 100)
      WavesBlockchainClientBuilder.async(clientSettings, monixScheduler, executionContext)
    }.toEither
      .bimap(ex => s"Cannot establish gRPC connection to DEX Extension! $ex", client => DexExtensionGrpcConnector(target, client))
} 
Example 26
Source File: Main.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import java.io.File
import java.nio.file.Path

import ch.qos.logback.classic.Level
import com.daml.lf.codegen.conf.Conf
import com.typesafe.scalalogging.StrictLogging
import org.slf4j.{Logger, LoggerFactory}
import scalaz.Cord

import scala.collection.breakOut

object Main extends StrictLogging {

  private val codegenId = "Scala Codegen"

  @deprecated("Use codegen font-end: com.daml.codegen.CodegenMain.main", "0.13.23")
  def main(args: Array[String]): Unit =
    Conf.parse(args) match {
      case Some(conf) =>
        generateCode(conf)
      case None =>
        throw new IllegalArgumentException(
          s"Invalid ${codegenId: String} command line arguments: ${args.mkString(" "): String}")
    }

  def generateCode(conf: Conf): Unit = conf match {
    case Conf(darMap, outputDir, decoderPkgAndClass, verbosity, roots) =>
      setGlobalLogLevel(verbosity)
      logUnsupportedEventDecoderOverride(decoderPkgAndClass)
      val (dars, packageName) = darsAndOnePackageName(darMap)
      CodeGen.generateCode(dars, packageName, outputDir.toFile, CodeGen.Novel, roots)
  }

  private def setGlobalLogLevel(verbosity: Level): Unit = {
    LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) match {
      case a: ch.qos.logback.classic.Logger =>
        a.setLevel(verbosity)
        logger.info(s"${codegenId: String} verbosity: ${verbosity.toString}")
      case _ =>
        logger.warn(s"${codegenId: String} cannot set requested verbosity: ${verbosity.toString}")
    }
  }

  private def logUnsupportedEventDecoderOverride(mapping: Option[(String, String)]): Unit =
    mapping.foreach {
      case (a, b) =>
        logger.warn(
          s"${codegenId: String} does not allow overriding Event Decoder, skipping: ${a: String} -> ${b: String}")
    }

  private def darsAndOnePackageName(darMap: Map[Path, Option[String]]): (List[File], String) = {
    val dars: List[File] = darMap.keys.map(_.toFile)(breakOut)
    val uniquePackageNames: Set[String] = darMap.values.collect { case Some(x) => x }(breakOut)
    uniquePackageNames.toSeq match {
      case Seq(packageName) =>
        (dars, packageName)
      case _ =>
        throw new IllegalStateException(
          s"${codegenId: String} expects all dars mapped to the same package name, " +
            s"requested: ${format(darMap): String}")
    }
  }

  private def format(map: Map[Path, Option[String]]): String = {
    val cord = map.foldLeft(Cord("{")) { (str, kv) =>
      str ++ kv._1.toFile.getAbsolutePath ++ "->" ++ kv._2.toString ++ ","
    }
    (cord ++ "}").toString
  }
} 
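Unlike the cast in the previous example, setGlobalLogLevel above pattern-matches on the logger instance, so a non-logback SLF4J binding degrades to a warning instead of a ClassCastException. The same guard works as a small reusable helper; a minimal sketch (object and method names are illustrative):

import ch.qos.logback.classic.Level
import org.slf4j.{Logger, LoggerFactory}

object LogLevelGuard {
  // Returns true when the root logger is logback-backed and the level was applied.
  def trySet(level: Level): Boolean =
    LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) match {
      case logback: ch.qos.logback.classic.Logger =>
        logback.setLevel(level)
        true
      case _ =>
        false
    }
}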
Example 27
Source File: Conf.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.codegen.conf

import java.nio.file.{Path, Paths}

import ch.qos.logback.classic.Level
import com.daml.buildinfo.BuildInfo
import scopt.{OptionParser, Read}


final case class Conf(
    darFiles: Map[Path, Option[String]] = Map(),
    outputDirectory: Path,
    decoderPkgAndClass: Option[(String, String)] = None,
    verbosity: Level = Level.ERROR,
    roots: List[String] = Nil
)

object Conf {

  private[conf] final val PackageAndClassRegex =
    """(?:(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+(?:\.\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)*)\.)(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)""".r

  def parse(args: Array[String]): Option[Conf] =
    parser.parse(args, Conf(Map.empty, Paths.get(".")))

  def parser: OptionParser[Conf] = new scopt.OptionParser[Conf]("codegen") {
    head("codegen", BuildInfo.Version)
    note("Code generator for the DAML ledger bindings.\n")

    arg[(Path, Option[String])]("<DAR-file[=package-prefix]>...")(
      optTupleRead(readPath, Read.stringRead))
      .unbounded()
      .action((p, c) => c.copy(darFiles = c.darFiles + p))
      .required()
      .text("DAR file to use as input of the codegen with an optional, but recommend, package prefix for the generated sources.")

    opt[Path]('o', "output-directory")(readPath)
      .action((p, c) => c.copy(outputDirectory = p))
      .required()
      .text("Output directory for the generated sources")

    opt[(String, String)]('d', "decoderClass")(readClassName)
      .action((className, c) => c.copy(decoderPkgAndClass = Some(className)))
      .text("Fully Qualified Class Name of the optional Decoder utility")

    opt[Level]('V', "verbosity")(readVerbosity)
      .action((l, c) => c.copy(verbosity = l))
      .text("Verbosity between 0 (only show errors) and 4 (show all messages) -- defaults to 0")

    opt[String]('r', "root")(Read.stringRead)
      .unbounded()
      .action((rexp, c) => c.copy(roots = rexp :: c.roots))
      .text(
        "Regular expression for fully-qualified names of templates to generate -- defaults to .*")

    help("help").text("This help text")

  }

  private[conf] val readPath: scopt.Read[Path] = scopt.Read.stringRead.map(s => Paths.get(s))

  val readClassName: scopt.Read[(String, String)] = scopt.Read.stringRead.map {
    case PackageAndClassRegex(p, c) => (p, c)
    case _ =>
      throw new IllegalArgumentException("Expected a Full Qualified Class Name")
  }

  val readVerbosity: scopt.Read[Level] = scopt.Read.stringRead.map {
    case "0" => Level.ERROR
    case "1" => Level.WARN
    case "2" => Level.INFO
    case "3" => Level.DEBUG
    case "4" => Level.TRACE
    case _ =>
      throw new IllegalArgumentException(
        "Expected a verbosity value between 0 (least verbose) and 4 (most verbose)")
  }

  private[conf] def optTupleRead[A: Read, B: Read]: Read[(A, Option[B])] =
    new Read[(A, Option[B])] {
      override def arity: Int = 2

      override def reads: String => (A, Option[B]) = { s: String =>
        s.split('=').toList match {
          case Nil =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
          case key :: Nil => (implicitly[Read[A]].reads(key), None)
          case key :: value :: Nil =>
            (implicitly[Read[A]].reads(key), Some(implicitly[Read[B]].reads(value)))
          case _ =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
        }
      }
    }

} 
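Feeding the parser a DAR path with a package prefix plus a numeric verbosity shows how optTupleRead splits on '=' and how readVerbosity maps digits to logback levels. A usage sketch with illustrative arguments, assuming the Conf object above is on the classpath:

import com.daml.lf.codegen.conf.Conf

object ConfParseDemo extends App {
  // Hypothetical invocation: one DAR mapped to a prefix, INFO-level logging (-V 2).
  val parsed: Option[Conf] =
    Conf.parse(Array("model.dar=com.example", "--output-directory", "generated", "-V", "2"))
  // On success: darFiles = Map(model.dar -> Some("com.example")), verbosity = Level.INFO.
  println(parsed)
}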
Example 28
Source File: SandboxConfig.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.config

import java.io.File
import java.nio.file.Path
import java.time.Duration

import ch.qos.logback.classic.Level
import com.daml.caching.SizedCache
import com.daml.ledger.api.auth.AuthService
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.ledger.participant.state.v1.SeedService.Seeding
import com.daml.platform.common.LedgerIdMode
import com.daml.platform.configuration.{CommandConfiguration, LedgerConfiguration, MetricsReporter}
import com.daml.platform.services.time.TimeProviderType
import com.daml.ports.Port


final case class SandboxConfig(
    address: Option[String],
    port: Port,
    portFile: Option[Path],
    damlPackages: List[File],
    timeProviderType: Option[TimeProviderType],
    commandConfig: CommandConfiguration,
    ledgerConfig: LedgerConfiguration,
    tlsConfig: Option[TlsConfiguration],
    scenario: Option[String],
    implicitPartyAllocation: Boolean,
    ledgerIdMode: LedgerIdMode,
    maxInboundMessageSize: Int,
    jdbcUrl: Option[String],
    eagerPackageLoading: Boolean,
    logLevel: Option[Level],
    authService: Option[AuthService],
    seeding: Option[Seeding],
    metricsReporter: Option[MetricsReporter],
    metricsReportingInterval: Duration,
    eventsPageSize: Int,
    lfValueTranslationEventCacheConfiguration: SizedCache.Configuration,
    lfValueTranslationContractCacheConfiguration: SizedCache.Configuration,
    profileDir: Option[Path],
    stackTraces: Boolean,
)

object SandboxConfig {
  val DefaultPort: Port = Port(6865)

  val DefaultMaxInboundMessageSize: Int = 4 * 1024 * 1024

  val DefaultEventsPageSize: Int = 1000

  val DefaultTimeProviderType: TimeProviderType = TimeProviderType.WallClock

  val DefaultLfValueTranslationCacheConfiguration: SizedCache.Configuration =
    SizedCache.Configuration.none

  lazy val nextDefault: SandboxConfig =
    SandboxConfig(
      address = None,
      port = DefaultPort,
      portFile = None,
      damlPackages = Nil,
      timeProviderType = None,
      commandConfig = CommandConfiguration.default,
      ledgerConfig = LedgerConfiguration.defaultLocalLedger,
      tlsConfig = None,
      scenario = None,
      implicitPartyAllocation = true,
      ledgerIdMode = LedgerIdMode.Dynamic,
      maxInboundMessageSize = DefaultMaxInboundMessageSize,
      jdbcUrl = None,
      eagerPackageLoading = false,
      logLevel = None, // the default is in logback.xml
      authService = None,
      seeding = Some(Seeding.Strong),
      metricsReporter = None,
      metricsReportingInterval = Duration.ofSeconds(10),
      eventsPageSize = DefaultEventsPageSize,
      lfValueTranslationEventCacheConfiguration = DefaultLfValueTranslationCacheConfiguration,
      lfValueTranslationContractCacheConfiguration = DefaultLfValueTranslationCacheConfiguration,
      profileDir = None,
      stackTraces = true,
    )

  lazy val default: SandboxConfig =
    nextDefault.copy(
      seeding = None,
      ledgerConfig = LedgerConfiguration.defaultLedgerBackedIndex,
    )
} 
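Because logLevel is an Option (None defers to whatever logback.xml configures), callers opt into an explicit level with copy. A minimal sketch, assuming SandboxConfig is on the classpath:

object VerboseSandbox {
  import ch.qos.logback.classic.Level
  import com.daml.platform.sandbox.config.SandboxConfig

  // Start from the defaults and request DEBUG logging explicitly;
  // leaving logLevel = None would defer to logback.xml.
  val verbose: SandboxConfig = SandboxConfig.default.copy(logLevel = Some(Level.DEBUG))
}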
Example 29
Source File: GlobalLogLevel.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox

import ch.qos.logback.classic.Level
import org.slf4j.{Logger, LoggerFactory}

object GlobalLogLevel {
  def set(level: Level): Unit = {
    val rootLogger = LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME)
    LoggerFactory.getILoggerFactory match {
      case loggerContext: ch.qos.logback.classic.LoggerContext =>
        rootLogger.info(s"Sandbox verbosity changed to $level")
        loggerContext.getLoggerList.forEach(_.setLevel(level))
      case _ =>
        rootLogger.warn(s"Sandbox verbosity cannot be set to requested $level")
    }
  }
}
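GlobalLogLevel.set walks every logger registered in the logback LoggerContext, not just the root, so child loggers that carry explicit levels are overridden too. A hypothetical wiring sketch that applies the optional level from the SandboxConfig of the previous example at startup:

import com.daml.platform.sandbox.GlobalLogLevel
import com.daml.platform.sandbox.config.SandboxConfig

object ApplyLogLevel {
  // Hypothetical startup hook: only touch logging when a level was requested.
  def fromConfig(config: SandboxConfig): Unit =
    config.logLevel.foreach(GlobalLogLevel.set)
}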