akka.actor.ExtendedActorSystem Scala Examples

The following examples show how to use akka.actor.ExtendedActorSystem. Each example is taken from an open-source project; the source file, project name, and license are listed above it.
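Most of the examples below follow one of two patterns: casting a plain ActorSystem to ExtendedActorSystem to reach internal APIs such as dynamicAccess or provider, or receiving an ExtendedActorSystem in an Extension's createExtension factory. A minimal sketch of both patterns (the MyExtension name is illustrative, not taken from any of the projects below):

import akka.actor.{ ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }

class MyExtension(system: ExtendedActorSystem) extends Extension {
  // internal APIs only available on ExtendedActorSystem
  def defaultAddress = system.provider.getDefaultAddress
  def loader         = system.dynamicAccess.classLoader
}

object MyExtension extends ExtensionId[MyExtension] with ExtensionIdProvider {
  override def lookup: MyExtension.type = MyExtension
  // Akka hands the ExtendedActorSystem to this factory, so no cast is needed
  override def createExtension(system: ExtendedActorSystem): MyExtension = new MyExtension(system)
}

object Usage {
  val system = ActorSystem("example")
  // direct cast, as many of the examples below do
  val extended = system.asInstanceOf[ExtendedActorSystem]
  // or via the extension mechanism
  val ext = MyExtension(system)
}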
Example 1
Source File: BuildTagViewForPersistenceId.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.reconciler

import akka.actor.ActorSystem
import akka.persistence.cassandra.PluginSettings
import akka.Done
import akka.persistence.cassandra.journal.TagWriter._
import scala.concurrent.duration._
import scala.concurrent.Future
import akka.stream.scaladsl.Source
import akka.actor.ExtendedActorSystem
import akka.persistence.query.PersistenceQuery
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.event.Logging
import akka.persistence.cassandra.journal.CassandraTagRecovery
import akka.persistence.cassandra.Extractors
import akka.util.Timeout
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.Sink
import akka.annotation.InternalApi
import akka.serialization.SerializationExtension


@InternalApi
private[akka] final class BuildTagViewForPersistenceId(
    persistenceId: String,
    system: ActorSystem,
    recovery: CassandraTagRecovery,
    settings: PluginSettings) {

  import system.dispatcher

  private implicit val sys = system
  private val log = Logging(system, classOf[BuildTagViewForPersistenceId])
  private val serialization = SerializationExtension(system)

  private val queries: CassandraReadJournal =
    PersistenceQuery(system.asInstanceOf[ExtendedActorSystem])
      .readJournalFor[CassandraReadJournal]("akka.persistence.cassandra.query")

  private implicit val flushTimeout = Timeout(30.seconds)

  def reconcile(flushEvery: Int = 1000): Future[Done] = {

    val recoveryPrep = for {
      tp <- recovery.lookupTagProgress(persistenceId)
      _ <- recovery.setTagProgress(persistenceId, tp)
    } yield tp

    Source
      .futureSource(recoveryPrep.map((tp: Map[String, TagProgress]) => {
        log.debug("[{}] Rebuilding tag view table from: [{}]", persistenceId, tp)
        queries
          .eventsByPersistenceId(
            persistenceId,
            0,
            Long.MaxValue,
            Long.MaxValue,
            None,
            settings.journalSettings.readProfile,
            "BuildTagViewForPersistenceId",
            extractor = Extractors.rawEvent(settings.eventsByTagSettings.bucketSize, serialization, system))
          .map(recovery.sendMissingTagWriteRaw(tp, actorRunning = false))
          .buffer(flushEvery, OverflowStrategy.backpressure)
          .mapAsync(1)(_ => recovery.flush(flushTimeout))
      }))
      .runWith(Sink.ignore)

  }

} 
Example 2
Source File: JavaSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectOutputStream}

import akka.actor.ExtendedActorSystem
import akka.serialization.JavaSerializer
import akka.util.ClassLoaderObjectInputStream

class JavaSerde(system: ExtendedActorSystem) extends Serde[AnyRef] {

  override def identifier: Int = 101

  override def close(): Unit = ()

  override def fromBytes(bytes: Array[Byte]): AnyRef = {
    val in = new ClassLoaderObjectInputStream(system.dynamicAccess.classLoader, new ByteArrayInputStream(bytes))
    val obj = JavaSerializer.currentSystem.withValue(system) { in.readObject }
    in.close()
    obj
  }

  override def toBytes(o: AnyRef): Array[Byte] = {
    val bos = new ByteArrayOutputStream
    val out = new ObjectOutputStream(bos)
    JavaSerializer.currentSystem.withValue(system) { out.writeObject(o) }
    out.close()
    bos.toByteArray
  }

} 
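A round trip through JavaSerde only needs an ExtendedActorSystem, typically obtained by the cast shown throughout these examples. A minimal usage sketch, assuming the affinity Serde API shown above:

import akka.actor.{ ActorSystem, ExtendedActorSystem }

val system = ActorSystem("serde-demo").asInstanceOf[ExtendedActorSystem]
val serde  = new JavaSerde(system)

val bytes    = serde.toBytes("hello")  // plain Java serialization of an AnyRef
val restored = serde.fromBytes(bytes)  // read back with the system's class loader
assert(restored == "hello")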
Example 3
Source File: AbstractWrapSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde

import akka.actor.ExtendedActorSystem
import akka.serialization.Serializer
import com.typesafe.config.Config
import io.amient.affinity.core.util.ByteUtils

abstract class AbstractWrapSerde(serdes: Serdes) extends Serializer {

  def this(system: ExtendedActorSystem) = this(Serde.tools(system))
  def this(config: Config) = this(Serde.tools(config))

  def fromBinaryWrapped(bytes: Array[Byte]): Any = {
    val serializerIdentifier = ByteUtils.asIntValue(bytes)
    val data = new Array[Byte](bytes.length - 4)
    Array.copy(bytes, 4, data, 0, bytes.length - 4)
    val wrappedSerde = serdes.by(serializerIdentifier)
    wrappedSerde.fromBytes(data)
  }

  def toBinaryWrapped(wrapped: Any, offset: Int = 0): Array[Byte] = {
    val w: AnyRef = wrapped match {
      case null => null
      case ref: AnyRef => ref
      case u: Unit => u.asInstanceOf[AnyRef]
      case z: Boolean => z.asInstanceOf[AnyRef]
      case b: Byte => b.asInstanceOf[AnyRef]
      case c: Char => c.asInstanceOf[AnyRef]
      case s: Short => s.asInstanceOf[AnyRef]
      case i: Int => i.asInstanceOf[AnyRef]
      case l: Long => l.asInstanceOf[AnyRef]
      case f: Float => f.asInstanceOf[AnyRef]
      case d: Double => d.asInstanceOf[AnyRef]
    }
    val delegate = serdes.find(w)
    val bytes: Array[Byte] = delegate.toBinary(w)
    val result = new Array[Byte](bytes.length + 4 + offset)
    ByteUtils.putIntValue(delegate.identifier, result, 0)
    Array.copy(bytes, 0, result, 4 + offset, bytes.length)
    result
  }

  override def includeManifest: Boolean = false

} 
Example 4
Source File: OptionSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde.primitive

import akka.actor.ExtendedActorSystem
import com.typesafe.config.Config
import io.amient.affinity.core.serde.{AbstractWrapSerde, Serde, Serdes}

class OptionSerde(serdes: Serdes) extends AbstractWrapSerde(serdes) with Serde[Option[Any]] {

  def this(system: ExtendedActorSystem) = this(Serde.tools(system))
  def this(config: Config) = this(Serde.tools(config))

  override protected def fromBytes(bytes: Array[Byte]): Option[Any] = {
    if (bytes.length == 0) None else Some(fromBinaryWrapped(bytes))
  }

  override def identifier: Int = 131

  override def toBytes(o: Option[Any]): Array[Byte] = o match {
    case None => Array[Byte]()
    case Some(other: AnyRef) => toBinaryWrapped(other)
    case _ => throw new NotImplementedError("AnyVal needs conversion to AnyRef")
  }

  override def includeManifest: Boolean = {
    false
  }

  override def close() = ()
} 
Example 5
Source File: TupleSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde.primitive

import akka.actor.ExtendedActorSystem
import com.typesafe.config.Config
import io.amient.affinity.core.serde.{AbstractWrapSerde, Serde, Serdes}
import io.amient.affinity.core.util.ByteUtils

class TupleSerde(serdes: Serdes) extends AbstractWrapSerde(serdes) with Serde[Product] {

  def this(system: ExtendedActorSystem) = this(Serde.tools(system))
  def this(config: Config) = this(Serde.tools(config))

  override def identifier: Int = 132

  override def toBytes(p: Product): Array[Byte] = {
    var result = new Array[Byte](4)
    ByteUtils.putIntValue(p.productArity, result, 0)
    p.productIterator.foreach {
      m =>
        val bytes = toBinaryWrapped(m.asInstanceOf[AnyRef])
        val tmp = new Array[Byte](result.length + bytes.length + 4)
        Array.copy(result, 0, tmp, 0, result.length)
        ByteUtils.putIntValue(bytes.length, tmp, result.length)
        Array.copy(bytes, 0, tmp, 4 + result.length, bytes.length)
        result = tmp
    }
    result
  }

  override protected def fromBytes(bytes: Array[Byte]): Product = {
    val arity = ByteUtils.asIntValue(bytes, 0)
    var tmp = scala.collection.immutable.List[Any]()
    var offset = 4
    (1 to arity).foreach { _ =>
      val len = ByteUtils.asIntValue(bytes, offset)
      offset += 4
      val b = new Array[Byte](len)
      Array.copy(bytes, offset, b, 0, len)
      val element = fromBinaryWrapped(b)
      offset += len
      tmp :+= element
    }
    tmp match {
      case p1 :: Nil => Tuple1(p1)
      case p1 :: p2 :: Nil => Tuple2(p1, p2)
      case p1 :: p2 :: p3 :: Nil => Tuple3(p1, p2, p3)
      case p1 :: p2 :: p3 :: p4 :: Nil => Tuple4(p1, p2, p3, p4)
      case p1 :: p2 :: p3 :: p4 :: p5 :: Nil => Tuple5(p1, p2, p3, p4, p5)
      case p1 :: p2 :: p3 :: p4 :: p5 :: p6 :: Nil => Tuple6(p1, p2, p3, p4, p5, p6)
      case p1 :: p2 :: p3 :: p4 :: p5 :: p6 :: p7 :: Nil => Tuple7(p1, p2, p3, p4, p5, p6, p7)
      case p1 :: p2 :: p3 :: p4 :: p5 :: p6 :: p7 :: p8 :: Nil => Tuple8(p1, p2, p3, p4, p5, p6, p7, p8)
      case p1 :: p2 :: p3 :: p4 :: p5 :: p6 :: p7 :: p8 :: p9 :: Nil => Tuple9(p1, p2, p3, p4, p5, p6, p7, p8, p9)
      case _ => throw new IllegalAccessException("Only Tuple1-9 are supported by TupleSerde")
    }
  }

  override def close() = ()
} 
Example 6
Source File: ClusterStatusCheck.scala    From reactive-lib   with Apache License 2.0
package com.lightbend.rp.akkaclusterbootstrap

import akka.actor.ExtendedActorSystem
import akka.cluster.{ Cluster, MemberStatus }
import com.lightbend.rp.status.{ HealthCheck, ReadinessCheck }
import scala.concurrent.{ ExecutionContext, Future }

class ClusterStatusCheck extends ReadinessCheck with HealthCheck {
  def healthy(actorSystem: ExtendedActorSystem)(implicit ec: ExecutionContext): Future[Boolean] =
    status(actorSystem) map {
      case MemberStatus.Joining => true
      case MemberStatus.WeaklyUp => true
      case MemberStatus.Up => true
      case MemberStatus.Leaving => true
      case MemberStatus.Exiting => true
      case MemberStatus.Down => false
      case MemberStatus.Removed => false
    }

  def ready(actorSystem: ExtendedActorSystem)(implicit ec: ExecutionContext): Future[Boolean] =
    status(actorSystem) map {
      case MemberStatus.Joining => false
      case MemberStatus.WeaklyUp => true
      case MemberStatus.Up => true
      case MemberStatus.Leaving => false
      case MemberStatus.Exiting => false
      case MemberStatus.Down => false
      case MemberStatus.Removed => false
    }

  private def status(actorSystem: ExtendedActorSystem)(implicit ec: ExecutionContext): Future[MemberStatus] = {
    val cluster = Cluster(actorSystem)
    val selfNow = cluster.selfMember
    Future.successful(selfNow.status)
  }
} 
Example 7
Source File: ApplicationStatus.scala    From reactive-lib   with Apache License 2.0
package com.lightbend.rp.status

import akka.actor.{ ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server._
import akka.management.http.{ ManagementRouteProvider, ManagementRouteProviderSettings }
import scala.collection.immutable.Seq
import scala.concurrent.{ ExecutionContext, Future }

import Directives._

class ApplicationStatus(system: ExtendedActorSystem) extends Extension with ManagementRouteProvider {
  private val settings = Settings(system)

  private val healthChecks =
    settings
      .healthChecks
      .map(c =>
        system
          .dynamicAccess
          .createInstanceFor[HealthCheck](c, Seq.empty)
          .getOrElse(throw new IllegalArgumentException(s"Incompatible HealthCheck class definition: $c")))

  private val readinessChecks =
    settings
      .readinessChecks
      .map(c =>
        system
          .dynamicAccess
          .createInstanceFor[ReadinessCheck](c, Seq.empty)
          .getOrElse(throw new IllegalArgumentException(s"Incompatible ReadinessCheck class definition: $c")))

  def routes(settings: ManagementRouteProviderSettings): Route = pathPrefix("platform-tooling") {
    import system.dispatcher

    concat(
      path("ping")(complete("pong!")),
      path("healthy")(complete(isHealthy.map(h => if (h) StatusCodes.OK else StatusCodes.ServiceUnavailable))),
      path("ready")(complete(isReady.map(r => if (r) StatusCodes.OK else StatusCodes.ServiceUnavailable))))
  }

  def isHealthy(implicit ec: ExecutionContext): Future[Boolean] =
    Future
      .sequence(healthChecks.map(_.healthy(system)))
      .map(_.forall(identity))

  def isReady(implicit ec: ExecutionContext): Future[Boolean] =
    Future
      .sequence(readinessChecks.map(_.ready(system)))
      .map(_.forall(identity))
}

object ApplicationStatus extends ExtensionId[ApplicationStatus] with ExtensionIdProvider {
  override def lookup: ApplicationStatus.type = ApplicationStatus
  override def get(system: ActorSystem): ApplicationStatus = super.get(system)
  override def createExtension(system: ExtendedActorSystem): ApplicationStatus = new ApplicationStatus(system)
} 
Example 8
Source File: CassandraSerializationSpec.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.journal

import akka.actor.{ ExtendedActorSystem, Props }
import akka.persistence.RecoveryCompleted
import akka.persistence.cassandra.EventWithMetaData.UnknownMetaData
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, EventWithMetaData, Persister }
import akka.serialization.BaseSerializer
import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory

object CassandraSerializationSpec {
  val config = ConfigFactory.parseString(s"""
       |akka.actor.serialize-messages=false
       |akka.actor.serializers.crap="akka.persistence.cassandra.journal.BrokenDeSerialization"
       |akka.actor.serialization-identifiers."akka.persistence.cassandra.journal.BrokenDeSerialization" = 666
       |akka.actor.serialization-bindings {
       |  "akka.persistence.cassandra.Persister$$CrapEvent" = crap
       |}
       |akka.persistence.journal.max-deletion-batch-size = 3
       |akka.persistence.publish-confirmations = on
       |akka.persistence.publish-plugin-commands = on
       |akka.persistence.cassandra.journal.target-partition-size = 5
       |akka.persistence.cassandra.max-result-size = 3
       |akka.persistence.cassandra.journal.keyspace=CassandraIntegrationSpec
       |akka.persistence.cassandra.snapshot.keyspace=CassandraIntegrationSpecSnapshot
       |
    """.stripMargin).withFallback(CassandraLifecycle.config)

}

class BrokenDeSerialization(override val system: ExtendedActorSystem) extends BaseSerializer {
  override def includeManifest: Boolean = false
  override def toBinary(o: AnyRef): Array[Byte] =
    // I was serious with the class name
    Array.emptyByteArray
  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
    throw new RuntimeException("I can't deserialize a single thing")
}

class CassandraSerializationSpec extends CassandraSpec(CassandraSerializationSpec.config) {

  import akka.persistence.cassandra.Persister._

  "A Cassandra journal" must {

    "Fail recovery when deserialization fails" in {
      val probe = TestProbe()
      val incarnation1 = system.actorOf(Props(new Persister("id1", probe.ref)))
      probe.expectMsgType[RecoveryCompleted]

      incarnation1 ! CrapEvent(1)
      probe.expectMsg(CrapEvent(1))

      probe.watch(incarnation1)
      system.stop(incarnation1)
      probe.expectTerminated(incarnation1)

      val incarnation2 = system.actorOf(Props(new Persister("id1", probe.ref)))
      probe.expectMsgType[RuntimeException].getMessage shouldBe "I can't deserialize a single thing"
      incarnation2
    }

    "be able to store meta data" in {
      val probe = TestProbe()
      val incarnation1 = system.actorOf(Props(new Persister("id2", probe.ref)))
      probe.expectMsgType[RecoveryCompleted]

      val eventWithMeta = EventWithMetaData("TheActualEvent", "TheAdditionalMetaData")
      incarnation1 ! eventWithMeta
      probe.expectMsg(eventWithMeta)

      probe.watch(incarnation1)
      system.stop(incarnation1)
      probe.expectTerminated(incarnation1)

      system.actorOf(Props(new Persister("id2", probe.ref)))
      probe.expectMsg(eventWithMeta) // from replay
    }

    "not fail replay due to deserialization problem of meta data" in {
      val probe = TestProbe()
      val incarnation1 = system.actorOf(Props(new Persister("id3", probe.ref)))
      probe.expectMsgType[RecoveryCompleted]

      val eventWithMeta = EventWithMetaData("TheActualEvent", CrapEvent(13))
      incarnation1 ! eventWithMeta
      probe.expectMsg(eventWithMeta)

      probe.watch(incarnation1)
      system.stop(incarnation1)
      probe.expectTerminated(incarnation1)

      system.actorOf(Props(new Persister("id3", probe.ref)))
      probe.expectMsg(EventWithMetaData("TheActualEvent", UnknownMetaData(666, ""))) // from replay, no meta
    }

  }

} 
Example 9
Source File: CassandraEventUpdateSpec.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.journal

import java.util.UUID

import scala.concurrent.Await
import akka.Done
import akka.event.Logging
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.journal.CassandraJournal.Serialized
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, TestTaggingActor, _ }
import akka.serialization.SerializationExtension
import com.typesafe.config.ConfigFactory

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import akka.actor.ExtendedActorSystem
import akka.stream.alpakka.cassandra.CqlSessionProvider
import akka.stream.alpakka.cassandra.scaladsl.CassandraSession

object CassandraEventUpdateSpec {
  val config = ConfigFactory.parseString("""
    """).withFallback(CassandraLifecycle.config)
}

class CassandraEventUpdateSpec extends CassandraSpec(CassandraEventUpdateSpec.config) { s =>

  private[akka] val log = Logging(system, getClass)
  private val serialization = SerializationExtension(system)

  val updater = new CassandraEventUpdate {

    override private[akka] val log = s.log
    override private[akka] def settings: PluginSettings =
      PluginSettings(system)
    override private[akka] implicit val ec: ExecutionContext = system.dispatcher
    // use separate session, not shared via CassandraSessionRegistry because init is different
    private val sessionProvider =
      CqlSessionProvider(
        system.asInstanceOf[ExtendedActorSystem],
        system.settings.config.getConfig(PluginSettings.DefaultConfigPath))
    override private[akka] val session: CassandraSession =
      new CassandraSession(
        system,
        sessionProvider,
        ec,
        log,
        systemName,
        init = _ => Future.successful(Done),
        onClose = () => ())
  }

  "CassandraEventUpdate" must {
    "update the event in messages" in {
      val pid = nextPid
      val a = system.actorOf(TestTaggingActor.props(pid))
      a ! "e-1"
      expectMsgType[TestTaggingActor.Ack.type]
      val eventsBefore = events(pid)
      eventsBefore.map(_.pr.payload) shouldEqual Seq("e-1")
      val originalEvent = eventsBefore.head
      val modifiedEvent = serialize(originalEvent.pr.withPayload("secrets"), originalEvent.offset, Set("ignored"))

      updater.updateEvent(modifiedEvent).futureValue shouldEqual Done

      eventPayloadsWithTags(pid) shouldEqual Seq(("secrets", Set()))
    }

    "update the event in tag_views" in {
      val pid = nextPid
      val b = system.actorOf(TestTaggingActor.props(pid, Set("red", "blue")))
      b ! "e-1"
      expectMsgType[TestTaggingActor.Ack.type]
      val eventsBefore = events(pid).head
      val modifiedEvent = serialize(eventsBefore.pr.withPayload("hidden"), eventsBefore.offset, Set("ignored"))

      expectEventsForTag(tag = "red", "e-1")
      expectEventsForTag(tag = "blue", "e-1")

      updater.updateEvent(modifiedEvent).futureValue shouldEqual Done

      expectEventsForTag(tag = "red", "hidden")
      expectEventsForTag(tag = "blue", "hidden")
    }

    def serialize(pr: PersistentRepr, offset: UUID, tags: Set[String]): Serialized = {
      import system.dispatcher
      Await.result(serializeEvent(pr, tags, offset, Hour, serialization, system), remainingOrDefault)
    }
  }
} 
Example 10
Source File: TestEventAdapter.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.query

import akka.actor.ExtendedActorSystem
import akka.persistence.journal.{ EventAdapter, EventSeq, Tagged }

sealed trait TestEvent[T] {
  def value: T
}

class TestEventAdapter(system: ExtendedActorSystem) extends EventAdapter {

  override def manifest(event: Any): String = ""

  override def toJournal(event: Any): Any = event match {
    case e: String if e.startsWith("tagged:") =>
      val taggedEvent = e.stripPrefix("tagged:")
      val tags = taggedEvent.takeWhile(_ != ':').split(",").toSet
      val payload = taggedEvent.dropWhile(_ != ':').drop(1)
      Tagged(payload, tags)
    case e => e
  }

  override def fromJournal(event: Any, manifest: String): EventSeq = event match {
    case e: String if e.contains(":") =>
      e.split(":").toList match {
        case "dropped" :: _ :: Nil            => EventSeq.empty
        case "duplicated" :: x :: Nil         => EventSeq(x, x)
        case "prefixed" :: prefix :: x :: Nil => EventSeq.single(s"$prefix-$x")
        case _                                => throw new IllegalArgumentException(e)
      }
    case _ => EventSeq.single(event)
  }
} 
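The adapter encodes tags inside the string payload itself. A short sketch of the round trip, with the system creation shown for completeness:

import akka.actor.{ ActorSystem, ExtendedActorSystem }

val system  = ActorSystem("adapter-demo").asInstanceOf[ExtendedActorSystem]
val adapter = new TestEventAdapter(system)

adapter.toJournal("tagged:red,blue:hello")
// => Tagged("hello", Set("red", "blue"))

adapter.fromJournal("duplicated:x", "")
// => EventSeq("x", "x")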
Example 11
Source File: CassandraQueryJournalOverrideSpec.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.query

import akka.actor.ExtendedActorSystem
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.TestTaggingActor.Ack
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, TestTaggingActor }
import akka.persistence.query.{ PersistenceQuery, ReadJournalProvider }
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.{ Config, ConfigFactory }

import scala.concurrent.duration._

class JournalOverride(as: ExtendedActorSystem, config: Config, configPath: String)
    extends CassandraReadJournal(as, config, configPath) {
  override private[akka] def mapEvent(pr: PersistentRepr) =
    PersistentRepr("cat", pr.sequenceNr, pr.persistenceId, pr.manifest, pr.deleted, pr.sender, pr.writerUuid)
}

class JournalOverrideProvider(as: ExtendedActorSystem, config: Config, configPath: String) extends ReadJournalProvider {
  override def scaladslReadJournal() = new JournalOverride(as, config, configPath)
  override def javadslReadJournal() = null
}

object CassandraQueryJournalOverrideSpec {

  val config = ConfigFactory.parseString("""
      akka.persistence.cassandra.query {
        class = "akka.persistence.cassandra.query.JournalOverrideProvider"
      }
    """.stripMargin).withFallback(CassandraLifecycle.config)

}

class CassandraQueryJournalOverrideSpec extends CassandraSpec(CassandraQueryJournalOverrideSpec.config) {

  lazy val journal =
    PersistenceQuery(system).readJournalFor[JournalOverride](CassandraReadJournal.Identifier)

  "Cassandra query journal override" must {
    "map events" in {
      val pid = "p1"
      val p1 = system.actorOf(TestTaggingActor.props(pid))
      p1 ! "not a cat"
      expectMsg(Ack)

      val currentEvents = journal.currentEventsByPersistenceId(pid, 0, Long.MaxValue)
      val currentProbe = currentEvents.map(_.event.toString).runWith(TestSink.probe[String])
      currentProbe.request(2)
      currentProbe.expectNext("cat")
      currentProbe.expectComplete()

      val liveEvents = journal.eventsByPersistenceId(pid, 0, Long.MaxValue)
      val liveProbe = liveEvents.map(_.event.toString).runWith(TestSink.probe[String])
      liveProbe.request(2)
      liveProbe.expectNext("cat")
      liveProbe.expectNoMessage(100.millis)
      liveProbe.cancel()

      val internalEvents = journal.eventsByPersistenceIdWithControl(pid, 0, Long.MaxValue, None)
      val internalProbe = internalEvents.map(_.event.toString).runWith(TestSink.probe[String])
      internalProbe.request(2)
      internalProbe.expectNext("cat")
      internalProbe.expectNoMessage(100.millis)
      internalProbe.cancel()
    }
  }
} 
Example 12
Source File: SeqSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde.collection

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import akka.actor.ExtendedActorSystem
import com.typesafe.config.Config
import io.amient.affinity.core.serde.{AbstractWrapSerde, Serde, Serdes}

class SeqSerde(serdes: Serdes) extends AbstractWrapSerde(serdes) with Serde[Seq[Any]] {

  def this(system: ExtendedActorSystem) = this(Serde.tools(system))
  def this(config: Config) = this(Serde.tools(config))

  override def identifier: Int = 141

  override def close(): Unit = ()

  override protected def fromBytes(bytes: Array[Byte]): Seq[Any] = {
    val di = new DataInputStream(new ByteArrayInputStream(bytes))
    val numItems = di.readInt()
    val result = ((1 to numItems) map { _ =>
      val len = di.readInt()
      val item = new Array[Byte](len)
      di.read(item)
      fromBinaryWrapped(item)
    }).toList
    di.close()
    result
  }

  override def toBytes(seq: Seq[Any]): Array[Byte] = {
    val os = new ByteArrayOutputStream()
    val d = new DataOutputStream(os)
    d.writeInt(seq.size)
    for (a: Any <- seq) a match {
      case ref: AnyRef =>
        val item = toBinaryWrapped(ref)
        d.writeInt(item.length)
        d.write(item)
    }
    os.close()
    os.toByteArray
  }
} 
Example 13
Source File: DocSvr.scala    From Raphtory   with Apache License 2.0
package com.raphtory.core.clustersetup

import akka.actor.ActorSystem
import akka.actor.Address
import akka.actor.ExtendedActorSystem
import akka.cluster.Cluster
import akka.cluster.Member
import akka.event.LoggingAdapter
import akka.management.cluster.bootstrap.ClusterBootstrap
import akka.management.javadsl.AkkaManagement
import com.raphtory.core.clustersetup.util.ConfigUtils._
import com.raphtory.core.utils.Utils
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigValueFactory

import scala.collection.JavaConversions
import scala.collection.JavaConversions._

trait DocSvr {

  def seedLoc: String

  implicit val system: ActorSystem
  val docker = System.getenv().getOrDefault("DOCKER", "false").trim.toBoolean

  val clusterSystemName: String = Utils.clusterSystemName
  val ssn: String               = java.util.UUID.randomUUID.toString

  
  def printConfigInfo(config: Config, system: ActorSystem): Unit = {
    val log: LoggingAdapter = system.log

    val systemConfig: SystemConfig = config.parse()
    val bindAddress: SocketAddress = systemConfig.bindAddress
    val tcpAddress: SocketAddress  = systemConfig.tcpAddress

    log.info(s"Created ActorSystem with ID: $ssn")

    log.info(s"Binding ActorSystem internally to address ${bindAddress.host}:${bindAddress.port}")
    log.info(s"Binding ActorSystem externally to host ${tcpAddress.host}:${tcpAddress.port}")

    log.info(s"Registering the following seeds to ActorSystem: ${systemConfig.seeds}")
    log.info(s"Registering the following roles to ActorSystem: ${systemConfig.roles}")

    // FIXME: This is bit unorthodox ...
    val akkaSystemUrl: Address = system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress
    log.info(s"ActorSystem successfully initialised at the following Akka URL: $akkaSystemUrl")
  }
} 
Example 14
Source File: ExternalAddressExt.scala    From ForestFlow   with Apache License 2.0
package ai.forestflow.akka.extensions

import akka.actor.{ActorRef, ActorSystem, Address, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}

//noinspection TypeAnnotation
object ExternalAddress extends ExtensionId[ExternalAddressExt] with ExtensionIdProvider {
  //The lookup method is required by ExtensionIdProvider,
  // so we return ourselves here, this allows us
  // to configure our extension to be loaded when
  // the ActorSystem starts
  override def lookup = ExternalAddress

  //This method will be called by Akka
  // to instantiate our Extension
  override def createExtension(system: ExtendedActorSystem): ExternalAddressExt =
    new ExternalAddressExt(system)

  
  override def get(system: ActorSystem): ExternalAddressExt = super.get(system)
}
class ExternalAddressExt(system: ExtendedActorSystem) extends Extension {
  def addressForAkka: Address = system.provider.getDefaultAddress
  def akkaActorRefFromString(refString: String): ActorRef = system.provider.resolveActorRef(refString)

} 
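The extension above is a common trick for recovering the system's remoting address, which a plain ActorSystem does not expose. A usage sketch, assuming remoting is enabled so the default address carries a host and port ("someActor" is a hypothetical path used only for illustration):

import akka.actor.ActorSystem

val system  = ActorSystem("addr-demo")
val address = ExternalAddress(system).addressForAkka // the system's default remote Address

val ref = ExternalAddress(system).akkaActorRefFromString(s"$address/user/someActor")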
Example 15
Source File: CrdtSerializers.scala    From cloudstate   with Apache License 2.0
package io.cloudstate.proxy.crdt

import akka.actor.{AddressFromURIString, ExtendedActorSystem}
import akka.cluster.UniqueAddress
import akka.serialization.{BaseSerializer, SerializerWithStringManifest}
import com.google.protobuf.UnsafeByteOperations
import io.cloudstate.proxy.crdt.protobufs.{CrdtVote, CrdtVoteEntry}

class CrdtSerializers(override val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {
  override def manifest(o: AnyRef): String = o match {
    case v: Vote => "V"
  }

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case v: Vote =>
      CrdtVote(v.state.toSeq.sortBy(_._1).map {
        case (address, value) =>
          CrdtVoteEntry(address.address.toString, address.longUid, UnsafeByteOperations.unsafeWrap(value.toByteArray))
      }).toByteArray
    case _ => throw new RuntimeException(s"Don't know how to serialize message of type [${o.getClass}]")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
    case "V" =>
      Vote(
        CrdtVote
          .parseFrom(bytes)
          .entries
          .map { entry =>
            (UniqueAddress(AddressFromURIString(entry.address), entry.uid), BigInt(entry.value.toByteArray))
          }
          .toMap,
        None
      )
    case _ => throw new RuntimeException(s"Don't know how to deserialize manifest [$manifest]")
  }
} 
Example 16
Source File: ProtobufAnySerializer.scala    From cloudstate   with Apache License 2.0
package io.cloudstate.proxy

import akka.serialization.{BaseSerializer, SerializerWithStringManifest}
import akka.actor.ExtendedActorSystem
import com.google.protobuf.ByteString
import com.google.protobuf.any.{Any => pbAny}

final class ProtobufAnySerializer(override val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {

  final override def manifest(o: AnyRef): String = o match {
    case any: pbAny => any.typeUrl
    case _ =>
      throw new IllegalArgumentException(s"$this only supports com.google.protobuf.any.Any, not ${o.getClass.getName}!")
  }

  final override def toBinary(o: AnyRef): Array[Byte] = o match {
    case any: pbAny => any.value.toByteArray
    case _ =>
      throw new IllegalArgumentException(s"$this only supports com.google.protobuf.any.Any, not ${o.getClass.getName}!")
  }

  final override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
    case null =>
      throw new IllegalArgumentException("null manifest detected instead of valid com.google.protobuf.any.Any.typeUrl")
    case typeUrl => pbAny(typeUrl, ByteString.copyFrom(bytes))
  }
} 
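Serializers like this one are never instantiated by hand: Akka creates them reflectively, passing the ExtendedActorSystem to the constructor, based on configuration. BaseSerializer also reads its identifier from the serialization-identifiers section (the same mechanism Example 8 configures). A sketch of the binding; the serializer key "proto-any" and the identifier 9001 are illustrative values, not taken from the cloudstate project:

import com.typesafe.config.ConfigFactory

val serializationConfig = ConfigFactory.parseString("""
  akka.actor {
    serializers {
      proto-any = "io.cloudstate.proxy.ProtobufAnySerializer"
    }
    serialization-identifiers {
      "io.cloudstate.proxy.ProtobufAnySerializer" = 9001
    }
    serialization-bindings {
      "com.google.protobuf.any.Any" = proto-any
    }
  }
""")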
Example 17
Source File: Settings.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import scala.annotation.nowarn
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._


@SuppressWarnings(Array("LooksLikeInterpolatedString", "OptionGet"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  val appConfig: AppConfig = {
    implicit val uriConverter: ConfigConvert[Uri]                 =
      ConfigConvert.viaString[Uri](catchReadError(s => Uri(s)), _.toString)
    implicit val pathConverter: ConfigConvert[Path]               =
      ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)
    implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
      ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)
    ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
  }

}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 18
Source File: JWKSetSerializerSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import akka.actor.{ActorSystem, ExtendedActorSystem}
import akka.serialization.Serialization
import akka.testkit.TestKit
import com.nimbusds.jose.jwk.JWKSet
import com.typesafe.config.ConfigFactory
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class JWKSetSerializerSpec
    extends TestKit(ActorSystem("JWKSetSerializerSpec", ConfigFactory.load("test.conf")))
    with AnyWordSpecLike
    with Matchers
    with TryValues {

  private val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])

  private val json =
    """
      |{
      |  "keys": [
      |    {
      |      "kid": "-JoF9COvvt7UhyhJMC-YlTF6piRlZgQKRQks5sPMKxw",
      |      "kty": "RSA",
      |      "alg": "RS256",
      |      "use": "sig",
      |      "n": "iEk11wBlv0I4pawBSY6ZYCLvwVslfCvjwvg5tIAg9n",
      |      "e": "AQAB"
      |    }
      |  ]
      |}
    """.stripMargin

  private val jwks = JWKSet.parse(json)

  "A JWKSetSerializer" should {

    "serialize and deserialize" in {
      val bytes = serialization.serialize(jwks).success.value
      val obj   = serialization.deserialize(bytes, classOf[JWKSet]).success.value
      jwks.toJSONObject shouldEqual obj.toJSONObject // JWKSet doesn't have a proper equals method
    }
  }
} 
Example 19
Source File: ResolverCacheSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver       = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver   =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
Example 20
Source File: Settings.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.service.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers.{catchReadError, optF}
import pureconfig.{ConfigConvert, ConfigSource}

import scala.annotation.nowarn


@SuppressWarnings(Array("LooksLikeInterpolatedString"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  implicit private val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  @nowarn("cat=unused")
  implicit private val permissionConverter: ConfigConvert[Permission] =
    ConfigConvert.viaString[Permission](optF(Permission(_)), _.toString)

  @nowarn("cat=unused")
  implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
    ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)

  @nowarn("cat=unused")
  implicit private val pathConverter: ConfigConvert[Path] =
    ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)

  @nowarn("cat=unused")
  implicit private val authTokenConverter: ConfigConvert[AccessToken] =
    ConfigConvert.viaString[AccessToken](catchReadError(s => AccessToken(s)), _.value)

  val serviceConfig: ServiceConfig =
    ConfigSource.fromConfig(config).at("app").loadOrThrow[ServiceConfig]
}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 21
Source File: CassandraHealth.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.service.routes

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.event.Logging
import akka.stream.alpakka.cassandra.CassandraSessionSettings
import akka.stream.alpakka.cassandra.scaladsl.CassandraSessionRegistry
import ch.epfl.bluebrain.nexus.sourcing.projections.Projections._

import scala.concurrent.{ExecutionContext, Future}

trait CassandraHealth extends Extension {

  
  def check: Future[Boolean]
}

object CassandraHealth extends ExtensionId[CassandraHealth] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = CassandraHealth

  override def createExtension(as: ExtendedActorSystem): CassandraHealth = {
    implicit val ec: ExecutionContext = as.dispatcher

    val log              = Logging(as, "CassandraHeathCheck")
    val keyspace: String = journalConfig(as).getString("keyspace")
    val session          = CassandraSessionRegistry.get(as).sessionFor(CassandraSessionSettings(cassandraDefaultConfigPath))

    new CassandraHealth {
      private val query = s"SELECT now() FROM $keyspace.messages;"

      override def check: Future[Boolean] = {
        session.selectOne(query).map(_ => true).recover {
          case err =>
            log.error("Error while attempting to query for health check", err)
            false
        }
      }
    }
  }
} 
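Because CassandraHealth is an ExtensionId, the health probe can be obtained from any ActorSystem; the ExtendedActorSystem is provided internally by Akka when it calls createExtension. A minimal usage sketch:

import akka.actor.ActorSystem
import scala.concurrent.Future

val system = ActorSystem("health-demo")
implicit val ec = system.dispatcher

val healthy: Future[Boolean] = CassandraHealth(system).check
healthy.foreach(ok => println(s"cassandra reachable: $ok"))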
Example 22
Source File: DefaultJournalCassandraSession.scala    From aecor   with MIT License
package akka.persistence.cassandra

import akka.Done
import akka.actor.{ ActorSystem, ExtendedActorSystem }
import akka.event.Logging
import akka.persistence.cassandra.Session.Init
import akka.persistence.cassandra.session.CassandraSessionSettings
import akka.persistence.cassandra.session.scaladsl.CassandraSession
import cats.effect.{ ContextShift, Effect }
import cats.implicits._

object DefaultJournalCassandraSession {

  
  def apply[F[_]: ContextShift](
    system: ActorSystem,
    metricsCategory: String,
    init: Init[F],
    sessionProvider: Option[SessionProvider] = None
  )(implicit F: Effect[F]): F[CassandraSession] = F.delay {
    val log = Logging(system, classOf[CassandraSession])
    val provider = sessionProvider.getOrElse(
      SessionProvider(
        system.asInstanceOf[ExtendedActorSystem],
        system.settings.config.getConfig("cassandra-journal")
      )
    )
    val settings = CassandraSessionSettings(system.settings.config.getConfig("cassandra-journal"))
    new CassandraSession(system, provider, settings, system.dispatcher, log, metricsCategory, { x =>
      F.toIO(init(Session[F](x)).as(Done)).unsafeToFuture()
    })
  }
} 
Example 23
Source File: UserSerializer.scala    From whirlwind-tour-akka-typed   with Apache License 2.0
package de.heikoseeberger.wtat

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import de.heikoseeberger.wtat.proto.{ User => UserProto }
import java.io.NotSerializableException

final class UserSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {

  override val identifier = 4242

  private final val UserManifest = "User"

  override def manifest(o: AnyRef) =
    o match {
      case _: User => UserManifest
      case _       => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def toBinary(o: AnyRef) =
    o match {
      case User(username, nickname) => UserProto(username.value, nickname.value).toByteArray
      case _                        => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def fromBinary(bytes: Array[Byte], manifest: String) = {
    def user(pb: UserProto) = User(pb.username, pb.nickname)
    manifest match {
      case UserManifest => user(UserProto.parseFrom(bytes))
      case _            => throw new NotSerializableException(s"Unknown manifest: $manifest!")
    }
  }
} 
Example 24
Source File: UserRepositorySerializer.scala    From whirlwind-tour-akka-typed   with Apache License 2.0
package de.heikoseeberger.wtat

import akka.actor.ExtendedActorSystem
import akka.actor.typed.{ ActorRef, ActorRefResolver }
import akka.serialization.SerializerWithStringManifest
import de.heikoseeberger.wtat.proto.userrepository.{
  AddUser => AddUserProto,
  RemoveUser => RemoveUserProto,
  UserAdded => UserAddedProto,
  UserRemoved => UserRemovedProto,
  UsernameTaken => UsernameTakenProto,
  UsernameUnknown => UsernameUnknownProto
}
import de.heikoseeberger.wtat.proto.{ User => UserProto }
import java.io.NotSerializableException

final class UserRepositorySerializer(system: ExtendedActorSystem)
    extends SerializerWithStringManifest {
  import UserRepository._
  import akka.actor.typed.scaladsl.adapter._

  override val identifier = 4243

  private final val AddUserManifest         = "AddUser"
  private final val UsernameTakenManifest   = "UsernameTaken"
  private final val UserAddedManifest       = "UserAdded"
  private final val RemoveUserManifest      = "RemoveUser"
  private final val UsernameUnknownManifest = "UsernameUnknown"
  private final val UserRemovedManifest     = "UserRemoved"

  private val resolver = ActorRefResolver(system.toTyped)

  override def manifest(o: AnyRef) =
    o match {
      case serializable: Serializable =>
        serializable match {
          case _: AddUser         => AddUserManifest
          case _: UsernameTaken   => UsernameTakenManifest
          case _: UserAdded       => UserAddedManifest
          case _: RemoveUser      => RemoveUserManifest
          case _: UsernameUnknown => UsernameUnknownManifest
          case _: UserRemoved     => UserRemovedManifest
        }
      case _ => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def toBinary(o: AnyRef) = {
    def userProto(user: User)      = UserProto(user.username.value, user.nickname.value)
    def toBinary(ref: ActorRef[_]) = resolver.toSerializationFormat(ref)
    val proto =
      o match {
        case serializable: Serializable =>
          serializable match {
            case AddUser(user, replyTo)        => AddUserProto(Some(userProto(user)), toBinary(replyTo))
            case UsernameTaken(username)       => UsernameTakenProto(username)
            case UserAdded(user)               => UserAddedProto(Some(userProto(user)))
            case RemoveUser(username, replyTo) => RemoveUserProto(username, toBinary(replyTo))
            case UsernameUnknown(username)     => UsernameUnknownProto(username)
            case UserRemoved(username)         => UserRemovedProto(username)
          }
        case _ => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
      }
    proto.toByteArray
  }

  override def fromBinary(bytes: Array[Byte], manifest: String) = {
    def addUser(proto: AddUserProto)                 = AddUser(user(proto.user.get), fromBinary(proto.replyTo))
    def usernameTaken(proto: UsernameTakenProto)     = UsernameTaken(proto.username)
    def userAdded(proto: UserAddedProto)             = UserAdded(user(proto.user.get))
    def user(proto: UserProto)                       = User(proto.username, proto.nickname).valueOr(_.fail)
    def removeUser(proto: RemoveUserProto)           = RemoveUser(proto.username, fromBinary(proto.replyTo))
    def usernameUnknown(proto: UsernameUnknownProto) = UsernameUnknown(proto.username)
    def userRemoved(proto: UserRemovedProto)         = UserRemoved(proto.username)
    def fromBinary(ref: String)                      = resolver.resolveActorRef(ref)
    manifest match {
      case AddUserManifest         => addUser(AddUserProto.parseFrom(bytes))
      case UsernameTakenManifest   => usernameTaken(UsernameTakenProto.parseFrom(bytes))
      case UserAddedManifest       => userAdded(UserAddedProto.parseFrom(bytes))
      case RemoveUserManifest      => removeUser(RemoveUserProto.parseFrom(bytes))
      case UsernameUnknownManifest => usernameUnknown(UsernameUnknownProto.parseFrom(bytes))
      case UserRemovedManifest     => userRemoved(UserRemovedProto.parseFrom(bytes))
      case _                       => throw new NotSerializableException(s"Unknown manifest: $manifest!")
    }
  }
} 
Example 25
Source File: ReadJournalSource.scala    From apache-spark-test   with Apache License 2.0
package akka.persistence.jdbc.spark.sql.execution.streaming

import akka.actor.{ ActorSystem, ExtendedActorSystem }
import akka.persistence.query.PersistenceQuery
import akka.persistence.query.scaladsl.{ CurrentEventsByPersistenceIdQuery, CurrentEventsByTagQuery, CurrentPersistenceIdsQuery, ReadJournal }
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.extension.{ Sink => Snk }
import akka.stream.{ ActorMaterializer, Materializer }
import org.apache.spark.sql._
import org.apache.spark.sql.execution.streaming.{ LongOffset, Offset, Source }
import org.apache.spark.sql.types.StructType

import scala.collection.immutable._
import scala.concurrent.duration.{ FiniteDuration, _ }
import scala.concurrent.{ Await, ExecutionContext, Future }

trait ReadJournalSource {
  _: Source =>
  def readJournalPluginId: String
  def sqlContext: SQLContext

  // some machinery
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher

  // read journal, only interested in the Current queries, as Spark isn't asynchronous
  lazy val readJournal = PersistenceQuery(system).readJournalFor(readJournalPluginId)
    .asInstanceOf[ReadJournal with CurrentPersistenceIdsQuery with CurrentEventsByPersistenceIdQuery with CurrentEventsByTagQuery]

  implicit class FutureOps[A](f: Future[A])(implicit ec: ExecutionContext, timeout: FiniteDuration = null) {
    def futureValue: A = Await.result(f, Option(timeout).getOrElse(10.seconds))
  }

  def maxPersistenceIds: Long =
    readJournal.currentPersistenceIds().runWith(Snk.count).futureValue

  def persistenceIds(start: Long, end: Long) =
    readJournal.currentPersistenceIds().drop(start).take(end).runWith(Sink.seq).futureValue

  def maxEventsByPersistenceId(pid: String): Long =
    readJournal.currentEventsByPersistenceId(pid, 0, Long.MaxValue).runWith(Snk.count).futureValue

  def eventsByPersistenceId(pid: String, start: Long, end: Long, eventMapperFQCN: String): Seq[Row] = {
    readJournal.currentEventsByPersistenceId(pid, start, end)
      .map(env => getMapper(eventMapperFQCN).get.row(env, sqlContext)).runWith(Sink.seq).futureValue
  }

  implicit def mapToDataFrame(rows: Seq[Row]): DataFrame = {
    import scala.collection.JavaConversions._
    sqlContext.createDataFrame(rows, schema)
  }

  def getStartEnd(_start: Option[Offset], _end: Offset): (Long, Long) = (_start, _end) match {
    case (Some(LongOffset(start)), LongOffset(end)) => (start, end)
    case (None, LongOffset(end))                    => (0L, end)
  }

  def getMapper(eventMapperFQCN: String): Option[EventMapper] =
    system.asInstanceOf[ExtendedActorSystem].dynamicAccess.createInstanceFor[EventMapper](eventMapperFQCN, List.empty)
      .recover { case cause => cause.printStackTrace(); null }.toOption

  override def stop(): Unit = {
    println("Stopping jdbc read journal")
    system.terminate()
  }
} 
Example 26
Source File: CoordinatorTests.scala    From sparkplug   with MIT License
package springnz.sparkplug.client

import akka.actor.{ ExtendedActorSystem, ActorRef, ActorSystem }
import akka.pattern.ask
import akka.testkit.{ ImplicitSender, TestKit }
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import org.scalatest._
import springnz.sparkplug.executor.MessageTypes.{ JobFailure, JobRequest, JobSuccess, ShutDown }

import scala.concurrent.Await
import scala.concurrent.duration._

import scala.collection.JavaConverters._

class CoordinatorTests(_system: ActorSystem)
    extends TestKit(_system) with ImplicitSender with WordSpecLike with BeforeAndAfterAll with Matchers {

  def this() = this(ActorSystem(Constants.actorSystemName, ConfigFactory.parseMap(Map(
    "akka.remote.netty.tcp.port" -> new Integer(0)).asJava).withFallback(ClientExecutor.defaultClientAkkaConfig)))

  var coordinator: ActorRef = null

  "client coordinator" should {

    "successfuly execute a job request" in {
      val request = JobRequest("springnz.sparkplug.executor.LetterCountPlugin", None)
      coordinator ! request
      expectMsg[JobSuccess](30.seconds, JobSuccess(request, (2, 2)))
    }

    "successfuly execute a job request after a failure" in {
      val invalidRequest = JobRequest("springnz.sparkplug.executor.InvalidClass", None)
      coordinator ! invalidRequest
      expectMsgType[JobFailure](30.seconds)
      val goodRequest = JobRequest("springnz.sparkplug.executor.LetterCountPlugin", None)
      coordinator ! goodRequest
      expectMsg[JobSuccess](30.seconds, JobSuccess(goodRequest, (2, 2)))
    }

    "work with the ask pattern as well" in {
      implicit val timeout = Timeout(30.seconds)
      val request = JobRequest("springnz.sparkplug.executor.LetterCountPlugin", None)
      val replyFuture = coordinator ? request
      val result = Await.result(replyFuture, 30.seconds)
      result shouldBe JobSuccess(request, (2, 2))
    }

  }

  override def beforeAll {
    val configSection = s"sparkplug.${springnz.sparkplug.executor.Constants.defaultAkkaRemoteConfigSection}"
    val port = system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress.port.get
    val akkaClientConfig = ConfigFactory.parseMap(Map(
      "akka.remote.netty.tcp.port" -> new Integer(port)).asJava).withFallback(ClientExecutor.defaultClientAkkaConfig)
    coordinator = system.actorOf(Coordinator.props(None,
      akkaRemoteConfig = Some(ConfigFactory.load.getConfig(configSection)),
      akkaClientConfig = akkaClientConfig), "TestCoordinator")
  }

  override def afterAll {
    system.actorSelection(s"/user/TestCoordinator") ! ShutDown
    TestKit.shutdownActorSystem(system, verifySystemShutdown = true)
  }

} 
Example 27
Source File: ResolverCacheSpec.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
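In application code the serialization machinery is usually obtained through the SerializationExtension rather than constructed directly as the spec above does for testing. A minimal lookup sketch (the system name is arbitrary):

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension

object SerializationLookup extends App {
  val system = ActorSystem("serialization-lookup")
  // Resolves the serializer bound to the value's class via the
  // akka.actor.serialization-bindings configuration.
  val serialization = SerializationExtension(system)
  val serializer = serialization.findSerializerFor("some payload")
  println(serializer.identifier)
  system.terminate()
}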
Example 28
Source File: MassCore.scala    From fusion-data   with Apache License 2.0
package mass.extension

import java.nio.file.{ Files, Path, Paths }

import akka.actor.ExtendedActorSystem
import akka.serialization.jackson.JacksonObjectMapperProvider
import com.fasterxml.jackson.databind.ObjectMapper
import com.typesafe.scalalogging.StrictLogging
import fusion.common.extension.{ FusionExtension, FusionExtensionId }
import fusion.core.extension.FusionCore
import mass.MassSettings
import mass.core.Constants


final class MassCore private (override val classicSystem: ExtendedActorSystem)
    extends FusionExtension
    with StrictLogging {
  FusionCore(classicSystem)
  val settings: MassSettings = MassSettings(classicSystem.settings.config)
  val jsonMapper: ObjectMapper = JacksonObjectMapperProvider(classicSystem).getOrCreate(Constants.JACKSON_JSON, None)
  val cborMapper: ObjectMapper = JacksonObjectMapperProvider(classicSystem).getOrCreate(Constants.JACKSON_CBOR, None)
  val tempDirectory: Path = {
    val _tempDirectory = Paths.get(
      configuration.getOrElse[String](s"${Constants.BASE_CONF}.core.temp-dir", System.getProperty("java.io.tmpdir")))
    if (!Files.isDirectory(_tempDirectory)) {
      Files.createDirectories(_tempDirectory)
    }
    _tempDirectory
  }

  logger.info(configuration.getConfig(Constants.BASE_CONF).toString)

  def name: String = configuration.getString(s"${Constants.BASE_CONF}.name")

  override def toString = s"MassCore($classicSystem)"
}

object MassCore extends FusionExtensionId[MassCore] {
  override def createExtension(system: ExtendedActorSystem): MassCore = new MassCore(system)
} 
Example 29
Source File: RdpSystem.scala    From fusion-data   with Apache License 2.0
package mass.rdp

import akka.actor.ExtendedActorSystem
import akka.stream.Materializer
import com.typesafe.scalalogging.StrictLogging
import fusion.common.extension.{ FusionExtension, FusionExtensionId }
import mass.connector.ConnectorSystem
import mass.core.Constants
import mass.extension.MassCore
import mass.rdp.etl.graph.{ EtlGraphParserFactory, EtlStreamFactory }
import mass.rdp.module.RdpModule

import scala.util.{ Failure, Success }

trait RdpRefFactory {
  def settings: MassCore

  def connectorSystem: ConnectorSystem
}

private[rdp] class RdpSetup(val system: ExtendedActorSystem) extends StrictLogging {
  val massCore = MassCore(system)

  val extensions: Vector[RdpModule] =
    massCore.configuration
      .get[Seq[String]](s"${Constants.BASE_CONF}.rdp.extensions")
      .flatMap { className =>
        system.dynamicAccess.createInstanceFor[RdpModule](className, Nil) match {
          case Success(v) => Some(v)
          case Failure(e) =>
            logger.warn(s"初始化找到未知RdpExtension", e)
            None
        }
      }
      .toVector

  def initialStreamFactories(): Map[String, EtlStreamFactory] = {
    val list = extensions.flatMap(_.etlStreamBuilders) ++
      massCore.configuration.get[Seq[String]](s"${Constants.BASE_CONF}.rdp.stream-builders").flatMap { className =>
        system.dynamicAccess.createInstanceFor[EtlStreamFactory](className, Nil) match {
          case Success(v) => Some(v)
          case Failure(e) =>
            logger.warn(s"初始化找到未知EtlStreamBuilder", e)
            None
        }
      }
    list.map(v => v.`type` -> v).toMap
  }

  def initialGraphParserFactories(): Map[String, EtlGraphParserFactory] =
    extensions.flatMap(_.graphParserFactories).map(v => v.`type` -> v).toMap
}


final class RdpSystem private (override val classicSystem: ExtendedActorSystem)
    extends RdpRefFactory
    with FusionExtension
    with StrictLogging {
  override val connectorSystem: ConnectorSystem = ConnectorSystem(classicSystem)
  implicit val materializer: Materializer = Materializer.matFromSystem(classicSystem)

  private val setup = new RdpSetup(classicSystem)

  protected var _streamFactories: Map[String, EtlStreamFactory] = setup.initialStreamFactories()

  protected var _graphParserFactories: Map[String, EtlGraphParserFactory] = setup.initialGraphParserFactories()

  def streamFactories: Map[String, EtlStreamFactory] = _streamFactories

  def registerSourceBuilder(b: EtlStreamFactory): Unit = {
    logger.info(s"注册EtlSourceBuilder: $b")
    _streamFactories = _streamFactories.updated(b.`type`, b)
  }

  def graphParserFactories: Map[String, EtlGraphParserFactory] =
    _graphParserFactories

  def registerGraphParserFactories(b: EtlGraphParserFactory): Unit = {
    logger.info(s"Registering EtlGraphParserFactory: $b")
    _graphParserFactories = _graphParserFactories.updated(b.`type`, b)
  }
  override def settings: MassCore = setup.massCore
  def name: String = classicSystem.name
}

object RdpSystem extends FusionExtensionId[RdpSystem] {
  override def createExtension(system: ExtendedActorSystem): RdpSystem = new RdpSystem(system)
} 
Example 30
Source File: ConnectorSystem.scala    From fusion-data   with Apache License 2.0
package mass.connector

import java.nio.file.Path

import akka.Done
import akka.actor.ExtendedActorSystem
import com.typesafe.scalalogging.StrictLogging
import fusion.common.extension.{ FusionExtension, FusionExtensionId }
import fusion.core.extension.FusionCore
import mass.core.Constants

import scala.concurrent.Future
import scala.util.{ Failure, Success }

final class ConnectorSystem private (override val classicSystem: ExtendedActorSystem)
    extends FusionExtension
    with StrictLogging {
  private var _parsers = Map.empty[String, ConnectorParser]
  private var _connectors = Map.empty[String, Connector]

  init()

  private def init(): Unit = {
    configuration.get[Seq[String]](s"${Constants.BASE_CONF}.connector.parsers").foreach { className =>
      classicSystem.dynamicAccess.createInstanceFor[ConnectorParser](className, Nil) match {
        case Success(parse) => registerConnectorParser(parse)
        case Failure(e)     => logger.error("Unknown ConnectorParser", e)
      }
    }
    FusionCore(classicSystem).shutdowns.serviceUnbind("ConnectorSystem") { () =>
      Future {
        connectors.foreach { case (_, c) => c.close() }
        Done
      }(classicSystem.dispatcher)
    }
  }

  def name: String = classicSystem.name

  def getConnector(name: String): Option[Connector] = _connectors.get(name)

  def connectors: Map[String, Connector] = _connectors

  def registerConnector(c: Connector): Map[String, Connector] = {
    _connectors = _connectors.updated(c.name, c)
    _connectors
  }

  def parsers: Map[String, ConnectorParser] = _parsers

  def registerConnectorParser(parse: ConnectorParser): Map[String, ConnectorParser] = {
    _parsers = _parsers.updated(parse.`type`, parse)
    logger.info(s"注册Connector解析器:$parse,当前数量:${parsers.size}")
    parsers
  }

  def fromFile(path: Path): Option[Connector] = ???

  def fromXML(node: scala.xml.Node): Option[Connector] = {
    import mass.core.XmlUtils.XmlRich
    val maybeParser = parsers.get(node.attr("type"))
    maybeParser.map(cp => cp.parseFromXML(node))
  }
}

object ConnectorSystem extends FusionExtensionId[ConnectorSystem] {
  override def createExtension(system: ExtendedActorSystem): ConnectorSystem = new ConnectorSystem(system)
} 
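The reflective plugin loading used in the last few examples relies on ExtendedActorSystem.dynamicAccess, which instantiates a class by fully qualified name and returns a Try. A self-contained sketch (java.lang.Thread stands in for a real plugin class):

import akka.actor.{ActorSystem, ExtendedActorSystem}
import scala.util.{Failure, Success}

object DynamicAccessSketch extends App {
  val system = ActorSystem("plugins").asInstanceOf[ExtendedActorSystem]
  // createInstanceFor invokes the no-arg constructor when no arguments are given.
  system.dynamicAccess.createInstanceFor[Runnable]("java.lang.Thread", Nil) match {
    case Success(instance) => println(s"loaded: ${instance.getClass.getName}")
    case Failure(e)        => println(s"failed to load plugin: $e")
  }
  system.terminate()
}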
Example 31
Source File: SSLContextProvider.scala    From service-container   with Apache License 2.0
package com.github.vonnagy.service.container.security

import javax.net.ssl.SSLContext

import akka.actor.{ActorSystem, ExtendedActorSystem}
import com.github.vonnagy.service.container.log.LoggingAdapter
import com.typesafe.sslconfig.akka.AkkaSSLConfig
import com.typesafe.sslconfig.akka.util.AkkaLoggerFactory
import com.typesafe.sslconfig.ssl.{ConfigSSLContextBuilder, SSLConfigFactory}

trait SSLContextProvider extends LoggingAdapter {

  // The actor system
  implicit def system: ActorSystem
  // The namespace of the SSL configuration
  implicit  def configNamespace: String
  // Is this a client or server SSL configuration
  def isClient: Boolean

  lazy val sslConfig = new AkkaSSLConfig(system.asInstanceOf[ExtendedActorSystem], {
    val containerOverrides = system.settings.config.getConfig(configNamespace)
    val akkaOverrides = system.settings.config.getConfig("akka.ssl-config")
    val defaults = system.settings.config.getConfig("ssl-config")
    SSLConfigFactory.parse(containerOverrides withFallback akkaOverrides withFallback defaults)
  })

  implicit def sslContext = if (sslConfig.config.default) {
    log.info("ssl.default is true, using the JDK's default SSLContext")
    sslConfig.validateDefaultTrustManager(sslConfig.config)
    SSLContext.getDefault
  } else {
    // break out the static methods as much as we can...
    val keyManagerFactory = sslConfig.buildKeyManagerFactory( sslConfig.config)
    val trustManagerFactory = sslConfig.buildTrustManagerFactory( sslConfig.config)
    new ConfigSSLContextBuilder(new AkkaLoggerFactory(system), sslConfig.config, keyManagerFactory, trustManagerFactory).build()
  }
}

trait SSLServerContextProvider extends SSLContextProvider {

  def isClient = false

}

trait SSLClientContextProvider extends SSLContextProvider {

  def isClient = true

} 
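To use the trait, a concrete class supplies the actor system and the configuration namespace. A hypothetical server-side mix-in (the namespace "my-service.ssl" is an assumption, not part of the library):

import akka.actor.ActorSystem
import com.github.vonnagy.service.container.security.SSLServerContextProvider

// hypothetical: binds the provider to a config section named "my-service.ssl"
class MyServerSslProvider(implicit val system: ActorSystem) extends SSLServerContextProvider {
  implicit def configNamespace: String = "my-service.ssl"
}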
Example 32
Source File: BankAccountEventJSONSerializer.scala    From akka-ddd-cqrs-es-example   with MIT License
package com.github.j5ik2o.bank.adaptor.serialization

import akka.actor.ExtendedActorSystem
import akka.event.{ Logging, LoggingAdapter }
import akka.serialization.SerializerWithStringManifest
import com.github.j5ik2o.bank.domain.model._
import org.slf4j.LoggerFactory
import pureconfig._

object BankAccountEventJSONManifest {
  final val CREATE   = BankAccountOpened.getClass.getName.stripSuffix("$")
  final val UPDATE   = BankAccountEventUpdated.getClass.getName.stripSuffix("$")
  final val DEPOSIT  = BankAccountDeposited.getClass.getName.stripSuffix("$")
  final val WITHDRAW = BankAccountWithdrawn.getClass.getName.stripSuffix("$")
  final val DESTROY  = BankAccountClosed.getClass.getName.stripSuffix("$")
}

class BankAccountEventJSONSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {
  import BankAccountCreatedJson._
  import BankAccountEventJSONManifest._
  import io.circe.generic.auto._

  private val logger = LoggerFactory.getLogger(getClass)

  private val config = loadConfigOrThrow[BankAccountEventJSONSerializerConfig](
    system.settings.config.getConfig("bank.interface.bank-account-event-json-serializer")
  )

  private implicit val log: LoggingAdapter = Logging.getLogger(system, getClass)

  private val isDebugEnabled = config.isDebuged

  override def identifier: Int = 50

  override def manifest(o: AnyRef): String = {
    val result = o.getClass.getName
    logger.debug(s"manifest: $result")
    result
  }

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case orig: BankAccountOpened       => CirceJsonSerialization.toBinary(orig, isDebugEnabled)
    case orig: BankAccountEventUpdated => CirceJsonSerialization.toBinary(orig, isDebugEnabled)
    case orig: BankAccountDeposited    => CirceJsonSerialization.toBinary(orig, isDebugEnabled)
    case orig: BankAccountWithdrawn    => CirceJsonSerialization.toBinary(orig, isDebugEnabled)
    case orig: BankAccountClosed       => CirceJsonSerialization.toBinary(orig, isDebugEnabled)
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = {
    logger.debug(s"fromBinary: $manifest")
    manifest match {
      case CREATE =>
        CirceJsonSerialization.fromBinary[BankAccountOpened, BankAccountCreatedJson](bytes, isDebugEnabled)
      case UPDATE =>
        CirceJsonSerialization.fromBinary[BankAccountEventUpdated, BankAccountUpdatedJson](bytes, isDebugEnabled)
      case DEPOSIT =>
        CirceJsonSerialization.fromBinary[BankAccountDeposited, BankAccountDepositedJson](bytes, isDebugEnabled)
      case WITHDRAW =>
        CirceJsonSerialization.fromBinary[BankAccountWithdrawn, BankAccountWithdrawedJson](bytes, isDebugEnabled)
      case DESTROY =>
        CirceJsonSerialization.fromBinary[BankAccountClosed, BankAccountDestroyedJson](bytes, isDebugEnabled)
    }
  }
} 
Example 33
Source File: EventSerializer.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import java.nio.charset.Charset

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent
import ch.epfl.bluebrain.nexus.iam.types.GrantType.Camel._
import ch.epfl.bluebrain.nexus.rdf.Iri.Url
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Settings
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.parser._
import io.circe.syntax._
import io.circe.{Decoder, Encoder, Printer}


class EventSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {
  private val utf8 = Charset.forName("UTF-8")

  private val printer = Printer.noSpaces.copy(dropNullValues = true)

  implicit private[io] val http: HttpConfig = Settings(system).serviceConfig.http

  implicit private[io] val config: Configuration = Configuration.default.withDiscriminator("@type")

  implicit private[io] val urlEncoder: Encoder[Url] =
    Encoder.encodeString.contramap(_.asUri)
  implicit private[io] val urlDecoder: Decoder[Url] =
    Decoder.decodeString.emap(Url.apply)

  implicit private[io] val permissionEventEncoder: Encoder[PermissionsEvent] = deriveConfiguredEncoder[PermissionsEvent]
  implicit private[io] val permissionEventDecoder: Decoder[PermissionsEvent] = deriveConfiguredDecoder[PermissionsEvent]
  implicit private[io] val aclEventEncoder: Encoder[AclEvent]                = deriveConfiguredEncoder[AclEvent]
  implicit private[io] val aclEventDecoder: Decoder[AclEvent]                = deriveConfiguredDecoder[AclEvent]
  implicit private[io] val realmEventEncoder: Encoder[RealmEvent]            = deriveConfiguredEncoder[RealmEvent]
  implicit private[io] val realmEventDecoder: Decoder[RealmEvent]            = deriveConfiguredDecoder[RealmEvent]

  override val identifier: Int = 1225

  override def manifest(o: AnyRef): String                              =
    o match {
      case _: PermissionsEvent => "permissions-event"
      case _: AclEvent         => "acl-event"
      case _: RealmEvent       => "realm-event"
      case other               =>
        throw new IllegalArgumentException(
          s"Cannot determine manifest for unknown type: '${other.getClass.getCanonicalName}'"
        )
    }
  override def toBinary(o: AnyRef): Array[Byte]                         =
    o match {
      case ev: PermissionsEvent => ev.asJson.printWith(printer).getBytes(utf8)
      case ev: AclEvent         => ev.asJson.printWith(printer).getBytes(utf8)
      case ev: RealmEvent       => ev.asJson.printWith(printer).getBytes(utf8)
      case other                =>
        throw new IllegalArgumentException(s"Cannot serialize unknown type: '${other.getClass.getCanonicalName}'")
    }
  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    manifest match {
      case "permissions-event" =>
        val str = new String(bytes, utf8)
        decode[PermissionsEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'PermissionsEvent'"))
      case "acl-event"         =>
        val str = new String(bytes, utf8)
        decode[AclEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'AclEvent'"))
      case "realm-event"       =>
        val str = new String(bytes, utf8)
        decode[RealmEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'RealmEvent'"))
      case other               =>
        throw new IllegalArgumentException(s"Cannot deserialize type with unknown manifest: '$other'")
    }
} 
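For the serializer to be picked up, it has to be registered and bound to the event types in the actor system configuration. A sketch of that wiring (the serializer key "iam-event" is an arbitrary name chosen here):

import com.typesafe.config.ConfigFactory

object SerializerBindingSketch extends App {
  val config = ConfigFactory.parseString(
    """akka.actor {
      |  serializers {
      |    iam-event = "ch.epfl.bluebrain.nexus.iam.io.EventSerializer"
      |  }
      |  serialization-bindings {
      |    "ch.epfl.bluebrain.nexus.iam.acls.AclEvent" = iam-event
      |    "ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent" = iam-event
      |    "ch.epfl.bluebrain.nexus.iam.realms.RealmEvent" = iam-event
      |  }
      |}""".stripMargin)
  println(config.getString("akka.actor.serializers.iam-event"))
}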
Example 34
Source File: PersistentRepr.scala    From aecor   with MIT License
package aecor.runtime.akkapersistence.serialization

import akka.actor.ExtendedActorSystem
import akka.serialization.{ BaseSerializer, SerializerWithStringManifest }

final case class PersistentRepr(manifest: String, payload: Array[Byte])

class PersistentReprSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case pr: PersistentRepr => pr.payload
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def manifest(o: AnyRef): String = o match {
    case pr: PersistentRepr => pr.manifest
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    PersistentRepr(manifest, bytes)
} 
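A round-trip sketch: BaseSerializer reads its numeric identifier from configuration, so one must be supplied when instantiating the serializer directly (the value 120 below is arbitrary, and the aecor artifacts are assumed on the classpath):

import aecor.runtime.akkapersistence.serialization.{PersistentRepr, PersistentReprSerializer}
import akka.actor.{ActorSystem, ExtendedActorSystem}
import com.typesafe.config.ConfigFactory

object PersistentReprRoundTrip extends App {
  val config = ConfigFactory.parseString(
    """akka.actor.serialization-identifiers {
      |  "aecor.runtime.akkapersistence.serialization.PersistentReprSerializer" = 120
      |}""".stripMargin).withFallback(ConfigFactory.load())
  val system = ActorSystem("round-trip", config)
  val serializer = new PersistentReprSerializer(system.asInstanceOf[ExtendedActorSystem])

  val repr = PersistentRepr("MyEvent", Array[Byte](1, 2, 3))
  // toBinary emits only the payload; the manifest travels separately.
  val restored = serializer.fromBinary(serializer.toBinary(repr), serializer.manifest(repr))
  println(restored == repr) // true: toBinary returns the payload array itself
  system.terminate()
}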
Example 35
Source File: MessageSerializer.scala    From aecor   with MIT License
package aecor.runtime.akkapersistence.serialization

import aecor.runtime.akkapersistence.AkkaPersistenceRuntime.EntityCommand
import aecor.runtime.akkapersistence.AkkaPersistenceRuntimeActor.{ CommandResult, HandleCommand }
import akka.actor.ExtendedActorSystem
import akka.serialization.{ BaseSerializer, SerializerWithStringManifest }
import com.google.protobuf.ByteString
import scodec.bits.BitVector

import scala.collection.immutable._

class MessageSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {

  val HandleCommandManifest = "A"
  val EntityCommandManifest = "B"
  val CommandResultManifest = "C"

  private val fromBinaryMap =
    HashMap[String, Array[Byte] => AnyRef](
      HandleCommandManifest -> handleCommandFromBinary,
      EntityCommandManifest -> entityCommandFromBinary,
      CommandResultManifest -> commandResultFromBinary
    )

  override def manifest(o: AnyRef): String = o match {
    case HandleCommand(_)    => HandleCommandManifest
    case EntityCommand(_, _) => EntityCommandManifest
    case CommandResult(_)    => CommandResultManifest
    case x                   => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case x @ HandleCommand(_) =>
      x.commandBytes.toByteArray
    case _ @CommandResult(resultBytes) =>
      resultBytes.toByteArray
    case x @ EntityCommand(_, _) =>
      entityCommandToBinary(x)
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    fromBinaryMap.get(manifest) match {
      case Some(f) => f(bytes)
      case other   => throw new IllegalArgumentException(s"Unknown manifest [$other]")
    }

  private def entityCommandToBinary(a: EntityCommand): Array[Byte] =
    msg.EntityCommand(a.entityKey, ByteString.copyFrom(a.commandBytes.toByteBuffer)).toByteArray

  private def entityCommandFromBinary(bytes: Array[Byte]): EntityCommand =
    msg.EntityCommand.parseFrom(bytes) match {
      case msg.EntityCommand(entityId, commandBytes) =>
        EntityCommand(entityId, BitVector(commandBytes.asReadOnlyByteBuffer))
    }

  private def handleCommandFromBinary(bytes: Array[Byte]): HandleCommand =
    HandleCommand(BitVector(bytes))

  private def commandResultFromBinary(bytes: Array[Byte]): CommandResult =
    CommandResult(BitVector(bytes))

} 
Example 36
Source File: MessageSerializer.scala    From aecor   with MIT License
package aecor.runtime.akkageneric.serialization

import aecor.runtime.akkageneric.GenericAkkaRuntime.KeyedCommand
import aecor.runtime.akkageneric.GenericAkkaRuntimeActor.{ Command, CommandResult }
import akka.actor.ExtendedActorSystem
import akka.serialization.{ BaseSerializer, SerializerWithStringManifest }
import com.google.protobuf.ByteString
import scodec.bits.BitVector

import scala.collection.immutable.HashMap

class MessageSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {

  val KeyedCommandManifest = "A"
  val CommandManifest = "B"
  val CommandResultManifest = "C"

  private val fromBinaryMap =
    HashMap[String, Array[Byte] => AnyRef](
      KeyedCommandManifest -> keyedCommandFromBinary,
      CommandManifest -> commandFromBinary,
      CommandResultManifest -> commandResultFromBinary
    )

  override def manifest(o: AnyRef): String = o match {
    case KeyedCommand(_, _) => KeyedCommandManifest
    case Command(_)         => CommandManifest
    case CommandResult(_)   => CommandResultManifest
    case x                  => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case Command(bytes) =>
      bytes.toByteArray
    case CommandResult(bytes) =>
      bytes.toByteArray
    case x @ KeyedCommand(_, _) =>
      entityCommandToBinary(x)
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    fromBinaryMap.get(manifest) match {
      case Some(f) => f(bytes)
      case other   => throw new IllegalArgumentException(s"Unknown manifest [$other]")
    }

  private def entityCommandToBinary(a: KeyedCommand): Array[Byte] =
    msg.KeyedCommand(a.key, ByteString.copyFrom(a.bytes.toByteBuffer)).toByteArray

  private def keyedCommandFromBinary(bytes: Array[Byte]): KeyedCommand =
    msg.KeyedCommand.parseFrom(bytes) match {
      case msg.KeyedCommand(key, commandBytes) =>
        KeyedCommand(key, BitVector(commandBytes.asReadOnlyByteBuffer()))
    }

  private def commandFromBinary(bytes: Array[Byte]): Command =
    Command(BitVector(bytes))

  private def commandResultFromBinary(bytes: Array[Byte]): CommandResult =
    CommandResult(BitVector(bytes))
} 
Example 37
Source File: MessageSerializer.scala    From aecor   with MIT License
package aecor.distributedprocessing.serialization

import aecor.distributedprocessing.DistributedProcessingWorker.KeepRunning
import akka.actor.ExtendedActorSystem
import akka.serialization.{ BaseSerializer, SerializerWithStringManifest }

class MessageSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {
  val KeepRunningManifest = "A"
  override def manifest(o: AnyRef): String = o match {
    case KeepRunning(_) => KeepRunningManifest
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case KeepRunning(workerId) => msg.KeepRunning(workerId).toByteArray
    case x => throw new IllegalArgumentException(s"Serialization of [$x] is not supported")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    manifest match {
      case KeepRunningManifest =>
        KeepRunning(msg.KeepRunning.parseFrom(bytes).workerId)
      case other => throw new IllegalArgumentException(s"Unknown manifest [$other]")
    }
} 
Example 38
Source File: JWKSetSerializerSpec.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import akka.actor.{ActorSystem, ExtendedActorSystem}
import akka.serialization.Serialization
import akka.testkit.TestKit
import com.nimbusds.jose.jwk.JWKSet
import com.typesafe.config.ConfigFactory
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class JWKSetSerializerSpec
    extends TestKit(ActorSystem("JWKSetSerializerSpec", ConfigFactory.load("akka-test.conf")))
    with AnyWordSpecLike
    with Matchers
    with TryValues {

  private val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])

  private val json =
    """
      |{
      |  "keys": [
      |    {
      |      "kid": "-JoF9COvvt7UhyhJMC-YlTF6piRlZgQKRQks5sPMKxw",
      |      "kty": "RSA",
      |      "alg": "RS256",
      |      "use": "sig",
      |      "n": "iEk11wBlv0I4pawBSY6ZYCLvwVslfCvjwvg5tIAg9n",
      |      "e": "AQAB"
      |    }
      |  ]
      |}
    """.stripMargin

  private val jwks = JWKSet.parse(json)

  "A JWKSetSerializer" should {

    "serialize and deserialize" in {
      val bytes = serialization.serialize(jwks).success.value
      val obj   = serialization.deserialize(bytes, classOf[JWKSet]).success.value
      jwks.toJSONObject shouldEqual obj.toJSONObject // JWKSet doesn't have a proper equals method
    }
  }
} 
Example 39
Source File: EventSerializer.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import java.nio.charset.Charset

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Settings
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent
import ch.epfl.bluebrain.nexus.iam.types.GrantType.Camel._
import ch.epfl.bluebrain.nexus.rdf.Iri.Url
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.parser._
import io.circe.syntax._
import io.circe.{Decoder, Encoder, Printer}


class EventSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {
  private val utf8 = Charset.forName("UTF-8")

  private val printer = Printer.noSpaces.copy(dropNullValues = true)

  private[io] implicit val http: HttpConfig = Settings(system).appConfig.http

  private[io] implicit val config: Configuration = Configuration.default.withDiscriminator("@type")

  private[io] implicit val urlEncoder: Encoder[Url] =
    Encoder.encodeString.contramap(_.asUri)
  private[io] implicit val urlDecoder: Decoder[Url] =
    Decoder.decodeString.emap(Url.apply)

  private[io] implicit val permissionEventEncoder: Encoder[PermissionsEvent] = deriveConfiguredEncoder[PermissionsEvent]
  private[io] implicit val permissionEventDecoder: Decoder[PermissionsEvent] = deriveConfiguredDecoder[PermissionsEvent]
  private[io] implicit val aclEventEncoder: Encoder[AclEvent]                = deriveConfiguredEncoder[AclEvent]
  private[io] implicit val aclEventDecoder: Decoder[AclEvent]                = deriveConfiguredDecoder[AclEvent]
  private[io] implicit val realmEventEncoder: Encoder[RealmEvent]            = deriveConfiguredEncoder[RealmEvent]
  private[io] implicit val realmEventDecoder: Decoder[RealmEvent]            = deriveConfiguredDecoder[RealmEvent]

  override val identifier: Int = 1225

  override def manifest(o: AnyRef): String = o match {
    case _: PermissionsEvent => "permissions-event"
    case _: AclEvent         => "acl-event"
    case _: RealmEvent       => "realm-event"
    case other =>
      throw new IllegalArgumentException(
        s"Cannot determine manifest for unknown type: '${other.getClass.getCanonicalName}'"
      )
  }
  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case ev: PermissionsEvent => ev.asJson.printWith(printer).getBytes(utf8)
    case ev: AclEvent         => ev.asJson.printWith(printer).getBytes(utf8)
    case ev: RealmEvent       => ev.asJson.printWith(printer).getBytes(utf8)
    case other =>
      throw new IllegalArgumentException(s"Cannot serialize unknown type: '${other.getClass.getCanonicalName}'")
  }
  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
    case "permissions-event" =>
      val str = new String(bytes, utf8)
      decode[PermissionsEvent](str)
        .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'PermissionsEvent'"))
    case "acl-event" =>
      val str = new String(bytes, utf8)
      decode[AclEvent](str)
        .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'AclEvent'"))
    case "realm-event" =>
      val str = new String(bytes, utf8)
      decode[RealmEvent](str)
        .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'RealmEvent'"))
    case other =>
      throw new IllegalArgumentException(s"Cannot deserialize type with unknown manifest: '$other'")
  }
} 
Example 40
Source File: Settings.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.config

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.iam.types.Permission
import com.github.ghik.silencer.silent
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._


@SuppressWarnings(Array("LooksLikeInterpolatedString"))
class Settings(config: Config) extends Extension {

  @silent // not recognized as used... but it is below
  private implicit val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  @silent // not recognized as used... but it is below
  private implicit val permissionConverter: ConfigConvert[Permission] =
    ConfigConvert.viaString[Permission](optF(Permission(_)), _.toString)

  val appConfig: AppConfig =
    ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
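Being an Akka extension, the settings object is created once per actor system and cached. A minimal usage sketch, assuming the service's "app" configuration section is on the classpath (loadOrThrow fails otherwise):

import akka.actor.ActorSystem
import ch.epfl.bluebrain.nexus.iam.config.Settings

object SettingsSketch extends App {
  val system = ActorSystem("iam")
  // ExtensionId.apply caches the instance; repeated calls return the same Settings.
  val appConfig = Settings(system).appConfig
  system.terminate()
}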
Example 41
Source File: CassandraHeath.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.routes

import akka.Done
import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.event.Logging
import akka.persistence.cassandra.CassandraPluginConfig
import akka.persistence.cassandra.session.scaladsl.CassandraSession

import scala.concurrent.Future

trait CassandraHeath extends Extension {

  /** Runs a lightweight query against the journal keyspace to verify Cassandra connectivity. */
  def check: Future[Boolean]
}

object CassandraHeath extends ExtensionId[CassandraHeath] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = CassandraHeath

  override def createExtension(as: ExtendedActorSystem): CassandraHeath = {
    implicit val ec = as.dispatcher
    val log         = Logging(as, "CassandraHeathCheck")
    val config      = new CassandraPluginConfig(as, as.settings.config.getConfig("cassandra-journal"))
    val (p, s)      = (config.sessionProvider, config.sessionSettings)
    val session     = new CassandraSession(as, p, s, ec, log, "health", _ => Future.successful(Done.done()))

    new CassandraHeath {
      private val query = s"SELECT now() FROM ${config.keyspace}.messages;"

      override def check: Future[Boolean] = {
        session.selectOne(query).map(_ => true).recover {
          case err =>
            log.error("Error while attempting to query for health check", err)
            false
        }
      }
    }
  }
} 
Example 42
Source File: SetSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.core.serde.collection

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import akka.actor.ExtendedActorSystem
import com.typesafe.config.Config
import io.amient.affinity.core.serde.{AbstractWrapSerde, Serde, Serdes}

class SetSerde(serdes: Serdes) extends AbstractWrapSerde(serdes) with Serde[Set[Any]] {

  def this(system: ExtendedActorSystem) = this(Serde.tools(system))
  def this(config: Config) = this(Serde.tools(config))

  override def identifier: Int = 142

  override protected def fromBytes(bytes: Array[Byte]): Set[Any] = {
    val di = new DataInputStream(new ByteArrayInputStream(bytes))
    val numItems = di.readInt()
    val result = ((1 to numItems) map { _ =>
      val len = di.readInt()
      val item = new Array[Byte](len)
      di.readFully(item)
      fromBinaryWrapped(item)
    }).toSet
    di.close()
    result
  }

  override def toBytes(set: Set[Any]): Array[Byte] = {
    val os = new ByteArrayOutputStream()
    val d = new DataOutputStream(os)
    d.writeInt(set.size)
    for (a: Any <- set) a match {
      case ref: AnyRef =>
        val item = toBinaryWrapped(ref)
        d.writeInt(item.length)
        d.write(item)
    }
    os.close
    os.toByteArray
  }

  override def close() = ()
} 
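The wire format above is a simple length-prefixed framing: an item count followed by (length, bytes) pairs, each element itself encoded by the wrapped serde. A self-contained sketch of just the framing:

import java.io.{ByteArrayOutputStream, DataOutputStream}

object FramingSketch extends App {
  val items = Seq("a".getBytes("UTF-8"), "bb".getBytes("UTF-8"))
  val os = new ByteArrayOutputStream()
  val out = new DataOutputStream(os)
  out.writeInt(items.size)    // item count
  items.foreach { item =>
    out.writeInt(item.length) // per-item length prefix
    out.write(item)
  }
  out.close()
  println(os.toByteArray.length) // 4 + (4 + 1) + (4 + 2) = 15 bytes
}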
Example 43
Source File: KafkaSettings.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.config

import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import com.typesafe.config.Config

class KafkaSettings(config: Config) extends Extension {
  val bootstrapServers: String = config.getString("kafka.producer.bootstrapServers")
  val createEventsTopic: String = config.getString("kafka.producer.createTopic")
  val deleteEventsTopic: String = config.getString("kafka.producer.deleteTopic")
  val auditEventsTopic: String = config.getString("kafka.producer.auditTopic")
  val retries: String = config.getString("kafka.producer.retries")
  val retriesBackOff: String = config.getString("kafka.producer.backoff")
  val retriesBackOffMax: String = config.getString("kafka.producer.backoffMax")
  val requestTimeoutMs: String = config.getString("kafka.producer.requestTimeoutMs")
  val protocol: String = config.getString("kafka.producer.protocol")
  val maxblock: String = config.getString("kafka.producer.maxblock")
  val sslTruststoreLocation: String = config.getString("kafka.producer.ssl.truststore.location")
  val sslTruststorePassword: String = config.getString("kafka.producer.ssl.truststore.password")
  val sslKeystoreLocation: String = config.getString("kafka.producer.ssl.keystore.location")
  val sslKeystorePassword: String = config.getString("kafka.producer.ssl.keystore.password")
  val sslKeyPassword: String = config.getString("kafka.producer.ssl.key.password")
  val kafkaConfig: Config = config.getConfig("kafka.producer")
}

object KafkaSettings extends ExtensionId[KafkaSettings] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): KafkaSettings = new KafkaSettings(system.settings.config)
  override def lookup(): ExtensionId[KafkaSettings] = KafkaSettings
} 
Example 44
Source File: FastKryoSerializer.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.serializer

import akka.actor.ExtendedActorSystem
import com.esotericsoftware.kryo.Kryo.DefaultInstantiatorStrategy
import com.romix.akka.serialization.kryo.{KryoBasedSerializer, KryoSerializer}
import org.apache.gearpump.serializer.FastKryoSerializer.KryoSerializationException
import org.apache.gearpump.util.LogUtil
import org.objenesis.strategy.StdInstantiatorStrategy

class FastKryoSerializer(system: ExtendedActorSystem) extends Serializer {

  private val LOG = LogUtil.getLogger(getClass)
  private val config = system.settings.config

  private val kryoSerializer: KryoBasedSerializer = new KryoSerializer(system).serializer
  private val kryo = kryoSerializer.kryo
  val strategy = new DefaultInstantiatorStrategy
  strategy.setFallbackInstantiatorStrategy(new StdInstantiatorStrategy)
  kryo.setInstantiatorStrategy(strategy)
  private val kryoClazz = new GearpumpSerialization(config).customize(kryo)

  override def serialize(message: Any): Array[Byte] = {
    try {
      kryoSerializer.toBinary(message.asInstanceOf[AnyRef])
    } catch {
      case ex: java.lang.IllegalArgumentException =>
        val clazz = message.getClass
        val error = s"""
          | ${ex.getMessage}
          |You can also register the class by providing a configuration with serializer
          |defined,
          |
          |gearpump{
          |  serializers {
          |    ## Follow this format when adding new serializer for new message types
          |    #    "yourpackage.YourClass" = "yourpackage.YourSerializerForThisClass"
          |
          |    ## If you intend to use default serializer for this class, then you can write this
          |    #    "yourpackage.YourClass" = ""
          |  }
          |}
          |
          |If you want to register the serializer globally, you need to change
          |gear.conf on every worker in the cluster; if you only want to register
          |the serializer for a single streaming application, you need to create
          |a file under conf/ named application.conf, and add the above configuration
          |into application.conf. To verify whether the configuration is effective,
          |you can browser your UI http://{UI Server Host}:8090/api/v1.0/app/{appId}/config,
          |and check whether your custom serializer is added.
        """.stripMargin

        LOG.error(error, ex)
        throw new KryoSerializationException(error, ex)
    }
  }

  override def deserialize(msg: Array[Byte]): Any = {
    kryoSerializer.fromBinary(msg)
  }
}

object FastKryoSerializer {
  class KryoSerializationException(msg: String, ex: Throwable = null) extends Exception(msg, ex)
} 
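The registration format referenced by the error message above, expressed as a parsed config sketch (the class names are placeholders, exactly as in the message):

import com.typesafe.config.ConfigFactory

object SerializerRegistrationSketch extends App {
  val config = ConfigFactory.parseString(
    """gearpump.serializers {
      |  # custom serializer for a message type
      |  "yourpackage.YourClass" = "yourpackage.YourSerializerForThisClass"
      |  # empty value means: use the default serializer for this class
      |  "yourpackage.YourOtherClass" = ""
      |}""".stripMargin)
  println(config.getConfig("gearpump.serializers"))
}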
Example 45
Source File: ExpressTransport.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.streaming.task

import akka.actor.{ActorRef, ExtendedActorSystem}
import org.apache.gearpump.Message
import org.apache.gearpump.transport.netty.TaskMessage
import org.apache.gearpump.transport.{Express, HostPort}
import org.apache.gearpump.util.AkkaHelper

trait ExpressTransport {
  this: TaskActor =>

  final val express = Express(context.system)
  implicit val system = context.system.asInstanceOf[ExtendedActorSystem]

  final def local: HostPort = express.localHost
  lazy val sourceId = TaskId.toLong(taskId)

  lazy val sessionRef: ActorRef = {
    AkkaHelper.actorFor(system, s"/session#$sessionId")
  }

  def transport(msg: AnyRef, remotes: TaskId*): Unit = {
    var serializedMessage: AnyRef = null

    remotes.foreach { remote =>
      val transportId = TaskId.toLong(remote)
      val localActor = express.lookupLocalActor(transportId)
      if (localActor.isDefined) {
        localActor.get.tell(msg, sessionRef)
      } else {
        if (null == serializedMessage) {
          msg match {
            case message: Message =>
              val bytes = serializerPool.get().serialize(message.value)
              serializedMessage = SerializedMessage(message.timestamp.toEpochMilli, bytes)
            case _ => serializedMessage = msg
          }
        }
        val taskMessage = new TaskMessage(sessionId, transportId, sourceId, serializedMessage)

        val remoteAddress = express.lookupRemoteAddress(transportId)
        if (remoteAddress.isDefined) {
          express.transport(taskMessage, remoteAddress.get)
        } else {
          LOG.error(
            s"Can not find target task $remote, maybe the application is undergoing recovery")
        }
      }
    }
  }
} 
Example 46
Source File: StormSerializerPoolSpec.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.experiments.storm.util

import java.util.{HashMap => JHashMap, List => JList, Map => JMap}
import scala.collection.JavaConverters._

import akka.actor.ExtendedActorSystem
import backtype.storm.utils.Utils
import com.esotericsoftware.kryo.Kryo
import org.scalacheck.Gen
import org.scalatest.mock.MockitoSugar
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.experiments.storm.topology.GearpumpTuple
import org.apache.gearpump.experiments.storm.util.StormConstants._
import org.apache.gearpump.streaming.MockUtil

class StormSerializerPoolSpec extends PropSpec with PropertyChecks with Matchers with MockitoSugar {

  property("StormSerializerPool should create and manage StormSerializer") {
    val taskContext = MockUtil.mockTaskContext
    val serializerPool = new StormSerializationFramework
    val system = taskContext.system.asInstanceOf[ExtendedActorSystem]
    implicit val actorSystem = system
    val stormConfig = Utils.readDefaultConfig.asInstanceOf[JMap[AnyRef, AnyRef]]
    val config = UserConfig.empty.withValue[JMap[AnyRef, AnyRef]](STORM_CONFIG, stormConfig)
    serializerPool.init(system, config)
    serializerPool.get shouldBe a[StormSerializer]
  }

  property("StormSerializer should serialize and deserialize GearpumpTuple") {
    val tupleGen = for {
      values <- Gen.listOf[String](Gen.alphaStr).map(_.asJava.asInstanceOf[JList[AnyRef]])
      sourceTaskId <- Gen.chooseNum[Int](0, Int.MaxValue)
      sourceStreamId <- Gen.alphaStr
    } yield new GearpumpTuple(values, Integer.valueOf(sourceTaskId), sourceStreamId, null)

    val kryo = new Kryo
    forAll(tupleGen) { (tuple: GearpumpTuple) =>
      val serializer = new StormSerializer(kryo)
      serializer.deserialize(serializer.serialize(tuple)) shouldBe tuple
    }
  }
} 
Example 47
Source File: StormSerializationFramework.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.experiments.storm.util

import java.lang.{Integer => JInteger}
import java.util.{Map => JMap}

import akka.actor.ExtendedActorSystem
import backtype.storm.serialization.SerializationFactory
import backtype.storm.utils.ListDelegate
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.experiments.storm.topology.GearpumpTuple
import org.apache.gearpump.experiments.storm.util.StormConstants._
import org.apache.gearpump.serializer.{SerializationFramework, Serializer}

class StormSerializationFramework extends SerializationFramework {
  private var stormConfig: JMap[AnyRef, AnyRef] = null
  private var pool: ThreadLocal[Serializer] = null

  override def init(system: ExtendedActorSystem, config: UserConfig): Unit = {
    implicit val actorSystem = system
    stormConfig = config.getValue[JMap[AnyRef, AnyRef]](STORM_CONFIG).get
    pool = new ThreadLocal[Serializer]() {
      override def initialValue(): Serializer = {
        val kryo = SerializationFactory.getKryo(stormConfig)
        new StormSerializer(kryo)
      }
    }
  }

  override def get(): Serializer = {
    pool.get()
  }
}


class StormSerializer(kryo: Kryo) extends Serializer {
  // -1 means the max buffer size is 2147483647
  private val output = new Output(4096, -1)
  private val input = new Input

  override def serialize(message: Any): Array[Byte] = {
    val tuple = message.asInstanceOf[GearpumpTuple]
    output.clear()
    output.writeInt(tuple.sourceTaskId)
    output.writeString(tuple.sourceStreamId)
    val listDelegate = new ListDelegate
    listDelegate.setDelegate(tuple.values)
    kryo.writeObject(output, listDelegate)
    output.toBytes
  }

  override def deserialize(msg: Array[Byte]): Any = {
    input.setBuffer(msg)
    val sourceTaskId: JInteger = input.readInt
    val sourceStreamId: String = input.readString
    val listDelegate = kryo.readObject[ListDelegate](input, classOf[ListDelegate])
    new GearpumpTuple(listDelegate.getDelegate, sourceTaskId, sourceStreamId, null)
  }
} 
Example 48
Source File: AddressTerminatedTopic.scala    From perf_tester   with Apache License 2.0
package akka.event

import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.AddressTerminated
import akka.actor.ExtendedActorSystem
import akka.actor.Extension
import akka.actor.ExtensionId
import akka.actor.ExtensionIdProvider


private[akka] final class AddressTerminatedTopic extends Extension {

  private val subscribers = new AtomicReference[Set[ActorRef]](Set.empty[ActorRef])

  @tailrec def subscribe(subscriber: ActorRef): Unit = {
    val current = subscribers.get
    if (!subscribers.compareAndSet(current, current + subscriber))
      subscribe(subscriber) // retry
  }

  @tailrec def unsubscribe(subscriber: ActorRef): Unit = {
    val current = subscribers.get
    if (!subscribers.compareAndSet(current, current - subscriber))
      unsubscribe(subscriber) // retry
  }

  def publish(msg: AddressTerminated): Unit = {
    subscribers.get foreach { _.tell(msg, ActorRef.noSender) }
  }

} 
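A subscriber sketch. Both the topic and the AddressTerminated message are private[akka], so the snippet is placed under the akka package purely for illustration:

package akka.sketch

import akka.actor.{Actor, AddressTerminated}
import akka.event.AddressTerminatedTopic

// illustration only: reacts to notifications published on the topic
class AddressWatcher extends Actor {
  override def preStart(): Unit = AddressTerminatedTopic(context.system).subscribe(self)
  override def postStop(): Unit = AddressTerminatedTopic(context.system).unsubscribe(self)

  def receive: Receive = {
    case AddressTerminated(address) =>
      println(s"remote address terminated: $address")
  }
}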
Example 49
Source File: Main.scala    From perf_tester   with Apache License 2.0
package akka

import akka.actor.ActorSystem
import akka.actor.ExtendedActorSystem
import akka.actor.Actor
import akka.actor.Terminated
import akka.actor.ActorLogging
import akka.actor.Props
import akka.actor.ActorRef
import scala.util.control.NonFatal

object Main {

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println("you need to provide exactly one argument: the class of the application supervisor actor")
    } else {
      val system = ActorSystem("Main")
      try {
        val appClass = system.asInstanceOf[ExtendedActorSystem].dynamicAccess.getClassFor[Actor](args(0)).get
        val app = system.actorOf(Props(appClass), "app")
        val terminator = system.actorOf(Props(classOf[Terminator], app), "app-terminator")
      } catch {
        case NonFatal(e) ⇒ system.terminate(); throw e
      }
    }
  }

  class Terminator(app: ActorRef) extends Actor with ActorLogging {
    context watch app
    def receive = {
      case Terminated(_) ⇒
        log.info("application supervisor has terminated, shutting down")
        context.system.terminate()
    }
  }

} 
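akka.Main boots an actor system and supervises a single user-supplied actor, terminating the system when that actor stops. A hypothetical supervisor class to pass as the single argument (e.g. running akka.Main with argument "myapp.HelloSupervisor"):

package myapp

import akka.actor.Actor

// hypothetical top-level supervisor; the system shuts down when it terminates
class HelloSupervisor extends Actor {
  println("application started")

  def receive: Receive = {
    case msg => println(s"received: $msg")
  }
}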
Example 50
Source File: VisualMailboxMetricClient.scala    From akka-visualmailbox   with Apache License 2.0
package de.aktey.akka.visualmailbox

import java.net.InetSocketAddress

import akka.actor.{Actor, ActorRef, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider, Props}
import akka.io.{IO, Udp}
import akka.util.ByteString
import de.aktey.akka.visualmailbox.packing.Packing


object VisualMailboxMetricClient extends ExtensionId[VisualMailboxMetricClient] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): VisualMailboxMetricClient = {
    new VisualMailboxMetricClient(
      system,
      VisualMailboxMetricClientConfig.fromConfig(system.settings.config)
    )
  }

  override def lookup(): ExtensionId[_ <: Extension] = VisualMailboxMetricClient
}

class VisualMailboxMetricClient(system: ExtendedActorSystem, config: VisualMailboxMetricClientConfig) extends Extension {
  private val udpSender = system.systemActorOf(
    Props(new UdpSender(config.serverAddress)).withDispatcher("de.aktey.akka.visualmailbox.client.dispatcher"),
    "de-aktey-akka-visualmailbox-sender"
  )
  system.systemActorOf(
    Props(new VisualMailboxMetricListener(udpSender)).withDispatcher("de.aktey.akka.visualmailbox.client.dispatcher"),
    "de-aktey-akka-visualmailbox-receiver"
  )
}

class VisualMailboxMetricListener(udpSender: ActorRef) extends Actor {

  import context._

  import concurrent.duration._

  var buffer: List[VisualMailboxMetric] = Nil

  system.eventStream.subscribe(self, classOf[VisualMailboxMetric])
  system.scheduler.schedule(1.second, 1.second, self, "flush")

  @scala.throws[Exception](classOf[Exception])
  override def postStop(): Unit = {
    system.eventStream.unsubscribe(self)
  }

  def receive: Receive = {
    case v: VisualMailboxMetric =>
      buffer ::= v
      if (buffer.size > 40) self ! "flush"

    case "flush" if buffer.nonEmpty =>
      udpSender ! Packing.pack(MetricEnvelope(1, Packing.pack(buffer)))
      buffer = Nil
  }
}

class UdpSender(remote: InetSocketAddress) extends Actor {

  import context._

  IO(Udp) ! Udp.SimpleSender

  def receive = {
    case Udp.SimpleSenderReady =>
      context.become(ready(sender()))
  }

  def ready(send: ActorRef): Receive = {
    case msg: Array[Byte] =>
      send ! Udp.Send(ByteString(msg), remote)
  }
} 
Example 51
Source File: ActiveMqExtension.scala    From reactive-activemq   with Apache License 2.0
package akka.stream.integration.activemq.extension

import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import akka.camel.CamelExtension
import akka.stream.integration.activemq.extension.config.{ ActiveMqConfig, ConsumerConfig, ProducerConfig }
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.activemq.camel.component.ActiveMQComponent
import org.apache.camel.component.jms.JmsConfiguration

object ActiveMqExtension extends ExtensionId[ActiveMqExtensionImpl] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): ActiveMqExtensionImpl = new ActiveMqExtensionImpl(system)

  override def lookup(): ExtensionId[_ <: Extension] = ActiveMqExtension
}

trait ActiveMqExtension {
  def consumerEndpointUri(consumerName: String): String

  def producerEndpointUri(producerName: String): String
}

class ActiveMqExtensionImpl(val system: ExtendedActorSystem) extends Extension with ActiveMqExtension {

  import scala.collection.JavaConverters._

  system.settings.config.getStringList("reactive-activemq.connections").asScala.foreach { componentName =>
    val amqConfig = ActiveMqConfig(system.settings.config.getConfig(componentName))
    createComponent(componentName, amqConfig)
  }

  private def createComponent(componentName: String, amqConfig: ActiveMqConfig): Unit = {
    val connectionFactory = new ActiveMQConnectionFactory(amqConfig.user, amqConfig.pass, amqConfig.url)
    val jmsConfiguration: JmsConfiguration = new JmsConfiguration()
    jmsConfiguration.setConnectionFactory(connectionFactory)
    val ctx = CamelExtension(system).context
    val component = ctx.getComponent("activemq").asInstanceOf[ActiveMQComponent]
    component.setConfiguration(jmsConfiguration)
    component.setTransacted(true)
    ctx.addComponent(componentName, component)
  }

  override def consumerEndpointUri(consumerName: String): String =
    ConsumerConfig(system.settings.config.getConfig(consumerName), consumerName).endpoint

  override def producerEndpointUri(producerName: String): String =
    ProducerConfig(system.settings.config.getConfig(producerName)).endpoint
} 
Example 52
Source File: RangerSettings.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.config

import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import com.typesafe.config.Config

class RangerSettings(config: Config) extends Extension {
  val serviceType: String = config.getString("rokku.ranger.service_type")
  val appId: String = config.getString("rokku.ranger.app_id")
  val listBucketsEnabled: Boolean = config.getBoolean("rokku.ranger.allow-list-buckets")
  val userDomainPostfix: String = config.getString("rokku.ranger.user-domain-postfix")
  val auditEnabled: Boolean = config.getBoolean("rokku.ranger.enabled-audit")
  val rolePrefix: String = config.getString("rokku.ranger.role-prefix")
}

object RangerSettings extends ExtensionId[RangerSettings] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): RangerSettings = new RangerSettings(system.settings.config)
  override def lookup(): ExtensionId[RangerSettings] = RangerSettings
} 
Example 53
Source File: StorageS3Settings.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.config

import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import akka.http.scaladsl.model.Uri
import com.ing.wbaa.rokku.proxy.data.HealthCheck.{ HCMethod, RGWListBuckets, S3ListBucket }
import com.typesafe.config.Config

class StorageS3Settings(config: Config) extends Extension {
  private val storageS3Host: String = config.getString("rokku.storage.s3.host")
  private val storageS3Port: Int = config.getInt("rokku.storage.s3.port")
  val storageS3Authority = Uri.Authority(Uri.Host(storageS3Host), storageS3Port)

  val storageS3AdminAccesskey: String = config.getString("rokku.storage.s3.admin.accesskey")
  val storageS3AdminSecretkey: String = config.getString("rokku.storage.s3.admin.secretkey")
  val awsRegion: String = config.getString("rokku.storage.s3.region")
  val v2SignatureEnabled: Boolean = config.getBoolean("rokku.storage.s3.v2SignatureEnabled")
  val isRequestUserQueueEnabled: Boolean = config.getBoolean("rokku.storage.s3.request.queue.enable")
  private val hcMethodString = config.getString("rokku.storage.s3.healthCheck.method")
  val hcMethod: HCMethod = hcMethodString match {
    case "rgwListBuckets" => RGWListBuckets
    case "s3ListBucket"   => S3ListBucket
  }
  val hcInterval: Long = config.getLong("rokku.storage.s3.healthCheck.interval")
  val bucketName: String = config.getString("rokku.storage.s3.healthCheck.bucketName")
  val isCacheEnabled: Boolean = config.getBoolean("rokku.storage.s3.enabledCache")
  val eligibleCachePaths: Array[String] = config.getString("rokku.storage.s3.eligibleCachePaths").trim().split(",")
  val maxEligibleCacheObjectSizeInBytes: Long = config.getLong("rokku.storage.s3.maxEligibleCacheObjectSizeInBytes")
  val strictCacheDownloadTimeoutInSeconds: Int = config.getInt("rokku.storage.s3.strictCacheDownloadTimeoutInSeconds")

}

object StorageS3Settings extends ExtensionId[StorageS3Settings] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): StorageS3Settings = new StorageS3Settings(system.settings.config)
  override def lookup(): ExtensionId[StorageS3Settings] = StorageS3Settings
} 
Example 54
Source File: SerializerSpec.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.serializer

import akka.actor.{ActorSystem, ExtendedActorSystem}

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer => KryoSerializer}
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}

import org.apache.gearpump.cluster.TestUtil
import org.apache.gearpump.serializer.SerializerSpec._

import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}

import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration.Duration


class SerializerSpec extends FlatSpec with Matchers with MockitoSugar {
  val config = ConfigFactory.empty.withValue("gearpump.serializers",
    ConfigValueFactory.fromAnyRef(Map(classOf[ClassA].getName -> classOf[ClassASerializer].getName,
      classOf[ClassB].getName -> classOf[ClassBSerializer].getName).asJava))

  "GearpumpSerialization" should "register custom serializers" in {
    val serialization = new GearpumpSerialization(config)
    val kryo = new Kryo
    serialization.customize(kryo)

    val forB = kryo.getRegistration(classOf[ClassB])
    assert(forB.getSerializer.isInstanceOf[ClassBSerializer])

    val forA = kryo.getRegistration(classOf[ClassA])
    assert(forA.getSerializer.isInstanceOf[ClassASerializer])
  }

  "FastKryoSerializer" should "serialize correctly" in {
    val myConfig = config.withFallback(TestUtil.DEFAULT_CONFIG.withoutPath("gearpump.serializers"))
    val system = ActorSystem("my", myConfig)

    val serializer = new FastKryoSerializer(system.asInstanceOf[ExtendedActorSystem])

    val bytes = serializer.serialize(new ClassA)
    val anotherA = serializer.deserialize(bytes)

    assert(anotherA.isInstanceOf[ClassA])
    system.terminate()
    Await.result(system.whenTerminated, Duration.Inf)
  }
}

object SerializerSpec {

  class ClassA {}

  class ClassASerializer extends KryoSerializer[ClassA] {
    override def write(kryo: Kryo, output: Output, `object`: ClassA): Unit = {
      output.writeString(classOf[ClassA].getName)
    }

    override def read(kryo: Kryo, input: Input, `type`: Class[ClassA]): ClassA = {
      val className = input.readString()
      Class.forName(className).newInstance().asInstanceOf[ClassA]
    }
  }

  class ClassB {}

  class ClassBSerializer extends KryoSerializer[ClassB] {
    override def write(kryo: Kryo, output: Output, `object`: ClassB): Unit = {}

    override def read(kryo: Kryo, input: Input, `type`: Class[ClassB]): ClassB = {
      null
    }
  }
} 
Example 55
Source File: AkkaSerializationMessageCodec.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.adapter.vertx

import akka.actor.{ ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import com.rbmhtechnology.eventuate.serializer.CommonFormats.PayloadFormat
import com.rbmhtechnology.eventuate.serializer.DelegatingPayloadSerializer
import io.vertx.core.buffer.Buffer
import io.vertx.core.eventbus.MessageCodec

object AkkaSerializationMessageCodec {
  val Name = "akka-serialization-message-codec"

  def apply(name: String)(implicit system: ActorSystem): MessageCodec[AnyRef, AnyRef] =
    new AkkaSerializationMessageCodec(name)

  def apply(clazz: Class[_])(implicit system: ActorSystem): MessageCodec[AnyRef, AnyRef] =
    new AkkaSerializationMessageCodec(s"${AkkaSerializationMessageCodec.Name}-${clazz.getName}")
}

class AkkaSerializationMessageCodec(override val name: String)(implicit system: ActorSystem) extends MessageCodec[AnyRef, AnyRef] {

  val serializer = PayloadSerializationExtension(system)

  override def transform(o: AnyRef): AnyRef =
    o

  override def encodeToWire(buffer: Buffer, o: AnyRef): Unit = {
    val payload = serializer.toBinary(o)
    buffer.appendInt(payload.length)
    buffer.appendBytes(payload)
  }

  override def decodeFromWire(pos: Int, buffer: Buffer): AnyRef = {
    val payloadLength = buffer.getInt(pos)
    val payload = buffer.getBytes(pos + Integer.BYTES, pos + Integer.BYTES + payloadLength)
    serializer.fromBinary(payload).asInstanceOf[AnyRef]
  }

  override def systemCodecID(): Byte = -1
}

object PayloadSerializationExtension extends ExtensionId[PayloadSerializationExtension] with ExtensionIdProvider {

  override def lookup = PayloadSerializationExtension

  override def createExtension(system: ExtendedActorSystem): PayloadSerializationExtension =
    new PayloadSerializationExtension(system)

  override def get(system: ActorSystem): PayloadSerializationExtension =
    super.get(system)
}

class PayloadSerializationExtension(system: ExtendedActorSystem) extends Extension {

  val serializer = new DelegatingPayloadSerializer(system)

  def toBinary(o: AnyRef): Array[Byte] =
    serializer.payloadFormatBuilder(o).build().toByteArray

  def fromBinary(b: Array[Byte]): Any =
    serializer.payload(PayloadFormat.parseFrom(b))
} 
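Before the codec above can carry messages, it must be registered with the Vert.x event bus under its name, and senders must select it explicitly. A minimal sketch, assuming a running Vertx instance; the codec name and MyMessage type are illustrative:

import akka.actor.ActorSystem
import io.vertx.core.Vertx
import io.vertx.core.eventbus.DeliveryOptions

case class MyMessage(payload: String)  // hypothetical application message

implicit val system: ActorSystem = ActorSystem("eventuate-vertx")
val vertx = Vertx.vertx()

vertx.eventBus().registerCodec(AkkaSerializationMessageCodec("my-codec"))

// Messages select the codec by name when sent:
vertx.eventBus().send("some-address", MyMessage("payload"),
  new DeliveryOptions().setCodecName("my-codec"))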
Example 56
Source File: CommonSerializer.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.serializer

import akka.actor.ExtendedActorSystem

import com.rbmhtechnology.eventuate._
import com.rbmhtechnology.eventuate.serializer.CommonFormats._

import scala.collection.JavaConverters._

class CommonSerializer(system: ExtendedActorSystem) {

  val payloadSerializer = new DelegatingPayloadSerializer(system)

  // --------------------------------------------------------------------------------
  //  toBinary helpers
  // --------------------------------------------------------------------------------

  def vectorTimeFormatBuilder(vectorTime: VectorTime): VectorTimeFormat.Builder = {
    val builder = VectorTimeFormat.newBuilder
    vectorTime.value.foreach { entry =>
      builder.addEntries(VectorTimeEntryFormat.newBuilder
        .setProcessId(entry._1)
        .setLogicalTime(entry._2))
    }
    builder
  }

  def versionedFormatBuilder(versioned: Versioned[_]): VersionedFormat.Builder = {
    val builder = VersionedFormat.newBuilder
    builder.setPayload(payloadSerializer.payloadFormatBuilder(versioned.value.asInstanceOf[AnyRef]))
    builder.setVectorTimestamp(vectorTimeFormatBuilder(versioned.vectorTimestamp))
    builder.setSystemTimestamp(versioned.systemTimestamp)
    builder.setCreator(versioned.creator)
  }

  // --------------------------------------------------------------------------------
  //  fromBinary helpers
  // --------------------------------------------------------------------------------

  def vectorTime(vectorTimeFormat: VectorTimeFormat): VectorTime = {
    VectorTime(vectorTimeFormat.getEntriesList.iterator.asScala.foldLeft(Map.empty[String, Long]) {
      case (result, entry) => result.updated(entry.getProcessId, entry.getLogicalTime)
    })
  }

  def versioned(versionedFormat: VersionedFormat): Versioned[Any] = {
    Versioned[Any](
      payloadSerializer.payload(versionedFormat.getPayload),
      vectorTime(versionedFormat.getVectorTimestamp),
      versionedFormat.getSystemTimestamp,
      versionedFormat.getCreator)
  }
} 
Example 57
Source File: PayloadSerializer.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.serializer

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import akka.serialization.SerializerWithStringManifest
import com.google.protobuf.ByteString
import com.rbmhtechnology.eventuate.BinaryPayload
import com.rbmhtechnology.eventuate.serializer.CommonFormats.PayloadFormat


class BinaryPayloadSerializer(system: ExtendedActorSystem) extends PayloadSerializer {

  override def payloadFormatBuilder(payload: AnyRef): PayloadFormat.Builder = {
    val binaryPayload = payload.asInstanceOf[BinaryPayload]
    val builder = PayloadFormat.newBuilder()
      .setPayload(binaryPayload.bytes)
      .setSerializerId(binaryPayload.serializerId)
      .setIsStringManifest(binaryPayload.isStringManifest)
    binaryPayload.manifest.foreach(builder.setPayloadManifest)
    builder
  }

  override def payload(payloadFormat: PayloadFormat): AnyRef = {
    BinaryPayload(
      payloadFormat.getPayload,
      payloadFormat.getSerializerId,
      if (payloadFormat.hasPayloadManifest) Some(payloadFormat.getPayloadManifest) else None,
      payloadFormat.getIsStringManifest)
  }
} 
Example 58
Source File: ReplicationFilterSerializer.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.serializer

import akka.actor.ExtendedActorSystem
import akka.serialization._

import com.rbmhtechnology.eventuate.ReplicationFilter.AndFilter
import com.rbmhtechnology.eventuate.ReplicationFilter.NoFilter
import com.rbmhtechnology.eventuate.ReplicationFilter.OrFilter

import com.rbmhtechnology.eventuate._
import com.rbmhtechnology.eventuate.serializer.ReplicationFilterFormats._

import scala.collection.JavaConverters._
import scala.language.existentials

class ReplicationFilterSerializer(system: ExtendedActorSystem) extends Serializer {
  import ReplicationFilterTreeFormat.NodeType._

  val payloadSerializer = new DelegatingPayloadSerializer(system)

  val AndFilterClass = classOf[AndFilter]
  val OrFilterClass = classOf[OrFilter]
  val NoFilterClass = NoFilter.getClass

  override def identifier: Int = 22564
  override def includeManifest: Boolean = true

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case NoFilter =>
      NoFilterFormat.newBuilder().build().toByteArray
    case f: ReplicationFilter =>
      filterTreeFormatBuilder(f).build().toByteArray
    case _ =>
      throw new IllegalArgumentException(s"can't serialize object of type ${o.getClass}")
  }

  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match {
    case None => throw new IllegalArgumentException("manifest required")
    case Some(clazz) => clazz match {
      case NoFilterClass =>
        NoFilter
      case AndFilterClass | OrFilterClass =>
        filterTree(ReplicationFilterTreeFormat.parseFrom(bytes))
      case _ =>
        throw new IllegalArgumentException(s"can't deserialize object of type ${clazz}")
    }
  }

  // --------------------------------------------------------------------------------
  //  toBinary helpers
  // --------------------------------------------------------------------------------

  def filterTreeFormatBuilder(filterTree: ReplicationFilter): ReplicationFilterTreeFormat.Builder = {
    val builder = ReplicationFilterTreeFormat.newBuilder()
    filterTree match {
      case AndFilter(filters) =>
        builder.setNodeType(AND)
        filters.foreach(filter => builder.addChildren(filterTreeFormatBuilder(filter)))
      case OrFilter(filters) =>
        builder.setNodeType(OR)
        filters.foreach(filter => builder.addChildren(filterTreeFormatBuilder(filter)))
      case filter =>
        builder.setNodeType(LEAF)
        builder.setFilter(payloadSerializer.payloadFormatBuilder(filter))
    }
    builder
  }

  // --------------------------------------------------------------------------------
  //  fromBinary helpers
  // --------------------------------------------------------------------------------

  def filterTree(filterTreeFormat: ReplicationFilterTreeFormat): ReplicationFilter = {
    filterTreeFormat.getNodeType match {
      case AND  => AndFilter(filterTreeFormat.getChildrenList.asScala.map(filterTree).toList)
      case OR   => OrFilter(filterTreeFormat.getChildrenList.asScala.map(filterTree).toList)
      case LEAF => payloadSerializer.payload(filterTreeFormat.getFilter).asInstanceOf[ReplicationFilter]
    }
  }
} 
Example 59
Source File: CouchbaseReplayExtension.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase.replay

import java.util.concurrent.TimeUnit

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.event.Logging
import akka.persistence.couchbase.CouchbaseExtension
import com.couchbase.client.java.document.JsonLongDocument
import com.couchbase.client.java.document.json.JsonObject
import com.couchbase.client.java.view._

import scala.util.{Failure, Try}

trait CouchbaseReplay extends Extension {

  def replayConfig: CouchbaseReplayConfig

  def replay(callback: ReplayCallback, journalMessageIdOption: Option[Long] = None): Unit

  def storeMessageId(identifier: String, journalMessageId: Long): Unit

  def readMessageId(identifier: String): Option[Long]
}

private class DefaultCouchbaseReplay(val system: ExtendedActorSystem) extends CouchbaseReplay {

  private val log = Logging(system, getClass.getName)

  val couchbase = CouchbaseExtension(system)

  override val replayConfig = CouchbaseReplayConfig(system)

  val cluster = replayConfig.createCluster(couchbase.environment)

  val replayBucket = replayConfig.openBucket(cluster)

  updateJournalDesignDocs()

  private def updateJournalDesignDocs(): Unit = {
    val designDocs = JsonObject.create()
      .put("views", JsonObject.create()
        .put("commits", JsonObject.create()
          .put("map", replayConfig.replayViewCode)
        )
      )

    Try {
      val designDocument = DesignDocument.from("recovery", designDocs)
      couchbase.journalBucket.bucketManager.upsertDesignDocument(designDocument)
    } recoverWith {
      case e =>
        log.error(e, "Updating design documents for recovery")
        Failure(e)
    }
  }

  override def replay(callback: ReplayCallback, journalMessageIdOption: Option[Long]): Unit = {
    system.actorOf(ReplayActor.props(callback)) ! ReplayActor.Recover(journalMessageIdOption)
  }

  override def storeMessageId(identifier: String, journalMessageId: Long): Unit = {
    Try {
      replayBucket.upsert(
        JsonLongDocument.create(s"replayId::$identifier", journalMessageId),
        replayConfig.persistTo,
        replayConfig.replicateTo,
        replayConfig.timeout.toSeconds,
        TimeUnit.SECONDS
      )
    } recoverWith {
      case e =>
        log.error(e, "Store replay id: {}", journalMessageId)
        Failure(e)
    }
  }

  override def readMessageId(identifier: String): Option[Long] = {
    Option(
      replayBucket.get(
        JsonLongDocument.create(s"replayId::$identifier"),
        replayConfig.timeout.toSeconds,
        TimeUnit.SECONDS
      )
    ).map(_.content())
  }
}


object CouchbaseReplayExtension extends ExtensionId[CouchbaseReplay] with ExtensionIdProvider {

  override def lookup(): ExtensionId[CouchbaseReplay] = CouchbaseReplayExtension

  override def createExtension(system: ExtendedActorSystem): CouchbaseReplay = {
    new DefaultCouchbaseReplay(system)
  }
} 
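A usage sketch for the extension above; myCallback stands for a hypothetical ReplayCallback implementation:

import akka.actor.ActorSystem

val system = ActorSystem("journal-system")
val replay = CouchbaseReplayExtension(system)

replay.replay(myCallback)              // myCallback: ReplayCallback, hypothetical
replay.storeMessageId("my-app", 42L)   // checkpoint a replay position
val resumeFrom: Option[Long] = replay.readMessageId("my-app")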
Example 60
Source File: AkkaUtils.scala    From DataXServer   with Apache License 2.0 5 votes vote down vote up
package org.tianlangstudio.data.hamal.yarn.util

import akka.actor.{ActorSystem, ExtendedActorSystem}
import com.typesafe.config.ConfigFactory
import org.apache.log4j.{Level, Logger}
import org.tianlangstudio.data.hamal.core.{Constants, HamalConf}

object AkkaUtils {

  // Upper bound on the Akka frame size. The original constant is elided in this
  // listing; the value below mirrors Spark's AkkaUtils and is an assumption.
  private val AKKA_MAX_FRAME_SIZE_IN_MB = Int.MaxValue / 1024 / 1024

  def maxFrameSizeBytes(conf: HamalConf): Int = {
    val frameSizeInMB = conf.getInt("datax.akka.frameSize", 128)
    if (frameSizeInMB > AKKA_MAX_FRAME_SIZE_IN_MB) {
      throw new IllegalArgumentException(
        s"datax.akka.frameSize should not be greater than $AKKA_MAX_FRAME_SIZE_IN_MB MB")
    }
    frameSizeInMB * 1024 * 1024
  }


  def protocol(actorSystem: ActorSystem): String = {
    val akkaConf = actorSystem.settings.config
    val sslProp = "akka.remote.netty.tcp.enable-ssl"
    protocol(akkaConf.hasPath(sslProp) && akkaConf.getBoolean(sslProp))
  }

  def protocol(ssl: Boolean = false): String = {
    if (ssl) {
      "akka.ssl.tcp"
    } else {
      "akka.tcp"
    }
  }

  def address(
      protocol: String,
      systemName: String,
      host: String,
      port: Int,
      actorName: String): String = {

        address(protocol,
          systemName,
          s"$host:$port",
          actorName
        )
  }
  def address(
               protocol: String,
               systemName: String,
               hostPort: String,
               actorName: String): String = {
    s"$protocol://$systemName@$hostPort/user/$actorName"
  }
} 
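Taken together, the helpers above compose fully qualified remote actor paths; a quick sketch (host, port and actor name are made up):

val path = AkkaUtils.address(
  protocol = AkkaUtils.protocol(ssl = false),
  systemName = "hamal",
  host = "worker-1",
  port = 2552,
  actorName = "taskRunner")
// path == "akka.tcp://hamal@worker-1:2552/user/taskRunner"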
Example 61
Source File: Json4sSerializer.scala    From reliable-http-client   with Apache License 2.0 5 votes vote down vote up
package rhttpc.akkapersistence.json4s

import java.nio.ByteBuffer
import java.nio.charset.Charset

import akka.actor.ExtendedActorSystem
import akka.serialization.Serializer
import org.json4s.native.Serialization._
import org.json4s.{DefaultFormats, Formats, TypeHints}
import rhttpc.transport.json4s.{AllTypeHints, ObjectSerializer}

class Json4sSerializer(system: ExtendedActorSystem) extends Serializer {
  import Json4sSerializer._
  import rhttpc.transport.json4s.CommonFormats._

  override def identifier: Int = ID

  override def includeManifest: Boolean = true

  override def fromBinary(bytes: Array[Byte], manifestOpt: Option[Class[_]]): AnyRef = {
    implicit val manifest = manifestOpt match {
      case Some(x) => Manifest.classType(x)
      case None    => Manifest.AnyRef
    }
    read(new String(bytes, UTF8))
  }

  override def toBinary(o: AnyRef): Array[Byte] = {
    writePretty(o).getBytes(UTF8)
  }
}

object Json4sSerializer {
  private val UTF8: Charset = Charset.forName("UTF-8")
  private val ID: Int = ByteBuffer.wrap("json4s".getBytes(UTF8)).getInt
} 
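A serializer like this only takes effect once it is bound in the actor system configuration. A sketch of such a binding; the binding target below is illustrative, not the project's actual wiring:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  """
    |akka.actor {
    |  serializers.json4s = "rhttpc.akkapersistence.json4s.Json4sSerializer"
    |  serialization-bindings {
    |    "java.io.Serializable" = json4s
    |  }
    |}
  """.stripMargin)

val system = ActorSystem("rhttpc", config.withFallback(ConfigFactory.load()))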
Example 62
Source File: DynamoDBSnapshotStore.scala    From akka-persistence-dynamodb   with Apache License 2.0 5 votes vote down vote up
package com.github.j5ik2o.akka.persistence.dynamodb.snapshot

import akka.actor.ExtendedActorSystem
import akka.persistence.snapshot.SnapshotStore
import akka.persistence.{ SelectedSnapshot, SnapshotMetadata, SnapshotSelectionCriteria }
import akka.serialization.SerializationExtension
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import com.github.j5ik2o.akka.persistence.dynamodb.config.SnapshotPluginConfig
import com.github.j5ik2o.akka.persistence.dynamodb.model.{ PersistenceId, SequenceNumber }
import com.github.j5ik2o.akka.persistence.dynamodb.snapshot.dao.{ SnapshotDao, SnapshotDaoImpl }
import com.github.j5ik2o.akka.persistence.dynamodb.utils.V2DynamoDbClientBuilderUtils
import com.github.j5ik2o.reactive.aws.dynamodb.DynamoDbAsyncClient
import com.typesafe.config.Config
import software.amazon.awssdk.services.dynamodb.{ DynamoDbAsyncClient => JavaDynamoDbAsyncClient }

import scala.concurrent.{ ExecutionContext, Future }

object DynamoDBSnapshotStore {

  def toSelectedSnapshot(tupled: (SnapshotMetadata, Any)): SelectedSnapshot = tupled match {
    case (meta: SnapshotMetadata, snapshot: Any) => SelectedSnapshot(meta, snapshot)
  }
}

class DynamoDBSnapshotStore(config: Config) extends SnapshotStore {
  import DynamoDBSnapshotStore._

  implicit val ec: ExecutionContext        = context.dispatcher
  implicit val system: ExtendedActorSystem = context.system.asInstanceOf[ExtendedActorSystem]
  implicit val mat                         = ActorMaterializer()

  private val serialization                        = SerializationExtension(system)
  protected val pluginConfig: SnapshotPluginConfig = SnapshotPluginConfig.fromConfig(config)

  protected val javaClient: JavaDynamoDbAsyncClient =
    V2DynamoDbClientBuilderUtils.setupAsync(system.dynamicAccess, pluginConfig).build()
  protected val asyncClient: DynamoDbAsyncClient = DynamoDbAsyncClient(javaClient)

  protected val snapshotDao: SnapshotDao =
    new SnapshotDaoImpl(asyncClient, serialization, pluginConfig)

  override def loadAsync(
      persistenceId: String,
      criteria: SnapshotSelectionCriteria
  ): Future[Option[SelectedSnapshot]] = {
    val result = criteria match {
      case SnapshotSelectionCriteria(Long.MaxValue, Long.MaxValue, _, _) =>
        snapshotDao.latestSnapshot(PersistenceId(persistenceId))
      case SnapshotSelectionCriteria(Long.MaxValue, maxTimestamp, _, _) =>
        snapshotDao.snapshotForMaxTimestamp(PersistenceId(persistenceId), maxTimestamp)
      case SnapshotSelectionCriteria(maxSequenceNr, Long.MaxValue, _, _) =>
        snapshotDao.snapshotForMaxSequenceNr(PersistenceId(persistenceId), SequenceNumber(maxSequenceNr))
      case SnapshotSelectionCriteria(maxSequenceNr, maxTimestamp, _, _) =>
        snapshotDao.snapshotForMaxSequenceNrAndMaxTimestamp(
          PersistenceId(persistenceId),
          SequenceNumber(maxSequenceNr),
          maxTimestamp
        )
      case _ => Source.empty
    }
    result.map(_.map(toSelectedSnapshot)).runWith(Sink.head)
  }

  override def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] =
    snapshotDao.save(metadata, snapshot).runWith(Sink.ignore).map(_ => ())

  override def deleteAsync(metadata: SnapshotMetadata): Future[Unit] =
    snapshotDao
      .delete(PersistenceId(metadata.persistenceId), SequenceNumber(metadata.sequenceNr)).map(_ => ()).runWith(
        Sink.ignore
      ).map(_ => ())

  override def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = {
    val pid = PersistenceId(persistenceId)
    criteria match {
      case SnapshotSelectionCriteria(Long.MaxValue, Long.MaxValue, _, _) =>
        snapshotDao.deleteAllSnapshots(pid).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(Long.MaxValue, maxTimestamp, _, _) =>
        snapshotDao.deleteUpToMaxTimestamp(pid, maxTimestamp).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(maxSequenceNr, Long.MaxValue, _, _) =>
        snapshotDao
          .deleteUpToMaxSequenceNr(pid, SequenceNumber(maxSequenceNr)).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(maxSequenceNr, maxTimestamp, _, _) =>
        snapshotDao
          .deleteUpToMaxSequenceNrAndMaxTimestamp(pid, SequenceNumber(maxSequenceNr), maxTimestamp).runWith(
            Sink.ignore
          ).map(_ => ())
      case _ => Future.successful(())
    }
  }

} 
Example 63
Source File: EnvelopeSpec.scala    From prometheus-akka   with Apache License 2.0 5 votes vote down vote up
package akka.monitor.instrumentation

import com.workday.prometheus.akka.TestKitBaseSpec
import akka.actor.{Actor, ExtendedActorSystem, Props}
import akka.dispatch.Envelope

class EnvelopeSpec extends TestKitBaseSpec("envelope-spec") {

  "EnvelopeInstrumentation" should {
    "mixin EnvelopeContext" in {
      val actorRef = system.actorOf(Props[NoReply])
      val env = Envelope("msg", actorRef, system).asInstanceOf[Object]
      env match {
        case e: Envelope with InstrumentedEnvelope => e.setEnvelopeContext(EnvelopeContext())
        case _ => fail("InstrumentedEnvelope is not mixed in")
      }
      env match {
        case s: Serializable => {
          import java.io._
          val bos = new ByteArrayOutputStream
          val oos = new ObjectOutputStream(bos)
          oos.writeObject(env)
          oos.close()
          akka.serialization.JavaSerializer.currentSystem.withValue(system.asInstanceOf[ExtendedActorSystem])  {
            val ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))
            val obj = ois.readObject()
            ois.close()
            obj match {
              case e: Envelope with InstrumentedEnvelope => e.envelopeContext() should not be null
              case _ => fail("InstrumentedEnvelope is not mixed in")
            }
          }
        }
        case _ => fail("envelope is not serializable")
      }
    }
  }
}

class NoReply extends Actor {
  override def receive = {
    case any ⇒
  }
} 
Example 64
Source File: ClusterBootstrap.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster.bootstrap

import java.util.concurrent.atomic.AtomicReference

import akka.AkkaVersion
import scala.concurrent.{ Future, Promise, TimeoutException }
import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.actor.ExtendedActorSystem
import akka.actor.Extension
import akka.actor.ExtensionId
import akka.actor.ExtensionIdProvider
import akka.annotation.InternalApi
import akka.cluster.Cluster
import akka.discovery.{ Discovery, ServiceDiscovery }
import akka.event.Logging
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Route
import akka.management.cluster.bootstrap.contactpoint.HttpClusterBootstrapRoutes
import akka.management.cluster.bootstrap.internal.BootstrapCoordinator
import akka.management.scaladsl.ManagementRouteProviderSettings
import akka.management.scaladsl.ManagementRouteProvider

final class ClusterBootstrap(implicit system: ExtendedActorSystem) extends Extension with ManagementRouteProvider {

  import ClusterBootstrap.Internal._
  import system.dispatcher

  private val log = Logging(system, classOf[ClusterBootstrap])

  private final val bootstrapStep = new AtomicReference[BootstrapStep](NotRunning)

  AkkaVersion.require("cluster-bootstrap", "2.5.27")

  val settings: ClusterBootstrapSettings = ClusterBootstrapSettings(system.settings.config, log)

  // used for initial discovery of contact points
  lazy val discovery: ServiceDiscovery =
    settings.contactPointDiscovery.discoveryMethod match {
      case "akka.discovery" =>
        val discovery = Discovery(system).discovery
        log.info("Bootstrap using default `akka.discovery` method: {}", Logging.simpleName(discovery))
        discovery

      case otherDiscoveryMechanism =>
        log.info("Bootstrap using `akka.discovery` method: {}", otherDiscoveryMechanism)
        Discovery(system).loadServiceDiscovery(otherDiscoveryMechanism)
    }

  private val joinDecider: JoinDecider = {
    system.dynamicAccess
      .createInstanceFor[JoinDecider](
        settings.joinDecider.implClass,
        List((classOf[ActorSystem], system), (classOf[ClusterBootstrapSettings], settings))
      )
      .get
  }

  private[this] val _selfContactPointUri: Promise[Uri] = Promise()

  override def routes(routeProviderSettings: ManagementRouteProviderSettings): Route = {
    log.info(s"Using self contact point address: ${routeProviderSettings.selfBaseUri}")
    this.setSelfContactPoint(routeProviderSettings.selfBaseUri)

    new HttpClusterBootstrapRoutes(settings).routes
  }

  def start(): Unit =
    if (Cluster(system).settings.SeedNodes.nonEmpty) {
      log.warning(
        "Application is configured with specific `akka.cluster.seed-nodes`: {}, bailing out of the bootstrap process! " +
        "If you want to use the automatic bootstrap mechanism, make sure to NOT set explicit seed nodes in the configuration. " +
        "This node will attempt to join the configured seed nodes.",
        Cluster(system).settings.SeedNodes.mkString("[", ", ", "]")
      )
    } else if (bootstrapStep.compareAndSet(NotRunning, Initializing)) {
      log.info("Initiating bootstrap procedure using {} method...", settings.contactPointDiscovery.discoveryMethod)

      ensureSelfContactPoint()
      val bootstrapProps = BootstrapCoordinator.props(discovery, joinDecider, settings)
      val bootstrap = system.systemActorOf(bootstrapProps, "bootstrapCoordinator")
      // Bootstrap already logs in several other execution points when it can't form a cluster, and why.
      selfContactPoint.foreach { uri =>
        bootstrap ! BootstrapCoordinator.Protocol.InitiateBootstrapping(uri)
      }
    } else log.warning("Bootstrap already initiated, yet start() method was called again. Ignoring.")

  
  // The self-contact-point members referenced above are elided in this listing;
  // minimal reconstructions so the class is self-contained:
  private[akka] def setSelfContactPoint(baseUri: Uri): Unit =
    _selfContactPointUri.trySuccess(baseUri)

  private[akka] def selfContactPoint: Future[Uri] = _selfContactPointUri.future

  // Fails the pending lookup if Akka Management never set the contact point.
  private[bootstrap] def ensureSelfContactPoint(): Unit = system.scheduler.scheduleOnce(10.seconds) {
    if (!selfContactPoint.isCompleted) {
      _selfContactPointUri.tryFailure(new TimeoutException(
        "Self contact point was never set; was Akka Management started before bootstrap?"))
    }
  }
}

object ClusterBootstrap extends ExtensionId[ClusterBootstrap] with ExtensionIdProvider {

  override def lookup: ClusterBootstrap.type = ClusterBootstrap

  override def get(system: ActorSystem): ClusterBootstrap = super.get(system)

  override def createExtension(system: ExtendedActorSystem): ClusterBootstrap = new ClusterBootstrap()(system)

  private[bootstrap] object Internal {
    sealed trait BootstrapStep
    case object NotRunning extends BootstrapStep
    case object Initializing extends BootstrapStep
  }
} 
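The usual startup sequence pairs the extension with Akka Management, which provides the HTTP endpoint that bootstrap advertises as its self contact point:

import akka.actor.ActorSystem
import akka.management.scaladsl.AkkaManagement

val system = ActorSystem("clustered-app")

AkkaManagement(system).start()   // must be running so the self contact point gets set
ClusterBootstrap(system).start()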
Example 65
Source File: EventSerializer.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.admin.persistence

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import ch.epfl.bluebrain.nexus.admin.organizations.OrganizationEvent
import ch.epfl.bluebrain.nexus.admin.projects.ProjectEvent
import ch.epfl.bluebrain.nexus.commons.serialization.AkkaCoproductSerializer
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Settings
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.{Decoder, Encoder}
import shapeless.{:+:, CNil}

import scala.annotation.nowarn

@nowarn("cat=unused")
class EventSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {

  implicit private val httpConfig: HttpConfig = Settings(system).serviceConfig.http

  implicit private val config: Configuration = Configuration.default.withDiscriminator("@type")

  implicit private val projectEventDecoder: Decoder[ProjectEvent]           = deriveConfiguredDecoder[ProjectEvent]
  implicit private val projectEventEncoder: Encoder[ProjectEvent]           = deriveConfiguredEncoder[ProjectEvent]
  implicit private val organizationEventDecoder: Decoder[OrganizationEvent] = deriveConfiguredDecoder[OrganizationEvent]
  implicit private val organizationEventEncoder: Encoder[OrganizationEvent] = deriveConfiguredEncoder[OrganizationEvent]

  private val serializer = new AkkaCoproductSerializer[OrganizationEvent :+: ProjectEvent :+: CNil](1129)

  override val identifier: Int = serializer.identifier

  override def manifest(o: AnyRef): String = serializer.manifest(o)

  override def toBinary(o: AnyRef): Array[Byte] = serializer.toBinary(o)

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = serializer.fromBinary(bytes, manifest)
} 
Example 66
Source File: AkkaTestUtils.scala    From bahir   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.bahir.sql.streaming.akka

import java.io.File

import scala.collection.mutable
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Random

import akka.actor.{Actor, ActorRef, ActorSystem, ExtendedActorSystem, Props}
import com.typesafe.config.{Config, ConfigFactory}

import org.apache.bahir.utils.Logging

class AkkaTestUtils extends Logging {
  private val actorSystemName = "feeder-actor-system"
  private var actorSystem: ActorSystem = _

  private val feederActorName = "feederActor"

  private var message: String = _
  private var count = 1

  def getFeederActorConfig(): Config = {
    val configFile = getClass.getClassLoader
                      .getResource("feeder_actor.conf").getFile
    ConfigFactory.parseFile(new File(configFile))
  }

  def getFeederActorUri(): String =
    s"${actorSystem.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress}" +
      s"/user/$feederActorName"

  class FeederActor extends Actor {

    val rand = new Random()
    val receivers = new mutable.LinkedHashSet[ActorRef]()

    val sendMessageThread =
      new Thread() {
        override def run(): Unit = {
          var counter = 0
          while (counter < count) {
//            Thread.sleep(500)
            receivers.foreach(_ ! message)
            counter += 1
          }
        }
      }

    override def receive: Receive = {
      case SubscribeReceiver(receiverActor: ActorRef) =>
        log.debug(s"received subscribe from ${receiverActor.toString}")
        receivers += receiverActor
        sendMessageThread.run()

      case UnsubscribeReceiver(receiverActor: ActorRef) =>
        log.debug(s"received unsubscribe from ${receiverActor.toString}")
        receivers -= receiverActor
    }
  }

  def setup(): Unit = {
    val feederConf = getFeederActorConfig()

    actorSystem = ActorSystem(actorSystemName, feederConf)
    actorSystem.actorOf(Props(new FeederActor), feederActorName)
  }

  def shutdown(): Unit = {
    Await.ready(actorSystem.terminate(), 5.seconds)
  }

  def setMessage(message: String): Unit = this.message = message
  def setCountOfMessages(messageCount: Int): Unit = count = messageCount
} 
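A typical test harness flow with the utility above; the receiver side is left out:

val testUtils = new AkkaTestUtils
testUtils.setMessage("hello")
testUtils.setCountOfMessages(100)
testUtils.setup()

val feederUri = testUtils.getFeederActorUri()
// ... subscribe a receiver actor to feederUri and assert on the messages ...

testUtils.shutdown()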
Example 67
Source File: EmittedSerializer.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases.serializer

import java.io.NotSerializableException

import akka.actor.ExtendedActorSystem
import akka.serialization.Serializer
import com.github.krasserm.ases.Emitted
import com.github.krasserm.ases.serializer.EmittedFormatOuterClass.EmittedFormat

class EmittedSerializer(system: ExtendedActorSystem) extends Serializer {

  private val EmittedClass = classOf[Emitted[_]]
  private val payloadSerializer = new PayloadSerializer(system)

  override def identifier: Int = 17406883

  override def includeManifest: Boolean = true

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case emitted: Emitted[_] =>
      emittedFormat(emitted).toByteArray
    case _ =>
      throw new IllegalArgumentException(s"Invalid object of type '${o.getClass}' supplied to serializer [id = '$identifier']")
  }

  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match {
    case Some(`EmittedClass`) =>
      emitted(EmittedFormat.parseFrom(bytes))
    case None =>
      emitted(EmittedFormat.parseFrom(bytes))
    case _ =>
      throw new NotSerializableException(s"Unknown manifest '$manifest' supplied to serializer [id = '$identifier']")
  }

  private def emittedFormat(emitted: Emitted[Any]): EmittedFormat = {
    EmittedFormat.newBuilder()
      .setEvent(payloadSerializer.payloadFormatBuilder(emitted.event.asInstanceOf[AnyRef]))
      .setEmitterId(emitted.emitterId)
      .setEmissionUuid(emitted.emissionUuid)
      .build()
  }

  private def emitted(format: EmittedFormat): Emitted[_] = {
    Emitted(
      payloadSerializer.payload(format.getEvent),
      format.getEmitterId,
      format.getEmissionUuid
    )
  }
 } 
Example 68
Source File: PayloadSerializer.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases.serializer

import akka.actor.ExtendedActorSystem
import akka.serialization.{SerializationExtension, SerializerWithStringManifest}
import com.github.krasserm.ases.serializer.PayloadFormatOuterClass.PayloadFormat
import com.google.protobuf.ByteString

import scala.util.Try

class PayloadSerializer(system: ExtendedActorSystem) {

  def payloadFormatBuilder(payload: AnyRef): PayloadFormat.Builder = {
    val serializer = SerializationExtension(system).findSerializerFor(payload)
    val builder = PayloadFormat.newBuilder()

    if (serializer.includeManifest) {
      val (isStringManifest, manifest) = serializer match {
        case s: SerializerWithStringManifest => (true, s.manifest(payload))
        case _ => (false, payload.getClass.getName)
      }
      builder.setIsStringManifest(isStringManifest)
      builder.setPayloadManifest(manifest)
    }
    builder.setSerializerId(serializer.identifier)
    builder.setPayload(ByteString.copyFrom(serializer.toBinary(payload)))
  }

  def payload(payloadFormat: PayloadFormat): AnyRef = {
    val payload = if (payloadFormat.getIsStringManifest)
      payloadFromStringManifest(payloadFormat)
    else if (payloadFormat.getPayloadManifest.nonEmpty)
      payloadFromClassManifest(payloadFormat)
    else
      payloadFromEmptyManifest(payloadFormat)

    payload.get
  }

  private def payloadFromStringManifest(payloadFormat: PayloadFormat): Try[AnyRef] = {
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      payloadFormat.getPayloadManifest
    )
  }

  private def payloadFromClassManifest(payloadFormat: PayloadFormat): Try[AnyRef]  = {
    val manifestClass = system.dynamicAccess.getClassFor[AnyRef](payloadFormat.getPayloadManifest).get
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      Some(manifestClass)
    )
  }

  private def payloadFromEmptyManifest(payloadFormat: PayloadFormat): Try[AnyRef]  = {
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      None
    )
  }
} 
Example 69
Source File: ClusterHttpManagementRouteProviderSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster

import akka.actor.ExtendedActorSystem
import akka.cluster.Cluster
import akka.http.scaladsl.model.{ StatusCodes, Uri }
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.ManagementRouteProviderSettings
import org.scalatest.{ Matchers, WordSpec }

object ClusterHttpManagementRouteProviderSpec {}

class ClusterHttpManagementRouteProviderSpec extends WordSpec with ScalatestRouteTest with Matchers {

  val cluster = Cluster(system)

  "Cluster HTTP Management Route" should {
    val routes = ClusterHttpManagementRouteProvider(
      system.asInstanceOf[ExtendedActorSystem]
    )
    "not expose write operations when readOnly set" in {
      val readOnlyRoutes = routes.routes(
        ManagementRouteProviderSettings(
          Uri("http://localhost"),
          readOnly = true
        )
      )
      Get("/cluster/members") ~> readOnlyRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.OK
      }
      Post("/cluster/members") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
      Get("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Delete("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
      Put("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
    }

    "expose write when readOnly false" in {
      val allRoutes = routes.routes(
        ManagementRouteProviderSettings(
          Uri("http://localhost"),
          readOnly = false
        )
      )
      Get("/cluster/members") ~> allRoutes ~> check {
        handled shouldEqual true
      }
      Get("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Delete("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Put("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
    }
  }

} 
Example 70
Source File: ClusterHttpManagementRouteProvider.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster

import akka.actor.ClassicActorSystemProvider
import akka.actor.{ ActorSystem, ExtendedActorSystem, ExtensionId, ExtensionIdProvider }
import akka.cluster.Cluster
import akka.http.scaladsl.server.Route
import akka.management.cluster.scaladsl.ClusterHttpManagementRoutes
import akka.management.scaladsl.ManagementRouteProviderSettings
import akka.management.scaladsl.ManagementRouteProvider

object ClusterHttpManagementRouteProvider
    extends ExtensionId[ClusterHttpManagementRouteProvider]
    with ExtensionIdProvider {
  override def lookup: ClusterHttpManagementRouteProvider.type = ClusterHttpManagementRouteProvider

  override def get(system: ActorSystem): ClusterHttpManagementRouteProvider = super.get(system)

  override def get(system: ClassicActorSystemProvider): ClusterHttpManagementRouteProvider = super.get(system)

  override def createExtension(system: ExtendedActorSystem): ClusterHttpManagementRouteProvider =
    new ClusterHttpManagementRouteProvider(system)

}


// The class declaration is elided in this listing; a minimal reconstruction:
final class ClusterHttpManagementRouteProvider(system: ExtendedActorSystem) extends ManagementRouteProvider {

  private val cluster = Cluster(system)

  override def routes(routeProviderSettings: ManagementRouteProviderSettings): Route =
    if (routeProviderSettings.readOnly) {
      ClusterHttpManagementRoutes.readOnly(cluster)
    } else {
      ClusterHttpManagementRoutes(cluster)
    }

} 
Example 71
Source File: LogLevelRoutesSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.loglevels.logback

import akka.actor.ExtendedActorSystem
import akka.http.javadsl.server.MalformedQueryParamRejection
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.ManagementRouteProviderSettings
import org.scalatest.Matchers
import org.scalatest.WordSpec
import org.slf4j.LoggerFactory
import akka.event.{ Logging => ClassicLogging }

class LogLevelRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest {

  override def testConfigSource: String =
    """
      akka.loglevel = INFO
      """

  val routes = LogLevelRoutes
    .createExtension(system.asInstanceOf[ExtendedActorSystem])
    .routes(ManagementRouteProviderSettings(Uri("https://example.com"), readOnly = false))

  "The logback log level routes" must {

    "show log level of a Logger" in {
      Get("/loglevel/logback?logger=LogLevelRoutesSpec") ~> routes ~> check {
        responseAs[String]
      }
    }

    "change log level of a Logger" in {
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=DEBUG") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        LoggerFactory.getLogger("LogLevelRoutesSpec").isDebugEnabled should ===(true)
      }
    }

    "fail for unknown log level" in {
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=MONKEY") ~> routes ~> check {
        rejection shouldBe an[MalformedQueryParamRejection]
      }
    }

    "not change loglevel if read only" in {
      val readOnlyRoutes = LogLevelRoutes
        .createExtension(system.asInstanceOf[ExtendedActorSystem])
        .routes(ManagementRouteProviderSettings(Uri("https://example.com"), readOnly = true))
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=DEBUG") ~> readOnlyRoutes ~> check {
        response.status should ===(StatusCodes.Forbidden)
      }
    }

    "allow inspecting classic Akka loglevel" in {
      Get("/loglevel/akka") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        responseAs[String] should ===("INFO")
      }
    }

    "allow changing classic Akka loglevel" in {
      Put("/loglevel/akka?level=DEBUG") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        system.eventStream.logLevel should ===(ClassicLogging.DebugLevel)
      }
    }
  }

} 
Example 72
Source File: HealthCheckRoutesSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management

import akka.actor.ExtendedActorSystem
import akka.http.scaladsl.model.{ StatusCodes, Uri }
import akka.http.scaladsl.server._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.{ HealthChecks, ManagementRouteProviderSettings }
import org.scalatest.{ Matchers, WordSpec }

import scala.concurrent.Future

class HealthCheckRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest {

  private val eas = system.asInstanceOf[ExtendedActorSystem]

  private def testRoute(
      readyResultValue: Future[Either[String, Unit]] = Future.successful(Right(())),
      aliveResultValue: Future[Either[String, Unit]] = Future.successful(Right(()))
  ): Route = {
    new HealthCheckRoutes(eas) {
      override protected val healthChecks: HealthChecks = new HealthChecks {
        override def readyResult(): Future[Either[String, Unit]] = readyResultValue
        override def ready(): Future[Boolean] = readyResultValue.map(_.isRight)
        override def aliveResult(): Future[Either[String, Unit]] = aliveResultValue
        override def alive(): Future[Boolean] = aliveResultValue.map(_.isRight)
      }
    }.routes(ManagementRouteProviderSettings(Uri("http://whocares"), readOnly = false))
  }

  tests("/ready", result => testRoute(readyResultValue = result))
  tests("/alive", result => testRoute(aliveResultValue = result))

  def tests(endpoint: String, route: Future[Either[String, Unit]] => Route) = {
    s"Health check ${endpoint} endpoint" should {
      "return 200 for Right" in {
        Get(endpoint) ~> route(Future.successful(Right(()))) ~> check {
          status shouldEqual StatusCodes.OK
        }
      }
      "return 500 for Left" in {
        Get(endpoint) ~> route(Future.successful(Left("com.someclass.MyCheck"))) ~> check {
          status shouldEqual StatusCodes.InternalServerError
          responseAs[String] shouldEqual "Not Healthy: com.someclass.MyCheck"
        }
      }
      "return 500 for fail" in {
        Get(endpoint) ~> route(Future.failed(new RuntimeException("darn it"))) ~> check {
          status shouldEqual StatusCodes.InternalServerError
          responseAs[String] shouldEqual "Health Check Failed: darn it"
        }
      }
    }
  }
} 
Example 73
Source File: HealthCheckRoutes.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management

import akka.actor.ExtendedActorSystem
import akka.annotation.InternalApi
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{ PathMatchers, Route }
import akka.management.scaladsl.{ HealthChecks, ManagementRouteProvider, ManagementRouteProviderSettings }

import scala.util.{ Failure, Success, Try }


@InternalApi
private[akka] class HealthCheckRoutes(system: ExtendedActorSystem) extends ManagementRouteProvider {

  private val settings: HealthCheckSettings = HealthCheckSettings(
    system.settings.config.getConfig("akka.management.health-checks")
  )

  // exposed for testing
  protected val healthChecks = HealthChecks(system, settings)

  private val healthCheckResponse: Try[Either[String, Unit]] => Route = {
    case Success(Right(())) => complete(StatusCodes.OK)
    case Success(Left(failingChecks)) =>
      complete(StatusCodes.InternalServerError -> s"Not Healthy: $failingChecks")
    case Failure(t) =>
      complete(
        StatusCodes.InternalServerError -> s"Health Check Failed: ${t.getMessage}"
      )
  }

  override def routes(mrps: ManagementRouteProviderSettings): Route = {
    concat(
      path(PathMatchers.separateOnSlashes(settings.readinessPath)) {
        get {
          onComplete(healthChecks.readyResult())(healthCheckResponse)
        }
      },
      path(PathMatchers.separateOnSlashes(settings.livenessPath)) {
        get {
          onComplete(healthChecks.aliveResult())(healthCheckResponse)
        }
      }
    )
  }
} 
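The paths and the set of checks are read from configuration under akka.management.health-checks. A sketch of the relevant settings; the check class name is hypothetical:

import com.typesafe.config.ConfigFactory

val healthConfig = ConfigFactory.parseString(
  """
    |akka.management.health-checks {
    |  readiness-path = "ready"
    |  liveness-path  = "alive"
    |  readiness-checks {
    |    my-check = "com.example.MyReadyCheck"   # hypothetical check class
    |  }
    |}
  """.stripMargin)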
Example 74
Source File: AkkaExecutionSequencer.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter

import akka.Done
import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, ExtendedActorSystem, Props}
import akka.pattern.{AskTimeoutException, ask}
import akka.util.Timeout
import com.daml.grpc.adapter.RunnableSequencingActor.ShutdownRequest

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal
import com.daml.dec.DirectExecutionContext


// The original class header and scaladoc are elided in this listing; a minimal
// reconstruction (the sequence member is assumed from the companion's usage):
class AkkaExecutionSequencer private (private val actorRef: ActorRef)(implicit terminationTimeout: Timeout) {

  // Forwards work to the backing actor, which executes runnables one at a time.
  def sequence(runnable: Runnable): Unit = actorRef ! runnable

  def closeAsync(implicit ec: ExecutionContext): Future[Done] =
    (actorRef ? ShutdownRequest).mapTo[Done].recover {
      case askTimeoutException: AskTimeoutException if actorIsTerminated(askTimeoutException) =>
        Done
    }

  private def actorIsTerminated(askTimeoutException: AskTimeoutException) = {
    AkkaExecutionSequencer.actorTerminatedRegex.findFirstIn(askTimeoutException.getMessage).nonEmpty
  }
}

object AkkaExecutionSequencer {
  def apply(name: String, terminationTimeout: FiniteDuration)(
      implicit system: ActorSystem): AkkaExecutionSequencer = {
    system match {
      case extendedSystem: ExtendedActorSystem =>
        new AkkaExecutionSequencer(
          extendedSystem.systemActorOf(Props[RunnableSequencingActor], name))(
          Timeout.durationToTimeout(terminationTimeout))
      case _ =>
        new AkkaExecutionSequencer(system.actorOf(Props[RunnableSequencingActor], name))(
          Timeout.durationToTimeout(terminationTimeout))

    }
  }

  private val actorTerminatedRegex = """Recipient\[.*]\] had already been terminated.""".r
}

private[grpc] class RunnableSequencingActor extends Actor with ActorLogging {
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  override val receive: Receive = {
    case runnable: Runnable =>
      try {
        runnable.run()
      } catch {
        case NonFatal(t) => log.error("Unexpected exception while executing Runnable", t)
      }
    case ShutdownRequest =>
      context.stop(self) // processing of the current message will continue
      sender() ! Done
  }
}

private[grpc] object RunnableSequencingActor {
  case object ShutdownRequest
} 
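A usage sketch for the sequencer, relying on the reconstructed sequence member above; names are illustrative:

import akka.actor.ActorSystem
import scala.concurrent.duration._

implicit val system: ActorSystem = ActorSystem("grpc-adapter")
import system.dispatcher

val sequencer = AkkaExecutionSequencer("sequencer-1", terminationTimeout = 5.seconds)

sequencer.sequence(() => println("first"))    // executed strictly in submission order
sequencer.sequence(() => println("second"))

sequencer.closeAsync.foreach(_ => system.terminate())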
Example 75
Source File: KubernetesLease.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.coordination.lease.kubernetes

import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }

import akka.actor.ExtendedActorSystem
import akka.coordination.lease.{ LeaseException, LeaseSettings, LeaseTimeoutException }
import akka.coordination.lease.scaladsl.Lease
import akka.coordination.lease.kubernetes.LeaseActor._
import akka.coordination.lease.kubernetes.internal.KubernetesApiImpl
import akka.dispatch.ExecutionContexts
import akka.pattern.AskTimeoutException
import akka.util.{ ConstantFun, Timeout }

import scala.concurrent.Future

object KubernetesLease {
  val configPath = "akka.coordination.lease.kubernetes"
  private val leaseCounter = new AtomicInteger(1)
}

class KubernetesLease private[akka] (system: ExtendedActorSystem, leaseTaken: AtomicBoolean, settings: LeaseSettings)
    extends Lease(settings) {

  private val k8sSettings = KubernetesSettings(settings.leaseConfig, settings.timeoutSettings)
  private val k8sApi = new KubernetesApiImpl(system, k8sSettings)
  private val leaseActor = system.systemActorOf(
    LeaseActor.props(k8sApi, settings, leaseTaken),
    s"kubernetesLease${KubernetesLease.leaseCounter.incrementAndGet}-${settings.leaseName}-${settings.ownerName}"
  )

  def this(leaseSettings: LeaseSettings, system: ExtendedActorSystem) =
    this(system, new AtomicBoolean(false), leaseSettings)

  import akka.pattern.ask
  import system.dispatcher

  private implicit val timeout: Timeout = Timeout(settings.timeoutSettings.operationTimeout)

  override def checkLease(): Boolean = leaseTaken.get()

  override def release(): Future[Boolean] = {
    // replace with transform once 2.11 dropped
    (leaseActor ? Release())
      .flatMap {
        case LeaseReleased       => Future.successful(true)
        case InvalidRequest(msg) => Future.failed(new LeaseException(msg))
      }(ExecutionContexts.sameThreadExecutionContext)
      .recoverWith {
        case _: AskTimeoutException =>
          Future.failed(new LeaseTimeoutException(
            s"Timed out trying to release lease [${settings.leaseName}, ${settings.ownerName}]. It may still be taken."))
      }
  }

  override def acquire(): Future[Boolean] = {
    acquire(ConstantFun.scalaAnyToUnit)

  }
  override def acquire(leaseLostCallback: Option[Throwable] => Unit): Future[Boolean] = {
    // replace with transform once 2.11 dropped
    (leaseActor ? Acquire(leaseLostCallback))
      .flatMap {
        case LeaseAcquired       => Future.successful(true)
        case LeaseTaken          => Future.successful(false)
        case InvalidRequest(msg) => Future.failed(new LeaseException(msg))
      }
      .recoverWith {
        case _: AskTimeoutException =>
          Future.failed[Boolean](new LeaseTimeoutException(
            s"Timed out trying to acquire lease [${settings.leaseName}, ${settings.ownerName}]. It may still be taken."))
      }(ExecutionContexts.sameThreadExecutionContext)
  }
} 
Example 76
Source File: AkkaManagementTrigger.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.akka.management

import akka.Done
import akka.actor.ActorSystem
import akka.actor.CoordinatedShutdown
import akka.actor.ExtendedActorSystem
import akka.management.scaladsl.AkkaManagement
import com.typesafe.config.Config
import play.api.Logger

import scala.concurrent.ExecutionContext
import scala.concurrent.Future


// The class header and private members are elided in this listing; a minimal
// reconstruction (member names are assumptions) so the methods below compile:
private[lagom] class AkkaManagementTrigger(
    config: Config,
    system: ActorSystem,
    coordinatedShutdown: CoordinatedShutdown
)(implicit executionContext: ExecutionContext) {

  private val logger = Logger(this.getClass)

  private lazy val enabled = config.getBoolean("lagom.akka.management.enabled")
  private lazy val enabledRenderedValue = config.getValue("lagom.akka.management.enabled").render()

  private[lagom] def forcedStart(requester: String): Future[Done] = {
    if (!enabled) {
      logger.warn(
        s"'lagom.akka.management.enabled' property is set to '$enabledRenderedValue', " +
          s"but Akka Management is being required to start by: '$requester'."
      )
    }

    doStart()
  }

  private def doStart(): Future[Done] = {
    val akkaManagement = AkkaManagement(system.asInstanceOf[ExtendedActorSystem])
    akkaManagement.start().map { _ =>
      // add a task to stop
      coordinatedShutdown.addTask(
        CoordinatedShutdown.PhaseBeforeServiceUnbind,
        "stop-akka-http-management"
      ) { () =>
        akkaManagement.stop()
      }
      Done
    }
  }
} 
Example 77
Source File: ClusterMessageSerializerSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.cluster

import akka.actor.ActorSystem
import akka.actor.ExtendedActorSystem
import akka.testkit.ImplicitSender
import akka.testkit.TestKit
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.internal.cluster.protobuf.msg.ClusterMessages.{ EnsureActive => ProtobufEnsureActive }
import com.typesafe.config.ConfigFactory
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

object ClusterMessageSerializerSpec {
  def actorSystem(): ActorSystem = {
    val config = ConfigFactory.defaultReference()
    ActorSystem(classOf[ClusterMessageSerializerSpec].getSimpleName, config)
  }
}

class ClusterMessageSerializerSpec
    extends TestKit(ClusterMessageSerializerSpec.actorSystem())
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with TypeCheckedTripleEquals
    with ImplicitSender {
  val clusterMessageSerializer = new ClusterMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  "ClusterMessageSerializer" must {
    "serialize EnsureActive" in {
      val ensureActive = EnsureActive("entity-1")
      val bytes        = clusterMessageSerializer.toBinary(ensureActive)
      ProtobufEnsureActive.parseFrom(bytes).getEntityId should be("entity-1")
    }

    "deserialize EnsureActive" in {
      val bytes        = ProtobufEnsureActive.newBuilder().setEntityId("entity-2").build().toByteArray
      val ensureActive = clusterMessageSerializer.fromBinary(bytes, "E").asInstanceOf[EnsureActive]
      ensureActive.entityId should be("entity-2")
    }

    "fail to serialize other types" in {
      assertThrows[IllegalArgumentException] {
        clusterMessageSerializer.toBinary("Strings are not supported")
      }
    }

    "fail to deserialize with the wrong manifest" in {
      assertThrows[IllegalArgumentException] {
        val bytes = ProtobufEnsureActive.newBuilder().setEntityId("entity-2").build().toByteArray
        clusterMessageSerializer.fromBinary(bytes, "WRONG-MANIFEST")
      }
    }
  }

  protected override def afterAll(): Unit = {
    shutdown()
    super.afterAll()
  }
} 
Example 78
Source File: ClusterMessageSerializer.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.cluster

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import akka.serialization.BaseSerializer
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.internal.cluster.protobuf.msg.{ ClusterMessages => cm }

private[lagom] class ClusterMessageSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {
  val EnsureActiveManifest = "E"

  override def manifest(obj: AnyRef): String = obj match {
    case _: EnsureActive => EnsureActiveManifest
    case _ =>
      throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
  }

  def toBinary(obj: AnyRef): Array[Byte] = obj match {
    case ea: EnsureActive => ensureActiveToProto(ea).toByteArray
    case _ =>
      throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
  }

  private def ensureActiveToProto(ensureActive: EnsureActive): cm.EnsureActive = {
    cm.EnsureActive.newBuilder().setEntityId(ensureActive.entityId).build()
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
    case EnsureActiveManifest => ensureActiveFromBinary(bytes)
    case _ =>
      throw new IllegalArgumentException(
        s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]"
      )
  }

  private def ensureActiveFromBinary(bytes: Array[Byte]): EnsureActive = {
    ensureActiveFromProto(cm.EnsureActive.parseFrom(bytes))
  }

  private def ensureActiveFromProto(ensureActive: cm.EnsureActive): EnsureActive = {
    EnsureActive(ensureActive.getEntityId)
  }
} 
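A minimal round-trip sketch for this serializer (an addition for illustration, not part of the Lagom sources; because the class is private[lagom], the snippet assumes it compiles in the same package):

// Round-trip sketch; lives in com.lightbend.lagom.internal.cluster because the
// serializer is package-private. The system and entity names are illustrative.
package com.lightbend.lagom.internal.cluster

import akka.actor.{ ActorSystem, ExtendedActorSystem }
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive

object ClusterMessageSerializerRoundTrip extends App {
  val system     = ActorSystem("round-trip-sketch")
  val serializer = new ClusterMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  val original = EnsureActive("entity-42")
  val bytes    = serializer.toBinary(original)
  val restored = serializer.fromBinary(bytes, serializer.manifest(original))

  assert(restored == original) // manifest "E" routes back to ensureActiveFromBinary
  system.terminate()
}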
Example 79
Source File: CassandraReadSideSessionProvider.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cassandra

import akka.Done
import akka.actor.ActorSystem
import akka.actor.ExtendedActorSystem
import akka.event.Logging
import akka.persistence.cassandra.session.CassandraSessionSettings
import akka.persistence.cassandra.session.scaladsl.{ CassandraSession => AkkaScaladslCassandraSession }
import akka.persistence.cassandra.CassandraPluginConfig
import akka.persistence.cassandra.SessionProvider
import com.datastax.driver.core.Session

import scala.concurrent.ExecutionContext
import scala.concurrent.Future


private[lagom] object CassandraReadSideSessionProvider {
  def apply(
      system: ActorSystem,
      settings: CassandraSessionSettings,
      executionContext: ExecutionContext
  ): AkkaScaladslCassandraSession = {
    import akka.persistence.cassandra.ListenableFutureConverter
    import akka.util.Helpers.Requiring

    import scala.collection.JavaConverters._ // implicit asScala conversion

    val cfg = settings.config
    val replicationStrategy: String = CassandraPluginConfig.getReplicationStrategy(
      cfg.getString("replication-strategy"),
      cfg.getInt("replication-factor"),
      cfg.getStringList("data-center-replication-factors").asScala.toSeq
    )

    val keyspaceAutoCreate: Boolean = cfg.getBoolean("keyspace-autocreate")
    val keyspace: String = cfg
      .getString("keyspace")
      .requiring(
        !keyspaceAutoCreate || _ > "", // when keyspace-autocreate is on, the keyspace name must be non-empty
        "'keyspace' configuration must be defined, or use keyspace-autocreate=off"
      )

    def init(session: Session): Future[Done] = {
      implicit val ec = executionContext
      if (keyspaceAutoCreate) {
        // replicationStrategy already embeds the replication factors,
        // so only the 'class' key is interpolated here
        val result1 =
          session.executeAsync(s"""
            CREATE KEYSPACE IF NOT EXISTS $keyspace
            WITH REPLICATION = { 'class' : $replicationStrategy }
            """).asScala
        result1
          .flatMap { _ =>
            session.executeAsync(s"USE $keyspace;").asScala
          }
          .map(_ => Done)
      } else if (keyspace != "")
        session.executeAsync(s"USE $keyspace;").asScala.map(_ => Done)
      else
        Future.successful(Done)
    }

    val metricsCategory = "lagom-" + system.name

    // using the scaladsl API because the init function returns a Scala Future
    new AkkaScaladslCassandraSession(
      system,
      SessionProvider(system.asInstanceOf[ExtendedActorSystem], settings.config),
      settings,
      executionContext,
      Logging.getLogger(system, this.getClass),
      metricsCategory,
      init
    )
  }
} 
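A usage sketch for the session built above (assumptions: the pre-1.0 akka-persistence-cassandra scaladsl CassandraSession API with selectOne, and a placeholder config path; Lagom's real one differs):

// Usage sketch; in the provider's package because the object is private[lagom].
package com.lightbend.lagom.internal.persistence.cassandra

import scala.concurrent.ExecutionContext.Implicits.global

import akka.actor.ActorSystem
import akka.persistence.cassandra.session.CassandraSessionSettings

object ReadSideSessionSketch extends App {
  val system   = ActorSystem("read-side-sketch")
  // "cassandra-session" is an illustrative config path, not Lagom's actual one
  val settings = CassandraSessionSettings(system.settings.config.getConfig("cassandra-session"))
  val session  = CassandraReadSideSessionProvider(system, settings, global)

  // selectOne returns Future[Option[Row]] in this API
  session
    .selectOne("SELECT release_version FROM system.local")
    .foreach(row => println(row.map(_.getString("release_version"))))
}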
Example 80
Source File: ClusterStartupTaskSerializerSpec.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cluster

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.persistence.ActorSystemSpec

class ClusterStartupTaskSerializerSpec extends ActorSystemSpec {
  val serializer = new ClusterStartupTaskSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[ClusterStartupTaskSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "ClusterStartupTaskSerializerSpec" must {
    "serialize Execute" in {
      checkSerialization(ClusterStartupTaskActor.Execute)
    }
  }
} 
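The checkSerialization helper above can also be expressed against Akka's SerializationExtension alone. A generic sketch (standard Akka serialization API; works for any message with a configured binding):

// Generic round-trip through SerializationExtension instead of a hand-built serializer.
import akka.actor.ActorSystem
import akka.serialization.{ SerializationExtension, Serializers }

object RoundTripSketch {
  def roundTrip(system: ActorSystem, msg: AnyRef): AnyRef = {
    val serialization = SerializationExtension(system)
    val serializer    = serialization.findSerializerFor(msg)
    val manifest      = Serializers.manifestFor(serializer, msg) // "" for plain serializers
    val bytes         = serialization.serialize(msg).get
    serialization.deserialize(bytes, serializer.identifier, manifest).get
  }
}
// roundTrip(system, ClusterStartupTaskActor.Execute) should return Execute unchanged.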
Example 81
Source File: ClusterStartupTaskSerializer.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cluster

import akka.actor.ExtendedActorSystem
import akka.serialization.BaseSerializer
import akka.serialization.SerializerWithStringManifest
import com.lightbend.lagom.internal.persistence.cluster.ClusterStartupTaskActor.Execute

private[lagom] class ClusterStartupTaskSerializer(val system: ExtendedActorSystem)
    extends SerializerWithStringManifest
    with BaseSerializer {
  val ExecuteManifest = "E"

  override def manifest(obj: AnyRef) = obj match {
    case Execute => ExecuteManifest
    case _ =>
      throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
  }

  override def toBinary(obj: AnyRef) = obj match {
    case Execute => Array.emptyByteArray
    case _ =>
      throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String) = manifest match {
    case `ExecuteManifest` => Execute
    case _ =>
      throw new IllegalArgumentException(
        s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]"
      )
  }
} 
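One detail worth calling out: BaseSerializer does not hard-code the identifier; it reads it from configuration, keyed by the serializer's fully qualified class name. A hypothetical sketch (the numeric id is illustrative; Lagom ships the real value in its reference.conf):

import com.typesafe.config.ConfigFactory

object SerializerIdSketch {
  // BaseSerializer resolves akka.actor.serialization-identifiers."<FQCN>" at construction
  // (see BaseSerializer.identifierFromConfig in Akka); 1000002 is an arbitrary example.
  val idConfig = ConfigFactory.parseString("""
    akka.actor.serialization-identifiers {
      "com.lightbend.lagom.internal.persistence.cluster.ClusterStartupTaskSerializer" = 1000002
    }
  """)
}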
Example 82
Source File: PersistenceMessageSerializerSpec.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.scaladsl.persistence.protobuf

import java.io.NotSerializableException

import scala.concurrent.duration._
import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.scaladsl.persistence.CommandEnvelope
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRef
import com.lightbend.lagom.scaladsl.persistence.TestEntity
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.persistence.TestEntitySerializerRegistry

class PersistenceMessageSerializerSpec
    extends ActorSystemSpec(JsonSerializerRegistry.actorSystemSetupFor(TestEntitySerializerRegistry)) {
  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, system, 5.seconds))
          .get
      }
    }
  }
} 
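Outside of ActorSystemSpec, the same registry-backed system can be built directly. A sketch using the setup helper already referenced in the spec's constructor above:

// Sketch: creating an ActorSystem wired with the TestEntity JSON serializers.
import akka.actor.ActorSystem
import com.lightbend.lagom.scaladsl.persistence.TestEntitySerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry

object RegistrySystemSketch extends App {
  val setup  = JsonSerializerRegistry.actorSystemSetupFor(TestEntitySerializerRegistry)
  val system = ActorSystem("registry-sketch", setup)
  system.terminate()
}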
Example 83
Source File: PersistenceMessageSerializerSpec.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.javadsl.persistence.protobuf

import java.io.NotSerializableException

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.javadsl.persistence.CommandEnvelope
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRef
import com.lightbend.lagom.javadsl.persistence.TestEntity

import java.time.{ Duration => JDuration }

class PersistenceMessageSerializerSpec extends ActorSystemSpec {
  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add.of("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, JDuration.ofSeconds(5)))
          .get
      }
    }
  }
}
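A closing note that applies to every example above: the ExtendedActorSystem is obtained by downcasting a plain ActorSystem. The cast is safe at runtime because Akka's default ActorSystem implementation extends ExtendedActorSystem. A minimal sketch:

// The downcast pattern used throughout these examples.
import akka.actor.{ ActorSystem, ExtendedActorSystem }

object ExtendedSystemSketch extends App {
  val system   = ActorSystem("cast-sketch")
  val extended = system.asInstanceOf[ExtendedActorSystem]
  println(extended.dynamicAccess.classLoader) // dynamicAccess is an ExtendedActorSystem-only API
  system.terminate()
}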