org.scalatest.WordSpecLike Scala Examples

The following examples show how to use org.scalatest.WordSpecLike. They are extracted from open-source projects; the source file, project, and license for each example are listed in the header above it.
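As a quick orientation before the project examples, here is a minimal, self-contained sketch of the trait in isolation, using the same ScalaTest 3.0-era imports as the examples below. The CalculatorSpec class and its assertions are illustrative only and are not taken from any project on this page. WordSpecLike is the mixin form of WordSpec, intended for classes that already extend another base class; behaviour is described with "should" clauses containing "in" blocks, and Matchers supplies the assertion DSL.

import org.scalatest.{Matchers, WordSpecLike}

// Minimal illustrative spec (hypothetical class name): mix WordSpecLike into a
// plain class together with Matchers and describe behaviour in should/in blocks.
class CalculatorSpec extends WordSpecLike with Matchers {

  "A calculator" should {
    "add two numbers" in {
      (1 + 2) shouldBe 3
    }
    "concatenate strings" in {
      ("foo" + "bar") should be("foobar")
    }
  }
}

The same pattern recurs throughout the examples below, usually combined with a framework-specific base class (TestKit, FlinkTestBase, and so on) where extending WordSpec directly is not possible.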
Example 1
Source File: RegressITCase.scala    From flink-tensorflow   with Apache License 2.0
package org.apache.flink.contrib.tensorflow.ml

import com.twitter.bijection.Conversion._
import org.apache.flink.api.common.functions.RichFlatMapFunction
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration
import org.apache.flink.contrib.tensorflow.ml.signatures.RegressionMethod._
import org.apache.flink.contrib.tensorflow.types.TensorInjections.{message2Tensor, messages2Tensor}
import org.apache.flink.contrib.tensorflow.util.TestData._
import org.apache.flink.contrib.tensorflow.util.{FlinkTestBase, RegistrationUtils}
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.util.Collector
import org.apache.flink.util.Preconditions.checkState
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.Tensor
import org.tensorflow.contrib.scala.Arrays._
import org.tensorflow.contrib.scala.Rank._
import org.tensorflow.contrib.scala._
import org.tensorflow.example.Example
import resource._

@RunWith(classOf[JUnitRunner])
class RegressITCase extends WordSpecLike
  with Matchers
  with FlinkTestBase {

  override val parallelism = 1

  type LabeledExample = (Example, Float)

  def examples(): Seq[LabeledExample] = {
    for (v <- Seq(0.0f -> 2.0f, 1.0f -> 2.5f, 2.0f -> 3.0f, 3.0f -> 3.5f))
      yield (example("x" -> feature(v._1)), v._2)
  }

  "A RegressFunction" should {
    "process elements" in {
      val env = StreamExecutionEnvironment.getExecutionEnvironment
      RegistrationUtils.registerTypes(env.getConfig)

      val model = new HalfPlusTwo(new Path("../models/half_plus_two"))

      val outputs = env
        .fromCollection(examples())
        .flatMap(new RichFlatMapFunction[LabeledExample, Float] {
          override def open(parameters: Configuration): Unit = model.open()
          override def close(): Unit = model.close()

          override def flatMap(value: (Example, Float), out: Collector[Float]): Unit = {
            for {
              x <- managed(Seq(value._1).toList.as[Tensor].taggedAs[ExampleTensor])
              y <- model.regress_x_to_y(x)
            } {
              // cast as a 1D tensor to use the available conversion
              val o = y.taggedAs[TypedTensor[`1D`,Float]].as[Array[Float]]
              val actual = o(0)
              checkState(actual == value._2)
              out.collect(actual)
            }
          }
        })
        .print()

      env.execute()
    }
  }
} 
Example 2
Source File: CacheBehaviorSpecBase.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.caching

import org.scalatest.{Matchers, WordSpecLike}

trait CacheBehaviorSpecBase extends CacheSpecBase with WordSpecLike with Matchers {
  name should {
    "compute the correct results" in {
      val cache = newCache()

      cache.get(1, _.toString) should be("1")
      cache.get(2, _.toString) should be("2")
      cache.get(3, _.toString) should be("3")
      cache.get(2, _.toString) should be("2")
    }
  }
} 
Example 3
Source File: CacheEvictionSpecBase.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.caching

import org.scalatest.{Matchers, WordSpecLike}
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Second, Span}

import scala.util.Random

trait CacheEvictionSpecBase
    extends CacheBehaviorSpecBase
    with WordSpecLike
    with Matchers
    with Eventually {
  override implicit def patienceConfig: PatienceConfig = PatienceConfig(scaled(Span(1, Second)))

  protected def newLargeCache(): Cache[Integer, String]

  name should {
    "evict values eventually, once the limit has been reached" in {
      val cache = newLargeCache()
      val values = Iterator.continually[Integer](Random.nextInt).take(1000).toSet.toVector

      values.foreach { value =>
        cache.get(value, _.toString)
      }

      // The cache may not evict straight away. We should keep trying.
      eventually {
        val cachedValues = values.map(cache.getIfPresent).filter(_.isDefined)
        // It may evict more than expected, and it might grow past the bounds again before we check.
        cachedValues.length should (be > 16 and be < 500)
      }
    }
  }
} 
Example 4
Source File: CacheCachingSpecBase.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.caching

import java.util.concurrent.atomic.AtomicInteger

import org.scalatest.{Matchers, WordSpecLike}

trait CacheCachingSpecBase extends CacheSpecBase with WordSpecLike with Matchers {
  name should {
    "compute once, and cache" in {
      val cache = newCache()
      val counter = new AtomicInteger(0)

      def compute(value: Integer): String = {
        counter.incrementAndGet()
        value.toString
      }

      cache.get(1, compute)
      cache.get(1, compute)
      cache.get(1, compute)
      cache.get(2, compute)

      counter.get() should be(2)
    }

    "return `None` on `getIfPresent` if the value is not present" in {
      val cache = newCache()

      cache.getIfPresent(7) should be(None)
    }

    "return the value on `getIfPresent` if the value is present" in {
      val cache = newCache()

      cache.get(7, _.toString) should be("7")
      cache.getIfPresent(7) should be(Some("7"))
    }

    "`put` values" in {
      val cache = newCache()

      cache.put(7, "7")
      cache.getIfPresent(7) should be(Some("7"))

      val counter = new AtomicInteger(0)

      def compute(value: Integer): String = {
        counter.incrementAndGet()
        value.toString
      }

      cache.get(7, compute) should be("7")
      counter.get() should be(0)
    }
  }
} 
Example 5
Source File: DropRepeatedSpec.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.server.api

import akka.actor.ActorSystem
import akka.pattern.pipe
import akka.stream.Materializer
import akka.stream.scaladsl.{Sink, Source}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.immutable
import scala.concurrent.ExecutionContext

final class DropRepeatedSpec
    extends TestKit(ActorSystem(classOf[DropRepeatedSpec].getSimpleName))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll {

  private[this] implicit val materializer: Materializer = Materializer(system)
  private[this] implicit val executionContext: ExecutionContext = materializer.executionContext

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "DropRepeated" should {
    "drop repeated elements" in {
      val probe = TestProbe()
      val input = immutable.Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5)

      val _ = Source(input)
        .via(DropRepeated())
        .runWith(Sink.seq)
        .pipeTo(probe.ref)
        .failed
        .foreach(fail(_))

      probe.expectMsg(Vector(1, 2, 3, 4, 5))
    }

    "does not drop duplicate elements that are not repeated" in {
      val probe = TestProbe()
      val input = immutable.Seq(1, 1, 2, 2, 1, 1, 2, 2)

      val _ = Source(input)
        .via(DropRepeated())
        .runWith(Sink.seq)
        .pipeTo(probe.ref)
        .failed
        .foreach(fail(_))

      probe.expectMsg(Vector(1, 2, 1, 2))
    }
  }
} 
Example 6
Source File: GlobPathFilterSpec.scala    From prometheus-akka   with Apache License 2.0
package com.workday.prometheus.akka.impl

import org.scalatest.{Matchers, WordSpecLike}

class GlobPathFilterSpec extends WordSpecLike with Matchers {
  "The GlobPathFilter" should {

    "match a single expression" in {
      val filter = new GlobPathFilter("/user/actor")

      filter.accept("/user/actor") shouldBe true

      filter.accept("/user/actor/something") shouldBe false
      filter.accept("/user/actor/somethingElse") shouldBe false
    }

    "match all expressions in the same level" in {
      val filter = new GlobPathFilter("/user/*")

      filter.accept("/user/actor") shouldBe true
      filter.accept("/user/otherActor") shouldBe true

      filter.accept("/user/something/actor") shouldBe false
      filter.accept("/user/something/otherActor") shouldBe false
    }

    "match all expressions" in {
      val filter = new GlobPathFilter("**")

      filter.accept("GET: /ping") shouldBe true
      filter.accept("GET: /ping/pong") shouldBe true
    }

    "match all expressions and crosses the path boundaries" in {
      val filter = new GlobPathFilter("/user/actor-**")

      filter.accept("/user/actor-") shouldBe true
      filter.accept("/user/actor-one") shouldBe true
      filter.accept("/user/actor-one/other") shouldBe true

      filter.accept("/user/something/actor") shouldBe false
      filter.accept("/user/something/otherActor") shouldBe false
    }

    "match exactly one character" in {
      val filter = new GlobPathFilter("/user/actor-?")

      filter.accept("/user/actor-1") shouldBe true
      filter.accept("/user/actor-2") shouldBe true
      filter.accept("/user/actor-3") shouldBe true

      filter.accept("/user/actor-one") shouldBe false
      filter.accept("/user/actor-two") shouldBe false
      filter.accept("/user/actor-tree") shouldBe false
    }
  }
} 
Example 7
Source File: RegexPathFilterSpec.scala    From prometheus-akka   with Apache License 2.0
package com.workday.prometheus.akka.impl

import org.scalatest.{Matchers, WordSpecLike}

class RegexPathFilterSpec extends WordSpecLike with Matchers {
  "The RegexPathFilter" should {

    "match a single expression" in {
      val filter = new RegexPathFilter("/user/actor")

      filter.accept("/user/actor") shouldBe true

      filter.accept("/user/actor/something") shouldBe false
      filter.accept("/user/actor/somethingElse") shouldBe false
    }

    "match arbitray expressions ending with wildcard" in {
      val filter = new RegexPathFilter("/user/.*")

      filter.accept("/user/actor") shouldBe true
      filter.accept("/user/otherActor") shouldBe true
      filter.accept("/user/something/actor") shouldBe true
      filter.accept("/user/something/otherActor") shouldBe true

      filter.accept("/otheruser/actor") shouldBe false
      filter.accept("/otheruser/otherActor") shouldBe false
      filter.accept("/otheruser/something/actor") shouldBe false
      filter.accept("/otheruser/something/otherActor") shouldBe false
    }

    "match numbers" in {
      val filter = new RegexPathFilter("/user/actor-\\d")

      filter.accept("/user/actor-1") shouldBe true
      filter.accept("/user/actor-2") shouldBe true
      filter.accept("/user/actor-3") shouldBe true

      filter.accept("/user/actor-one") shouldBe false
      filter.accept("/user/actor-two") shouldBe false
      filter.accept("/user/actor-tree") shouldBe false
    }
  }
} 
Example 8
Source File: DatasetFunctionsSpec.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package daf.dataset

import java.io.ByteArrayInputStream

import akka.stream.ActorMaterializer
import akka.stream.scaladsl.StreamConverters
import controllers.modules.TestAbstractModule
import daf.filesystem.MergeStrategy
import daf.instances.{ AkkaInstance, ConfigurationInstance }
import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpecLike }

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Random

class DatasetFunctionsSpec extends TestAbstractModule
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterAll
  with ConfigurationInstance
  with AkkaInstance {

  implicit lazy val executionContext = actorSystem.dispatchers.lookup("akka.actor.test-dispatcher")

  protected implicit lazy val materializer = ActorMaterializer.create { actorSystem }

  override def beforeAll() = {
    startAkka()
  }

  def data = (1 to 5).map { i =>
    Random.alphanumeric.grouped(20).take(5).map { s => s"$i - ${s.mkString}" }.toStream :+ defaultSeparator
  }

  def stream = MergeStrategy.coalesced {
    data.map { iter =>
      new ByteArrayInputStream(
        iter.mkString(defaultSeparator).getBytes("UTF-8")
      )
    }
  }

  def source = StreamConverters.fromInputStream(() => stream, 5)

  "Source manipulation" must {

    "convert to a string source" in {
      Await.result(
        wrapDefault { asStringSource(source) }.runFold("") { _ + _ },
        5.seconds
      ).split(defaultSeparator).length must be { 25 }
    }

    "convert to a json source" in {
      Await.result(
        wrapJson { asStringSource(source) }.runFold("") { _ + _ },
        5.seconds
      ).split(jsonSeparator).length must be { 25 }
    }

  }

} 
Example 9
Source File: LiigaJournalistSpec.scala    From avoin-voitto   with MIT License
package liigavoitto.journalist

import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpecLike}

import scala.util.Try

class LiigaJournalistSpec
  extends WordSpecLike
  with TestUtils
  with BeforeAndAfterAll
  with MustMatchers
  with MockData {

  "LiigaJournalist" must {
    "create an article with language" in {
      val data = md

      val finnishRes = LiigaJournalist.createArticle(data, "fi")
      assert(finnishRes.isDefined)
      assert(finnishRes.get.language == "fi")

      val swedishRes = LiigaJournalist.createArticle(data, "sv")
      assert(swedishRes.isDefined)
      assert(swedishRes.get.language == "sv")
    }
  }
} 
Example 10
Source File: BlackListTests.scala    From EncryCore   with GNU General Public License v3.0
package encry.network

import java.net.{InetAddress, InetSocketAddress}

import akka.actor.ActorSystem
import akka.testkit.{TestActorRef, TestProbe}
import encry.modifiers.InstanceFactory
import encry.network.BlackList.BanReason._
import encry.network.PeerConnectionHandler.{ConnectedPeer, Outgoing}
import encry.network.PeerConnectionHandler.ReceivableMessages.CloseConnection
import encry.network.PeersKeeper.BanPeer
import encry.settings.TestNetSettings
import org.encryfoundation.common.network.BasicMessagesRepo.Handshake
import org.scalatest.{BeforeAndAfterAll, Matchers, OneInstancePerTest, WordSpecLike}
import scala.concurrent.duration._

class BlackListTests extends WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with InstanceFactory
  with OneInstancePerTest
  with TestNetSettings {

  implicit val system: ActorSystem = ActorSystem()

  override def afterAll(): Unit = system.terminate()

  val knowPeersSettings = testNetSettings.copy(
    network = settings.network.copy(
      knownPeers = List(new InetSocketAddress("172.16.11.11", 9001)),
      connectOnlyWithKnownPeers = Some(true)
    ),
    blackList = settings.blackList.copy(
      banTime = 2 seconds,
      cleanupTime = 3 seconds
    ))

  
  "Peers keeper" should {
    "handle ban peer message correctly" in {
      val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref))
      val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000)
      val peerHandler: TestProbe = TestProbe()
      val connectedPeer: ConnectedPeer = ConnectedPeer(
        address,
        peerHandler.ref,
        Outgoing,
        Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis())
      )
      peersKeeper ! BanPeer(connectedPeer, SpamSender)
      peerHandler.expectMsg(CloseConnection)
      peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe true
    }
    "cleanup black list by scheduler correctly" in {
      val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref))
      val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000)
      val peerHandler: TestProbe = TestProbe()
      val connectedPeer: ConnectedPeer = ConnectedPeer(
        address,
        peerHandler.ref,
        Outgoing,
        Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis())
      )
      peersKeeper ! BanPeer(connectedPeer, SentPeersMessageWithoutRequest)
      Thread.sleep(6000)
      peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe false
    }
    "don't remove peer from black list before ban time expired" in {
      val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref))
      val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000)
      val peerHandler: TestProbe = TestProbe()
      val connectedPeer: ConnectedPeer = ConnectedPeer(
        address,
        peerHandler.ref,
        Outgoing,
        Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis())
      )
      Thread.sleep(4000)
      peersKeeper ! BanPeer(connectedPeer, CorruptedSerializedBytes)
      Thread.sleep(2000)
      peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe true
    }
  }
} 
Example 11
Source File: SnapshotDownloadControllerStorageAPITests.scala    From EncryCore   with GNU General Public License v3.0
package encry.view.fast.sync

import encry.settings.EncryAppSettings
import encry.storage.levelDb.versionalLevelDB.LevelDbFactory
import encry.utils.FileHelper
import encry.view.fast.sync.SnapshotHolder.SnapshotManifest.ChunkId
import org.iq80.leveldb.{ DB, Options }
import org.scalatest.{ Matchers, WordSpecLike }
import scorex.utils.Random

class SnapshotDownloadControllerStorageAPITests extends WordSpecLike with Matchers {

  val settingsR: EncryAppSettings = EncryAppSettings.read()

  def init: SnapshotDownloadControllerStorageAPI = new SnapshotDownloadControllerStorageAPI {
    override val storage: DB                = LevelDbFactory.factory.open(FileHelper.getRandomTempDir, new Options)
    override val settings: EncryAppSettings = settingsR
  }

  "Inside SnapshotDownloadControllerStorageAPI class" should {
    "insert many should insert all ids correctly / split for groups with correct size" in {
      val api: SnapshotDownloadControllerStorageAPI = init
      val randomIds: List[ChunkId]                  = (1 to 20001).map(_ => Random.randomBytes()).toList.map(ChunkId @@ _)
      val groups                                    = randomIds.grouped(settingsR.snapshotSettings.chunksNumberPerRequestWhileFastSyncMod).toList
      val insertionsResult                          = api.insertMany(groups)
      insertionsResult.isRight shouldBe true
    }
    "get next for request should return batch if such exists / remove returned batch" in {
      val api: SnapshotDownloadControllerStorageAPI = init
      val randomIds: List[ChunkId]                  = (1 to 5000).map(_ => Random.randomBytes()).toList.map(ChunkId @@ _)
      val groups                                    = randomIds.grouped(settingsR.snapshotSettings.chunksNumberPerRequestWhileFastSyncMod).toList
      val _                                         = api.insertMany(groups)
      val groupsL                                   = randomIds.grouped(settingsR.snapshotSettings.chunksNumberPerRequestWhileFastSyncMod).toList
      (0 until groupsL.size).foreach { r =>
        val res = api.getNextForRequest(r)
        api.getNextForRequest(r).isLeft shouldBe true
        res.isRight shouldBe true
        res.right.get.nonEmpty shouldBe true
        res.right.get.head.sameElements(groupsL(r).head) shouldBe true
        res.right.get.forall(j => groupsL(r).exists(_.sameElements(j))) shouldBe true
        groupsL(r).forall(j => res.right.get.exists(_.sameElements(j))) shouldBe true
      }
    }
  }
} 
Example 12
Source File: MemoryPoolTests.scala    From EncryCore   with GNU General Public License v3.0
package encry.view.mempool

import akka.actor.ActorSystem
import akka.testkit.{ TestActorRef, TestProbe }
import com.typesafe.scalalogging.StrictLogging
import encry.modifiers.InstanceFactory
import encry.settings.{ EncryAppSettings, TestNetSettings }
import encry.utils.NetworkTimeProvider
import encry.view.mempool.MemoryPool.{ NewTransaction, TransactionsForMiner }
import org.scalatest.{ BeforeAndAfterAll, Matchers, OneInstancePerTest, WordSpecLike }

import scala.concurrent.duration._

class MemoryPoolTests
    extends WordSpecLike
    with Matchers
    with InstanceFactory
    with BeforeAndAfterAll
    with OneInstancePerTest
    with TestNetSettings
    with StrictLogging {

  implicit val system: ActorSystem = ActorSystem()

  override def afterAll(): Unit = system.terminate()

  val timeProvider: NetworkTimeProvider = new NetworkTimeProvider(testNetSettings.ntp)

  "MemoryPool" should {
    "add new unique transactions" in {
      val mempool                = MemoryPoolStorage.empty(testNetSettings, timeProvider)
      val transactions           = genValidPaymentTxs(10)
      val (newMempool, validTxs) = mempool.validateTransactions(transactions)
      newMempool.size shouldBe 10
      validTxs.map(_.encodedId).forall(transactions.map(_.encodedId).contains) shouldBe true
    }
    "reject not unique transactions" in {
      val mempool                          = MemoryPoolStorage.empty(testNetSettings, timeProvider)
      val transactions                     = genValidPaymentTxs(10)
      val (newMempool, validTxs)           = mempool.validateTransactions(transactions)
      val (newMempoolAgain, validTxsAgain) = newMempool.validateTransactions(validTxs)
      newMempoolAgain.size shouldBe 10
      validTxsAgain.size shouldBe 0
    }
    "mempoolMaxCapacity works correct" in {
      val mempool                = MemoryPoolStorage.empty(testNetSettings, timeProvider)
      val transactions           = genValidPaymentTxs(11)
      val (newMempool, validTxs) = mempool.validateTransactions(transactions)
      newMempool.size shouldBe 10
      validTxs.size shouldBe 10
    }
    "getTransactionsForMiner works fine" in {
      val mempool         = MemoryPoolStorage.empty(testNetSettings, timeProvider)
      val transactions    = (0 until 10).map(k => coinbaseAt(k))
      val (newMempool, _) = mempool.validateTransactions(transactions)
      val (uPool, txs)    = newMempool.getTransactionsForMiner
      uPool.size shouldBe 0
      txs.map(_.encodedId).forall(transactions.map(_.encodedId).contains) shouldBe true
      transactions.map(_.encodedId).forall(txs.map(_.encodedId).contains) shouldBe true
    }
  }
  "Mempool actor" should {
    "send transactions to miner" in {
      val miner1 = TestProbe()
      val mempool1: TestActorRef[MemoryPool] =
        TestActorRef[MemoryPool](MemoryPool.props(testNetSettings, timeProvider, miner1.ref, Some(TestProbe().ref)))
      val transactions1 = (0 until 4).map { k =>
        val a = coinbaseAt(k)
        a
      }
      transactions1.foreach(mempool1 ! NewTransaction(_))
      mempool1.underlyingActor.memoryPool.size shouldBe 4
      logger.info(s"generated: ${transactions1.map(_.encodedId)}")
      miner1.expectMsg(20.seconds, TransactionsForMiner(transactions1))
    }
  }
} 
Example 13
Source File: ModifiersValidationTest.scala    From EncryCore   with GNU General Public License v3.0
package encry.view.history

import encry.modifiers.InstanceFactory
import encry.network.DeliveryManagerTests.DMUtils.generateBlocks
import encry.settings.{EncryAppSettings, TestNetSettings}
import org.encryfoundation.common.modifiers.history.Block
import org.scalatest.{Matchers, OneInstancePerTest, WordSpecLike}

class ModifiersValidationTest extends WordSpecLike
  with Matchers
  with InstanceFactory
  with OneInstancePerTest
  with TestNetSettings {

  "Modifiers validator" should {
    "validate genesis block" in {
      val newHistory: History = generateDummyHistory(testNetSettings)
      val genesisBlock: Block = generateGenesisBlock(testNetSettings.constants.GenesisHeight)
      newHistory.testApplicable(genesisBlock.header).isRight shouldBe true
      newHistory.append(genesisBlock.header)
      val updatedHistory: History = newHistory.reportModifierIsValid(genesisBlock.header)
      updatedHistory.testApplicable(genesisBlock.payload).isRight shouldBe true
    }
    "reject incorrect modifiers" in {
      val blocks: List[Block] = generateBlocks(2, generateDummyHistory(testNetSettings))._2
      val newHistory: History = generateDummyHistory(testNetSettings)
      blocks.take(1).foldLeft(newHistory) { case (history, block) =>
        history.testApplicable(block.header).isRight shouldBe true
        history.append(block.header)
        history.reportModifierIsValid(block.header)
        history.testApplicable(block.payload).isRight shouldBe true
        history.append(block.payload)
        history.reportModifierIsValid(block)
      }
      blocks.takeRight(1).foldLeft(newHistory) { case (history, block) =>
        history.testApplicable(block.header).isRight shouldBe false
        history.append(block.header)
        history.reportModifierIsValid(block.header)
        history.testApplicable(block.payload).isRight shouldBe true
        history.append(block.payload)
        history.reportModifierIsValid(block)
      }
    }
  }
} 
Example 14
Source File: QuickDataStreamSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0
package io.radicalbit.flink.pmml.scala

import io.radicalbit.flink.pmml.scala.api.reader.ModelReader
import io.radicalbit.flink.pmml.scala.models.prediction.{Prediction, Score, Target}
import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit
import io.radicalbit.flink.streaming.spec.core.{FlinkPipelineTestKit, FlinkTestKitCompanion}
import org.apache.flink.api.scala.ClosureCleaner
import org.apache.flink.ml.math.{DenseVector, SparseVector, Vector}
import org.apache.flink.runtime.client.JobExecutionException
import org.apache.flink.streaming.api.scala._
import org.scalatest.{Matchers, WordSpecLike}

object QuickDataStreamSpec extends FlinkTestKitCompanion[(Prediction, Vector)]

class QuickDataStreamSpec
    extends FlinkPipelineTestKit[Vector, (Prediction, Vector)]
    with WordSpecLike
    with Matchers
    with PmmlLoaderKit {

  private implicit val companion = QuickDataStreamSpec

  private val defaultInput: Vector = DenseVector(1.0, 1.0, 1.0, 1.0)
  private val defaultSparseInput: Vector = SparseVector(4, Array(0, 1, 2, 3), Array(1.0, 1.0, 1.0, 1.0))

  private val defaultPrediction = (Prediction(Score(3.0)), defaultInput)
  private val sparsePrediction = (Prediction(Score(3.0)), defaultSparseInput)
  private val emptyPrediction = (Prediction(Target.empty), defaultInput)

  private def pipelineBuilder(source: Option[String]) = {
    val reader = ModelReader(source getOrElse getPMMLSource(Source.KmeansPmml))

    (in: DataStream[Vector]) =>
      in.quickEvaluate(reader)
  }

  "QuickDataStream" should {

    "quick DataStream should be serializable" in {
      noException should be thrownBy ClosureCleaner.clean(pipelineBuilder(None), checkSerializable = true)
    }

    "return correct output sequence on heterogeneous input" in {
      val in: Seq[Vector] = Seq(defaultInput, defaultSparseInput)
      val out = Seq(defaultPrediction, sparsePrediction)
      executePipeline(in)(pipelineBuilder(None)) shouldBe out
    }

    "compute quick prediction with any dense input vector" in {
      val in: Seq[Vector] = Seq(defaultInput)
      val out = Seq(defaultPrediction)

      executePipeline(in)(pipelineBuilder(None)) shouldBe out
    }

    "compute quick predictions with any sparse input vector" in {
      val in: Seq[Vector] = Seq(defaultSparseInput)
      val out = Seq(sparsePrediction)

      executePipeline(in)(pipelineBuilder(None)) shouldBe out
    }

    "throw JobExecutionException if the model path cannot be loaded" in {
      val invalidSource = Source.NotExistingPath

      an[JobExecutionException] should be thrownBy {
        executePipeline(Seq(defaultInput))(pipelineBuilder(Some(invalidSource))) shouldBe Seq(defaultPrediction)
      }
    }

    "Emit empty prediction if the input is not valid" in {
      val shortInput: Vector = SparseVector(2, Array(0, 3), Array(1.0, 1.0))

      executePipeline(Seq(shortInput))(pipelineBuilder(None)) shouldBe Seq((Prediction(Target.empty), shortInput))
    }

    "Emit empty prediction if the model is not valid" in {
      val invalidModelSource = getPMMLSource(Source.KmeansPmmlEmpty)

      an[JobExecutionException] should be thrownBy {
        executePipeline(Seq(defaultInput))(pipelineBuilder(Some(invalidModelSource))) shouldBe Seq(emptyPrediction)
      }
    }

  }

} 
Example 15
Source File: RichDataStreamSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0
package io.radicalbit.flink.pmml.scala

import io.radicalbit.flink.pmml.scala.api.PmmlModel
import io.radicalbit.flink.pmml.scala.api.reader.ModelReader
import io.radicalbit.flink.pmml.scala.models.prediction.{Prediction, Score, Target}
import io.radicalbit.flink.pmml.scala.utils.models.{BaseInput, Input}
import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit
import io.radicalbit.flink.streaming.spec.core.{FlinkPipelineTestKit, FlinkTestKitCompanion}
import org.apache.flink.api.scala.ClosureCleaner
import org.apache.flink.runtime.client.JobExecutionException
import org.apache.flink.streaming.api.scala._
import org.scalatest.{Matchers, WordSpecLike}

object RichDataStreamSpec extends FlinkTestKitCompanion[Prediction] {

  private val defaultDenseEvalFunction = { (in: Input, model: PmmlModel) =>
    model.predict(BaseInput.toDenseVector(in), None)
  }

}

class RichDataStreamSpec
    extends FlinkPipelineTestKit[Input, Prediction]
    with WordSpecLike
    with Matchers
    with PmmlLoaderKit {

  import RichDataStreamSpec._

  private implicit val companion = RichDataStreamSpec

  private val defaultInput = Input(1.0, 1.0, 1.0, 1.0)

  private val defaultPrediction = Prediction(Score(3.0))
  private val emptyPrediction = Prediction(Target.empty)

  private def pipelineBuilder(source: Option[String])(
      f: (Input, PmmlModel) => Prediction): DataStream[Input] => DataStream[Prediction] = {

    val evaluator = ModelReader(source getOrElse getPMMLSource(Source.KmeansPmml))

    (dataInput: DataStream[Input]) =>
      dataInput.evaluate(evaluator)(f)
  }

  "flink-jpmml" should {

    "richDataStream should be serializable" in {
      val evalFunction = defaultDenseEvalFunction

      noException should be thrownBy ClosureCleaner.clean(pipelineBuilder(None)(evalFunction))
    }

    "compute predictions with any input and an evaluation function" in {
      val in = Seq(defaultInput)
      val out = Seq(defaultPrediction)

      val evalFunction = defaultDenseEvalFunction

      executePipeline(in)(pipelineBuilder(None)(evalFunction)) shouldBe out
    }

    "throw JobExecutionException if the model path cannot be loaded" in {
      val randomSource = Source.NotExistingPath

      an[JobExecutionException] should be thrownBy {
        executePipeline(Seq(defaultInput))(pipelineBuilder(Some(randomSource))(defaultDenseEvalFunction)) shouldBe Seq(
          defaultPrediction)
      }
    }

    "Emit empty prediction if the input is not valid" in {
      val evalFunction = { (in: Input, model: PmmlModel) =>
        model.predict(BaseInput.toSparseVector(in, 2), None)
      }
      executePipeline(Seq(Input(1.0, 3.0)))(pipelineBuilder(None)(evalFunction)) shouldBe Seq(emptyPrediction)
    }

    "Emit empty prediction if the model is not valid" in {
      val invalidModelSource = getPMMLSource(Source.KmeansPmmlEmpty)

      an[JobExecutionException] should be thrownBy {
        executePipeline(Seq(defaultInput))(pipelineBuilder(Some(invalidModelSource))(defaultDenseEvalFunction)) shouldBe Seq(
          emptyPrediction)
      }
    }

  }

} 
Example 16
Source File: EvaluationFunctionSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0
package io.radicalbit.flink.pmml.scala.api.functions

import io.radicalbit.flink.pmml.scala.api.PmmlModel
import io.radicalbit.flink.pmml.scala.api.reader.ModelReader
import io.radicalbit.flink.pmml.scala.models.prediction.{Prediction, Score}
import io.radicalbit.flink.pmml.scala.utils.models.Input
import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit
import io.radicalbit.flink.streaming.spec.core.{FlinkPipelineTestKit, FlinkTestKitCompanion}
import org.apache.flink.api.scala.ClosureCleaner
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector
import org.scalatest.{Matchers, WordSpecLike}

object EvaluationFunctionSpec extends FlinkTestKitCompanion[Prediction]

class EvaluationFunctionSpec
    extends FlinkPipelineTestKit[Input, Prediction]
    with WordSpecLike
    with Matchers
    with PmmlLoaderKit {

  private implicit val companion = EvaluationFunctionSpec

  private val reader = ModelReader(getPMMLSource(Source.KmeansPmml))

  private def evaluationOperator[T](source: ModelReader)(f: (T, PmmlModel) => Prediction) =
    new EvaluationFunction[T, Prediction](source) {
      override def flatMap(value: T, out: Collector[Prediction]): Unit = out.collect(f(value, evaluator))
    }

  private val operator = evaluationOperator(reader) { (in: Input, model: PmmlModel) =>
    Prediction(Score(1.0))
  }

  private def pipeline(source: DataStream[Input]): DataStream[Prediction] = source.flatMap(operator)

  "EvaluationFunction" should {

    "be Serializable" in {
      noException should be thrownBy ClosureCleaner.clean(operator, checkSerializable = true)
    }

    "return expected behavior on given function" in {
      executePipeline(Seq(Input(1.0, 2.0)))(pipeline) shouldBe Seq(Prediction(Score(1.0)))
    }

  }

} 
Example 17
Source File: BaseSpecTest.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.service.test

import akka.actor.ActorSystem
import ch.qos.logback.classic.Level
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.component.Component
import com.webtrends.harness.service.Service
import org.specs2.mutable.SpecificationLike
import org.scalatest.{MustMatchers, WordSpecLike}

import scala.concurrent.duration._

trait BaseWookieeTest {
  def config:Config = ConfigFactory.empty()
  def componentMap:Option[Map[String, Class[_<:Component]]] = None
  def servicesMap:Option[Map[String, Class[_<:Service]]] = None
  def logLevel: Level = Level.INFO
  def startupWait: FiniteDuration = 15 seconds

  TestHarness(config, servicesMap, componentMap, logLevel, startupWait)
  Thread.sleep(1000)
  implicit val system: ActorSystem = TestHarness.system.get
}

trait BaseWookieeSpecTest extends BaseWookieeTest with SpecificationLike
trait BaseWookieeScalaTest extends BaseWookieeTest with WordSpecLike with MustMatchers 
Example 18
Source File: StabilityProtocolSpec.scala    From eventuate   with Apache License 2.0
package com.rbmhtechnology.eventuate.crdt.pure

import com.rbmhtechnology.eventuate.VectorTime
import com.rbmhtechnology.eventuate.crdt.pure.StabilityProtocol._
import org.scalatest.Matchers
import org.scalatest.WordSpecLike

class StabilityProtocolSpec extends WordSpecLike with Matchers {

  val A = "A"
  val B = "B"
  val C = "C"

  def partitions: Set[String] = Set(A, B, C)

  def initialRTM = RTM(StabilityConf(A, partitions))

  def vt(a: Long, b: Long, c: Long) = VectorTime(A -> a, B -> b, C -> c)

  def tcstable(a: Long, b: Long, c: Long) = Some(TCStable(vt(a, b, c)))

  "Stability" should {
    "drop updates from local partition" in {
      initialRTM
        .update(A, vt(1, 1, 1))
        .update(B, vt(2, 2, 2))
        .update(C, vt(2, 2, 2))
        .stable shouldBe tcstable(2, 2, 2)
    }
    "not emit tcstable when B = (1,1,1), C = unknown " in {
      initialRTM
        .update(B, vt(1, 1, 1))
        .stable shouldBe None
    }
    "emit TCStable(0,1) when B = (0,1,1), C = (0,0,1) " in {
      initialRTM
        .update(B, vt(0, 1, 1))
        .update(C, vt(0, 0, 1))
        .stable shouldBe tcstable(0, 0, 1)
    }
    "emit TCStable(1,1) when A = (1,1,1), B = (1,1,1)" in {
      initialRTM
        .update(B, vt(1, 1, 1))
        .update(C, vt(1, 1, 1))
        .stable shouldBe tcstable(1, 1, 1)
    }
    "emit TCStable(1,1) when A = (2,1), B = (1,2)" in {
      initialRTM
        .update(B, vt(2, 2, 1))
        .update(C, vt(1, 1, 2))
        .stable shouldBe tcstable(1, 1, 1)
    }
  }

} 
Example 19
Source File: EventMetadataSpec.scala    From eventuate   with Apache License 2.0
package com.rbmhtechnology.eventuate.adapter.vertx

import com.rbmhtechnology.eventuate.adapter.vertx.api.EventMetadata
import io.vertx.core.MultiMap
import org.scalatest.{ MustMatchers, WordSpecLike }

import scala.collection.JavaConverters._

class EventMetadataSpec extends WordSpecLike with MustMatchers {

  import EventMetadata._
  import EventMetadata.Headers._

  def headers(elems: (String, Any)*): MultiMap = {
    val headers = MultiMap.caseInsensitiveMultiMap()
    headers.setAll(Map(elems: _*).mapValues(_.toString).asJava)
    headers
  }

  "An EventMetadata" when {
    "supplied with valid headers" must {
      "be instantiated with all metadata" in {
        val metadata = EventMetadata.fromHeaders(headers(
          MessageProducer -> VertxProducer,
          LocalLogId -> "logA",
          LocalSequenceNr -> 1L,
          EmitterId -> "emitter1")
        )

        metadata.map(_.localLogId) mustBe Some("logA")
        metadata.map(_.localSequenceNr) mustBe Some(1L)
        metadata.map(_.emitterId) mustBe Some("emitter1")
      }
    }
    "supplied with invalid headers" must {
      "be empty if the source is not specified" in {
        val metadata = EventMetadata.fromHeaders(headers(
          LocalLogId -> "logA",
          LocalSequenceNr -> 1L,
          EmitterId -> "emitter1")
        )

        metadata mustBe None
      }
      "be empty if the headers are empty" in {
        val metadata = EventMetadata.fromHeaders(headers())

        metadata mustBe None
      }
      "fail to instantiate if the values have the wrong type" in {
        a[NumberFormatException] must be thrownBy EventMetadata.fromHeaders(headers(
          MessageProducer -> VertxProducer,
          LocalLogId -> "logA",
          LocalSequenceNr -> "i_am_not_a_long_value",
          EmitterId -> "emitter1")
        )
      }
      "fail to instantiate if the a value is missing" in {
        an[IllegalArgumentException] must be thrownBy EventMetadata.fromHeaders(headers(
          MessageProducer -> VertxProducer,
          LocalSequenceNr -> 1L,
          EmitterId -> "emitter1")
        )
      }
    }
  }
} 
Example 20
Source File: VertxAdapterSpec.scala    From eventuate   with Apache License 2.0
package com.rbmhtechnology.eventuate.adapter.vertx

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.rbmhtechnology.eventuate.adapter.vertx.api.{ EventProducer, VertxAdapterConfig }
import com.rbmhtechnology.eventuate.log.EventLogWriter
import com.rbmhtechnology.eventuate.log.leveldb.LeveldbEventLog
import com.rbmhtechnology.eventuate.utilities._
import com.rbmhtechnology.eventuate.{ LocationCleanupLeveldb, ReplicationEndpoint }
import com.typesafe.config.Config
import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpecLike }

import scala.collection.immutable.Seq

object VertxAdapterSpec {
  case class Event(id: String)

  val Config = TestConfig.withReplayBatchSize(10)
}

class VertxAdapterSpec extends TestKit(ActorSystem("test", VertxAdapterSpec.Config))
  with WordSpecLike with MustMatchers with BeforeAndAfterAll with StopSystemAfterAll with LocationCleanupLeveldb
  with VertxEnvironment with VertxEventBusProbes {

  import VertxAdapterSpec._
  import utilities._

  val logName = "logA"
  val adapterId = "adapter1"
  var storage: ActorStorageProvider = _
  var endpoint: ReplicationEndpoint = _

  override def config: Config = VertxAdapterSpec.Config

  override def beforeAll(): Unit = {
    super.beforeAll()
    storage = new ActorStorageProvider(adapterId)
    endpoint = new ReplicationEndpoint(id = "1", logNames = Set(logName), logFactory = logId => LeveldbEventLog.props(logId), connections = Set())
  }

  "A VertxAdapter" must {
    "read events from an inbound log and deliver them to the Vert.x eventbus" in {
      val log = endpoint.logs(logName)
      val adapterConfig = VertxAdapterConfig()
        .addProducer(EventProducer.fromLog(log)
          .publishTo {
            case _ => endpoint1.address
          }
          .as("adapter1"))
        .registerDefaultCodecFor(classOf[Event])

      val vertxAdapter = VertxAdapter(adapterConfig, vertx, storage)
      val logWriter = new EventLogWriter("w1", endpoint.logs(logName))

      endpoint.activate()
      vertxAdapter.start()

      logWriter.write(Seq(Event("1"))).await.head

      storage.expectRead(replySequenceNr = 0)
      storage.expectWrite(sequenceNr = 1)

      endpoint1.probe.expectVertxMsg(body = Event("1"))

      logWriter.write(Seq(Event("2"))).await

      storage.expectWrite(sequenceNr = 2)

      endpoint1.probe.expectVertxMsg(body = Event("2"))

      logWriter.write(Seq(Event("3"), Event("4"))).await

      storage.expectWriteAnyOf(sequenceNrs = Seq(3, 4))

      endpoint1.probe.expectVertxMsg(body = Event("3"))
      endpoint1.probe.expectVertxMsg(body = Event("4"))
    }
  }
} 
Example 21
Source File: ProducerStreamSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0
package akka.kafka

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.testkit.{DefaultTimeout, ImplicitSender, TestKit, TestProbe}
import com.omearac.consumers.ConsumerStream
import com.omearac.producers.ProducerStream
import com.omearac.settings.Settings
import com.omearac.shared.JsonMessageConversion.Conversion
import com.omearac.shared.KafkaMessages.{ExampleAppEvent, KafkaMessage}
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}


class ProducerStreamSpec extends TestKit(ActorSystem("ProducerStreamSpec"))
    with DefaultTimeout with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll
    with ConsumerStream with ProducerStream {

    val settings = Settings(system).KafkaProducers
    val probe = TestProbe()

    override def afterAll: Unit = {
        shutdown()
    }

    "Sending KafkaMessages to the KafkaMessage producerStream" should {
        "be converted to JSON and obtained by the Stream Sink " in {

            //Creating Producer Stream Components for publishing KafkaMessages
            val producerProps = settings.KafkaProducerInfo("KafkaMessage")
            val numOfMessages = 50
            val kafkaMsgs = for { i <- 0 to numOfMessages} yield KafkaMessage("sometime", "somestuff", i)
            val producerSource = Source(kafkaMsgs)
            val producerFlow = createStreamFlow[KafkaMessage](producerProps)
            val producerSink = Sink.actorRef(probe.ref, "complete")

            val jsonKafkaMsgs = for { msg <- kafkaMsgs} yield Conversion[KafkaMessage].convertToJson(msg)

            producerSource.via(producerFlow).runWith(producerSink)
            for (i <- 0 to jsonKafkaMsgs.length) {
                probe.expectMsgPF(){
                    case m: ProducerRecord[_,_] => if (jsonKafkaMsgs.contains(m.value())) () else fail()
                    case "complete" => ()
                }
            }
        }
    }

    "Sending ExampleAppEvent messages to the EventMessage producerStream" should {
        "be converted to JSON and obtained by the Stream Sink " in {

            //Creating Producer Stream Components for publishing ExampleAppEvent messages
            val producerProps = settings.KafkaProducerInfo("ExampleAppEvent")
            val numOfMessages = 50
            val eventMsgs = for { i <- 0 to 50} yield ExampleAppEvent("sometime", "senderID", s"Event number $i occurred")

            val producerSource = Source(eventMsgs)
            val producerFlow = createStreamFlow[ExampleAppEvent](producerProps)
            val producerSink = Sink.actorRef(probe.ref, "complete")

            val jsonAppEventMsgs = for{ msg <- eventMsgs} yield Conversion[ExampleAppEvent].convertToJson(msg)
            producerSource.via(producerFlow).runWith(producerSink)
            for (i <- 0 to jsonAppEventMsgs.length){
                probe.expectMsgPF(){
                    case m: ProducerRecord[_,_] => if (jsonAppEventMsgs.contains(m.value())) () else fail()
                    case "complete" => ()
                }
            }
        }
    }
} 
Example 22
Source File: EventConsumerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0 5 votes vote down vote up
package akka.kafka

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{DefaultTimeout, ImplicitSender, TestActorRef, TestKit}
import com.omearac.consumers.ConsumerStreamManager.{InitializeConsumerStream, TerminateConsumerStream}
import com.omearac.consumers.DataConsumer.{ConsumerActorReply, ManuallyInitializeStream, ManuallyTerminateStream}
import com.omearac.consumers.EventConsumer
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.mutable.ArrayBuffer


class EventConsumerSpec extends TestKit(ActorSystem("EventConsumerSpec"))
  with DefaultTimeout with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  //Creating the Actors
  val testConsumer = TestActorRef(new EventConsumer)
  val mockStreamAndManager = system.actorOf(Props(new MockStreamAndManager), "mockStreamAndManager")

  override def afterAll: Unit = {
    shutdown()
  }

  class MockStreamAndManager extends Actor {
    val receive: Receive = {
      case InitializeConsumerStream(_, _) => testConsumer ! "STREAM_INIT"
      case TerminateConsumerStream(_) => testConsumer ! "STREAM_DONE"
    }
  }


  "Sending ManuallyTerminateStream to EventConsumer in receive state" should {
    "return a Stream Already Stopped reply " in {
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Event Consumer Stream Already Stopped"))
    }
  }

  "Sending ManuallyInitializeStream to EventConsumer in receive state" should {
    "forward the message to the ConsumerStreamManager and change state to consuming" in {
      testConsumer.underlyingActor.consumerStreamManager = mockStreamAndManager
      testConsumer ! ManuallyInitializeStream
      expectMsg(ConsumerActorReply("Event Consumer Stream Started"))
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyInitializeStream
      expectMsg(ConsumerActorReply("Event Consumer Already Started"))
    }
  }

  "Sending STREAM_DONE to EventConsumer while in consuming state" should {
    "change state to idle state" in {
      val consuming = testConsumer.underlyingActor.consumingEvents
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer ! "STREAM_DONE"
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Event Consumer Stream Already Stopped"))
    }
  }
  "Sending ManuallyTerminateStream to EventConsumer while in consuming state" should {
    "forward the message to the ConsumerStreamManager and then upon reply, change state to idle" in {
      val consuming = testConsumer.underlyingActor.consumingEvents
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Event Consumer Stream Stopped"))
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Event Consumer Stream Already Stopped"))
    }
  }

  "Sending ConsumerMessageBatch message" should {
    "reply OK" in {
      val msgBatch: ArrayBuffer[String] = ArrayBuffer("test1")
      val consuming = testConsumer.underlyingActor.consumingEvents
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer.underlyingActor.consumerStreamManager = mockStreamAndManager
      testConsumer ! msgBatch
      expectMsg("OK")
    }
  }
} 
Example 23
Source File: EventProducerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0
package akka.kafka

import java.util.Date

import akka.Done
import akka.actor.ActorSystem
import akka.serialization.Serialization
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.omearac.producers.EventProducer
import com.omearac.shared.AkkaStreams
import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished}
import com.omearac.shared.KafkaMessages.ExampleAppEvent
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Future


class EventProducerSpec extends TestKit(ActorSystem("EventProducerSpec",ConfigFactory.parseString("""
    akka.loggers = ["akka.testkit.TestEventListener"] """)))
    with DefaultTimeout with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll
    with AkkaStreams {

    val testProducer = TestActorRef(new EventProducer)
    val producerActor = testProducer.underlyingActor
    val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
        override def complete(): Unit = println("complete")

        override def fail(ex: Throwable): Unit = println("fail")

        override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued}

        override def watchCompletion(): Future[Done] = Future{Done}
    }

    override def afterAll: Unit = {
        shutdown()
    }

    //Create an test event listener for the local message bus
    val testEventListener = TestProbe()
    system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent])


    "Sending ActivatedProducerStream to EventProducer in receive state" should {
        "save the stream ref and change state to producing " in {
            testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic")
            Thread.sleep(500)
            producerActor.producerStream should be(mockProducerStream)
            EventFilter.error(message = "EventProducer got the unknown message while producing: testMessage", occurrences = 1) intercept {
                testProducer ! "testMessage"
            }
        }
    }

    "Sending ExampleAppEvent to system bus while EventProducer is in publishEvent state" should {
        "offer the ExampleAppEvent to the stream " in {
            val producingState = producerActor.publishEvent
            producerActor.context.become(producingState)
            producerActor.producerStream = mockProducerStream
            val dateFormat = new java.text.SimpleDateFormat("dd:MM:yy:HH:mm:ss.SSS")
            lazy val timetag = dateFormat.format(new Date(System.currentTimeMillis()))
            val eventMsg = MessagesPublished(5)
            val testMessage = ExampleAppEvent(timetag,Serialization.serializedActorPath(self),eventMsg.toString)
            system.eventStream.publish(testMessage)
            testEventListener.expectMsgPF(){
                case ExampleAppEvent(_,_,m) => if (m == eventMsg.toString) () else fail()
            }
        }
    }
 } 
Example 24
Source File: DataProducerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0
package akka.kafka

import akka.Done
import akka.actor.ActorSystem
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.omearac.producers.DataProducer
import com.omearac.producers.DataProducer.PublishMessages
import com.omearac.shared.AkkaStreams
import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished}
import com.omearac.shared.KafkaMessages.ExampleAppEvent
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Future


class DataProducerSpec extends TestKit(ActorSystem("DataProducerSpec", ConfigFactory.parseString(
  """
    akka.loggers = ["akka.testkit.TestEventListener"] """)))
  with DefaultTimeout with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll
  with AkkaStreams {

  val testProducer = TestActorRef(new DataProducer)
  val producerActor = testProducer.underlyingActor

  val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
    override def complete(): Unit = println("complete")

    override def fail(ex: Throwable): Unit = println("fail")

    override def offer(elem: Any): Future[QueueOfferResult] = Future {
      Enqueued
    }

    override def watchCompletion(): Future[Done] = Future {
      Done
    }
  }

  override def afterAll: Unit = {
    shutdown()
  }

  //Create an test event listener for the local message bus
  val testEventListener = TestProbe()
  system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent])


  "Sending ActivatedProducerStream to DataProducer in receive state" should {
    "save the stream ref and change state to producing " in {
      testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic")
      Thread.sleep(500)
      producerActor.producerStream should be(mockProducerStream)
      EventFilter.error(message = "DataProducer got the unknown message while producing: testMessage", occurrences = 1) intercept {
        testProducer ! "testMessage"
      }
    }
  }

  "Sending PublishMessages(number: Int) to DataProducer in publishData state" should {
    "return MessagesPublished(number: Int) and publish the local event " in {
      val producing = producerActor.publishData
      producerActor.context.become(producing)
      producerActor.producerStream = mockProducerStream
      val resultMessage = MessagesPublished(5)
      testProducer ! PublishMessages(5)
      expectMsg(resultMessage)
      testEventListener.expectMsgPF() {
        case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail()
      }
    }
  }
} 
Example 25
Source File: DataConsumerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0
package akka.kafka

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{DefaultTimeout, ImplicitSender, TestActorRef, TestKit}
import com.omearac.consumers.ConsumerStreamManager.{InitializeConsumerStream, TerminateConsumerStream}
import com.omearac.consumers.DataConsumer
import com.omearac.consumers.DataConsumer.{ConsumerActorReply, ManuallyInitializeStream, ManuallyTerminateStream}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.mutable.ArrayBuffer


class DataConsumerSpec extends TestKit(ActorSystem("DataConsumerSpec"))
  with DefaultTimeout with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  //Creating the Actors
  val testConsumer = TestActorRef(new DataConsumer)
  val mockStreamAndManager = system.actorOf(Props(new MockStreamAndManager), "mockStreamAndManager")

  override def afterAll: Unit = {
    shutdown()
  }

  class MockStreamAndManager extends Actor {
    val receive: Receive = {
      case InitializeConsumerStream(_, _) => testConsumer ! "STREAM_INIT"
      case TerminateConsumerStream(_) => testConsumer ! "STREAM_DONE"
    }
  }


  "Sending ManuallyTerminateStream to DataConsumer in receive state" should {
    "return a Stream Already Stopped reply " in {
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Data Consumer Stream Already Stopped"))
    }
  }

  "Sending ManuallyInitializeStream to DataConsumer in receive state" should {
    "forward the message to the ConsumerStreamManager and change state to consuming" in {
      testConsumer.underlyingActor.consumerStreamManager = mockStreamAndManager
      testConsumer ! ManuallyInitializeStream
      expectMsg(ConsumerActorReply("Data Consumer Stream Started"))
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyInitializeStream
      expectMsg(ConsumerActorReply("Data Consumer Already Started"))
    }
  }

  "Sending STREAM_DONE to DataConsumer while in consuming state" should {
    "change state to idle state" in {
      val consuming = testConsumer.underlyingActor.consumingData
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer ! "STREAM_DONE"
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Data Consumer Stream Already Stopped"))
    }
  }
  "Sending ManuallyTerminateStream to DataConsumer while in consuming state" should {
    "forward the message to the ConsumerStreamManager and then upon reply, change state to idle" in {
      val consuming = testConsumer.underlyingActor.consumingData
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Data Consumer Stream Stopped"))
      //Now check for state change
      Thread.sleep(750)
      testConsumer ! ManuallyTerminateStream
      expectMsg(ConsumerActorReply("Data Consumer Stream Already Stopped"))
    }
  }

  "Sending ConsumerMessageBatch message" should {
    "reply OK" in {
      val msgBatch: ArrayBuffer[String] = ArrayBuffer("test1")
      val consuming = testConsumer.underlyingActor.consumingData
      testConsumer.underlyingActor.context.become(consuming)
      testConsumer.underlyingActor.consumerStreamManager = mockStreamAndManager
      testConsumer ! msgBatch
      expectMsg("OK")
    }
  }
} 
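
The MockStreamAndManager above hand-rolls a mock actor. A minimal sketch of an alternative, assuming nothing beyond akka-testkit itself: TestProbe.setAutoPilot lets the probe both record incoming messages and reply on behalf of the mocked collaborator. The message strings here are illustrative.

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestActor, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

// Illustrative only: replying from a TestProbe via an auto-pilot.
class AutoPilotSketchSpec extends TestKit(ActorSystem("AutoPilotSketchSpec"))
  with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll {

  override def afterAll(): Unit = shutdown()

  "A TestProbe with an auto-pilot" should {
    "acknowledge messages on behalf of the mocked collaborator" in {
      val manager = TestProbe()
      manager.setAutoPilot(new TestActor.AutoPilot {
        def run(sender: ActorRef, msg: Any): TestActor.AutoPilot = {
          sender ! s"ack:$msg"    // reply to whoever sent the message
          TestActor.KeepRunning   // keep the pilot installed for subsequent messages
        }
      })

      manager.ref ! "init-stream"
      expectMsg("ack:init-stream")     // the reply arrives at testActor (ImplicitSender)
      manager.expectMsg("init-stream") // the probe still records what it received
    }
  }
}
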
Example 26
Source File: ProducerStreamManagerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0 5 votes vote down vote up
package akka.kafka

import akka.actor.ActorSystem
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{DefaultTimeout, ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.omearac.producers.ProducerStreamManager
import com.omearac.producers.ProducerStreamManager.InitializeProducerStream
import com.omearac.shared.AkkaStreams
import com.omearac.shared.EventMessages.ActivatedProducerStream
import com.omearac.shared.KafkaMessages.{ExampleAppEvent, KafkaMessage}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}


class ProducerStreamManagerSpec extends TestKit(ActorSystem("ProducerStreamManagerSpec"))
  with DefaultTimeout with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll
  with AkkaStreams {

  val testProducerStreamManager = TestActorRef(new ProducerStreamManager)
  val producerStreamManagerActor = testProducerStreamManager.underlyingActor

  //Create a test event listener for the local message bus
  val testEventListener = TestProbe()
  system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent])

  override def afterAll: Unit = {
    shutdown()
  }


  "Sending InitializeProducerStream(self, KafkaMessage) to ProducerStreamManager" should {
    "initialize the stream for that particular message type, return ActivatedProducerStream(streaRef, \"TempChannel1\") and produce local event " in {
      testProducerStreamManager ! InitializeProducerStream(self, KafkaMessage)
      Thread.sleep(500)
      var streamRef: SourceQueueWithComplete[Any] = null
      expectMsgPF() {
        case ActivatedProducerStream(sr, kt) => if (kt == "TempChannel1") {
          streamRef = sr; ()
        } else fail()
      }

      Thread.sleep(500)
      val resultMessage = ActivatedProducerStream(streamRef, "TempChannel1")
      testEventListener.expectMsgPF() {
        case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail()
      }
    }
  }

  "Sending InitializeProducerStream(self, ExampleAppEvent) to ProducerStreamManager" should {
    "initialize the stream for that particular message type, return ActivatedProducerStream(streaRef, \"TempChannel2\") and produce local event " in {
      testProducerStreamManager ! InitializeProducerStream(self, ExampleAppEvent)
      Thread.sleep(500)
      var streamRef: SourceQueueWithComplete[Any] = null
      expectMsgPF() {
        case ActivatedProducerStream(sr, kt) => if (kt == "TempChannel2") {
          streamRef = sr; ()
        } else fail()
      }

      Thread.sleep(500)
      val resultMessage = ActivatedProducerStream(streamRef, "TempChannel2")
      testEventListener.expectMsgPF() {
        case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail()
      }
    }
  }
} 
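
Both cases above capture the stream reference through a mutable var inside expectMsgPF. Since expectMsgPF returns whatever the partial function produces, the var can be avoided; a minimal, self-contained sketch follows, where the Activated case class is a stand-in for ActivatedProducerStream.

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

final case class Activated(streamRef: String, channel: String) // stand-in for ActivatedProducerStream

class ExpectMsgPFSketchSpec extends TestKit(ActorSystem("ExpectMsgPFSketchSpec"))
  with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll {

  override def afterAll(): Unit = shutdown()

  "expectMsgPF" should {
    "return the value extracted by the partial function" in {
      testActor ! Activated("stream-1", "TempChannel1")
      // No mutable var needed: the extracted reference is the result of expectMsgPF.
      val streamRef = expectMsgPF() {
        case Activated(ref, "TempChannel1") => ref
      }
      streamRef shouldBe "stream-1"
    }
  }
}
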
Example 27
Source File: BakerySpec.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch11

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import ch11.Cook.RawCookies
import ch11.Manager.ShoppingList
import ch11.Oven.Cookies
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.Random

class BakerySpec(_system: ActorSystem)
    extends TestKit(_system)
    with Matchers
    with WordSpecLike
    with BeforeAndAfterAll
    with ImplicitSender {

  def this() = this(ActorSystem("BakerySpec"))

  override def afterAll: Unit = shutdown(system)

  "The boy should" should {
    val boyProps = Boy.props(system.actorSelection(testActor.path))
    val boy = system.actorOf(boyProps)

    "forward given ShoppingList to the seller" in {
      val list = ShoppingList(0, 0, 0, 0)
      boy ! list
      within(3 millis, 20 millis) {
        expectMsg(list)
        lastSender shouldBe testActor
      }
    }
    "ignore other message types" in {
      boy ! 'GoHome
      expectNoMessage(500 millis)
    }
  }
  "The baker should" should {
    val parent = TestProbe()
    val baker = parent.childActorOf(Props(classOf[Baker], 0 millis))
    "bake cookies in batches" in {
      val count = Random.nextInt(100)
      baker ! RawCookies(Oven.size * count)
      parent.expectMsgAllOf(List.fill(count)(Cookies(Oven.size)):_*)
    }
  }
} 
Example 28
Source File: StoreSpec.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch11

import akka.testkit.TestKit
import ch11.Manager.ShoppingList
import ch11.Mixer.Groceries
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.language.postfixOps

class StoreSpec(store: Store) extends TestKit(store.store)
    with Matchers with WordSpecLike with BeforeAndAfterAll {

  def this() = this(new Store {})

  override def afterAll: Unit = shutdown(system)

  "A seller in store" should {
    "do nothing for all unexpected message types" in {
      store.seller ! 'UnexpectedMessage
      expectNoMessage()
    }
    "return groceries if given a shopping list" in {
      store.seller.tell(ShoppingList(1, 1, 1, 1), testActor)
      expectMsg(Groceries(1,1,1,1))
    }
  }
} 
Example 29
Source File: TraceFriendlyThreadPoolExecutorSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.concurrent

import java.util.concurrent.{ Callable, ExecutorService }

import com.comcast.money.api.SpanId
import com.comcast.money.core.SpecHelpers
import com.comcast.money.core.internal.SpanLocal
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, OneInstancePerTest, WordSpecLike }
import org.slf4j.MDC

class TraceFriendlyThreadPoolExecutorSpec
  extends WordSpecLike
  with MockitoSugar with Matchers with ConcurrentSupport with OneInstancePerTest with SpecHelpers {

  val executor: ExecutorService = TraceFriendlyThreadPoolExecutor.newCachedThreadPool

  "TraceFriendlyThreadPoolExecutor cachedThreadPool" should {
    "propagate the current span local value" in {
      val traceId = new SpanId("1", 2L, 3L)
      SpanLocal.push(testSpan(traceId))

      val future = executor.submit(testCallable)

      future.get shouldEqual Some(traceId)
      SpanLocal.clear()
    }
    "propagate no span value if none is present" in {
      SpanLocal.clear()

      val future = executor.submit(testCallable)

      future.get shouldEqual None
      SpanLocal.current shouldEqual None
    }
    "propagate only the current span id value" in {
      val traceId1 = new SpanId()
      val traceId2 = new SpanId()
      SpanLocal.push(testSpan(traceId1))
      SpanLocal.push(testSpan(traceId2))

      val future = executor.submit(testCallable)
      future.get shouldEqual Some(traceId2)
    }
    "propagate MDC" in {
      val traceId = new SpanId("1", 2L, 3L)
      SpanLocal.push(testSpan(traceId))
      MDC.put("foo", "bar")

      val mdcCallable = new Callable[String] {
        override def call(): String = MDC.get("foo")
      }

      val future = executor.submit(mdcCallable)

      future.get shouldEqual "bar"
      SpanLocal.clear()
    }
  }
  "TraceFriendlyThreadPoolExecutor fixedThreadPool" should {
    val threadPool: TraceFriendlyThreadPoolExecutor = TraceFriendlyThreadPoolExecutor.newFixedThreadPool(1)
      .asInstanceOf[TraceFriendlyThreadPoolExecutor]

    "created the pool with the specified number of threads" in {
      threadPool.getCorePoolSize shouldEqual 1
    }
  }
} 
Example 30
Source File: DirectExecutionContextSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.async

import com.comcast.money.core.SpecHelpers
import com.comcast.money.core.concurrent.ConcurrentSupport
import org.scalatest.{ Matchers, OneInstancePerTest, WordSpecLike }
import org.scalatest.mockito.MockitoSugar

class DirectExecutionContextSpec
  extends WordSpecLike
  with MockitoSugar with Matchers with ConcurrentSupport with OneInstancePerTest with SpecHelpers {

  val underTest = new DirectExecutionContext()

  "DirectExecutionContext" should {
    "execute the Runnable on the current thread" in {
      val currentThreadId = Thread.currentThread().getId
      var callbackThreadId: Long = 0

      underTest.execute(new Runnable {
        override def run(): Unit = {
          callbackThreadId = Thread.currentThread().getId
        }
      })

      callbackThreadId shouldEqual currentThreadId
    }
  }
} 
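
For context, a calling-thread ExecutionContext of the kind this spec exercises usually amounts to running the Runnable inline. The sketch below is an assumption about the general shape, not the Money project's actual DirectExecutionContext.

import scala.concurrent.ExecutionContext

// Assumed shape only: executes work synchronously on the calling thread,
// which is exactly what the spec above asserts via the thread ids.
class CallingThreadExecutionContext extends ExecutionContext {
  override def execute(runnable: Runnable): Unit = runnable.run()
  override def reportFailure(cause: Throwable): Unit = cause.printStackTrace()
}
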
Example 31
Source File: AkkaMoneyScope.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.akka

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import com.comcast.money.akka.SpanHandlerMatchers.clearHandlerChain
import com.typesafe.config.ConfigFactory
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike }

abstract class AkkaMoneyScope extends WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach {

  val configString: String =
    """
      | money {
      |  handling = {
      |    async = false
      |    handlers = [
      |    {
      |      class = "com.comcast.money.akka.CollectingSpanHandler"
      |      log-level = "INFO"
      |    }]
      |  }
      | }""".stripMargin

  implicit val actorSystem: ActorSystem = ActorSystem("MoneyAkkaScope", ConfigFactory.parseString(configString))

  implicit val moneyExtension: MoneyExtension = MoneyExtension(actorSystem)

  implicit val materializer: ActorMaterializer = ActorMaterializer()

  override def afterAll(): Unit = TestKit.shutdownActorSystem(actorSystem)

  override def beforeEach(): Unit = clearHandlerChain
} 
Example 32
Source File: MoneyExtensionSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.akka.acceptance.stream

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.comcast.money.akka.MoneyExtension
import com.typesafe.config.ConfigFactory
import org.scalatest.{ Matchers, WordSpecLike }

class MoneyExtensionSpec(_system: ActorSystem) extends TestKit(_system) with WordSpecLike with Matchers {

  def this() = this{
    val configString: String =
      """
        | money {
        |  enabled = false
        | }""".stripMargin

    ActorSystem("MoneyExtensionSpec", ConfigFactory.parseString(configString))
  }

  "A MoneyExtension" should {
    "construct a new MoneyExtension from an ActorSystem without Money config" in {
      MoneyExtension(system) shouldBe a[MoneyExtension]
    }
  }

} 
Example 33
Source File: JsoneyStringTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.sdk.properties

import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write
import org.json4s.{DefaultFormats, _}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class JsoneyStringTest extends WordSpecLike
with Matchers {

  "A JsoneyString" should {
    "have toString equivalent to its internal string" in {
      assertResult("foo")(new JsoneyString("foo").toString)
    }

    "be deserialized if its JSON" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse( """{ "foo": "bar" }""").extract[JsoneyString]
      assertResult(new JsoneyString( """{"foo":"bar"}"""))(result)
    }

    "be deserialized if it's a String" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("\"foo\"").extract[JsoneyString]
      assertResult(new JsoneyString("foo"))(result)
    }

    "be deserialized if it's an Int" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("1").extract[JsoneyString]
      assertResult(new JsoneyString("1"))(result)
    }

    "be serialized as JSON" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()

      var result = write(new JsoneyString("foo"))
      assertResult("\"foo\"")(result)

      result = write(new JsoneyString("{\"foo\":\"bar\"}"))
      assertResult("\"{\\\"foo\\\":\\\"bar\\\"}\"")(result)
    }

    "be deserialized if it's an JBool" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("true").extract[JsoneyString]
      assertResult(new JsoneyString("true"))(result)
    }

    "have toSeq equivalent to its internal string" in {
      assertResult(Seq("o"))(new JsoneyString("foo").toSeq)
    }
  }
} 
Example 34
Source File: HierarchyFieldTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.cube.field.hierarchy

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class HierarchyFieldTest extends WordSpecLike
with Matchers
with BeforeAndAfter
with BeforeAndAfterAll
with TableDrivenPropertyChecks {

  var hbs: Option[HierarchyField] = _

  before {
    hbs = Some(new HierarchyField())
  }

  after {
    hbs = None
  }

  "A HierarchyDimension" should {
    "In default implementation, get 4 precisions for all precision sizes" in {
      val precisionLeftToRight = hbs.get.precisionValue(HierarchyField.LeftToRightName, "")
      val precisionRightToLeft = hbs.get.precisionValue(HierarchyField.RightToLeftName, "")
      val precisionLeftToRightWithWildCard = hbs.get.precisionValue(HierarchyField.LeftToRightWithWildCardName, "")
      val precisionRightToLeftWithWildCard = hbs.get.precisionValue(HierarchyField.RightToLeftWithWildCardName, "")

      precisionLeftToRight._1.id should be(HierarchyField.LeftToRightName)
      precisionRightToLeft._1.id should be(HierarchyField.RightToLeftName)
      precisionLeftToRightWithWildCard._1.id should be(HierarchyField.LeftToRightWithWildCardName)
      precisionRightToLeftWithWildCard._1.id should be(HierarchyField.RightToLeftWithWildCardName)
    }

    "In default implementation, every proposed combination should be ok" in {
      val data = Table(
        ("i", "o"),
        ("google.com", Seq("google.com", "*.com", "*"))
      )

      forAll(data) { (i: String, o: Seq[String]) =>
        val result = hbs.get.precisionValue(HierarchyField.LeftToRightWithWildCardName, i)
        assertResult(o)(result._2)
      }
    }
    "In reverse implementation, every proposed combination should be ok" in {
      hbs = Some(new HierarchyField())
      val data = Table(
        ("i", "o"),
        ("com.stratio.sparta", Seq("com.stratio.sparta", "com.stratio.*", "com.*", "*"))
      )

      forAll(data) { (i: String, o: Seq[String]) =>
        val result = hbs.get.precisionValue(HierarchyField.RightToLeftWithWildCardName, i.asInstanceOf[Any])
        assertResult(o)(result._2)
      }
    }
    "In reverse implementation without wildcards, every proposed combination should be ok" in {
      hbs = Some(new HierarchyField())
      val data = Table(
        ("i", "o"),
        ("com.stratio.sparta", Seq("com.stratio.sparta", "com.stratio", "com", "*"))
      )

      forAll(data) { (i: String, o: Seq[String]) =>
        val result = hbs.get.precisionValue(HierarchyField.RightToLeftName, i.asInstanceOf[Any])
        assertResult(o)(result._2)
      }
    }
    "In non-reverse implementation without wildcards, every proposed combination should be ok" in {
      hbs = Some(new HierarchyField())
      val data = Table(
        ("i", "o"),
        ("google.com", Seq("google.com", "com", "*"))
      )

      forAll(data) { (i: String, o: Seq[String]) =>
        val result = hbs.get.precisionValue(HierarchyField.LeftToRightName, i.asInstanceOf[Any])
        assertResult(o)(result._2)
      }
    }
  }
} 
Example 35
Source File: DateTimeFieldTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.cube.field.datetime

import java.io.{Serializable => JSerializable}
import java.util.Date

import com.stratio.sparta.sdk.pipeline.schema.TypeOp
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class DateTimeFieldTest extends WordSpecLike with Matchers {

  val dateTimeDimension = new DateTimeField(Map("second" -> "long", "minute" -> "date", "typeOp" -> "datetime"))

  "A DateTimeDimension" should {
    "In default implementation, get 6 dimensions for a specific time" in {
      val newDate = new Date()
      val precision5s =
        dateTimeDimension.precisionValue("5s", newDate.asInstanceOf[JSerializable])
      val precision10s =
        dateTimeDimension.precisionValue("10s", newDate.asInstanceOf[JSerializable])
      val precision15s =
        dateTimeDimension.precisionValue("15s", newDate.asInstanceOf[JSerializable])
      val precisionSecond =
        dateTimeDimension.precisionValue("second", newDate.asInstanceOf[JSerializable])
      val precisionMinute =
        dateTimeDimension.precisionValue("minute", newDate.asInstanceOf[JSerializable])
      val precisionHour =
        dateTimeDimension.precisionValue("hour", newDate.asInstanceOf[JSerializable])
      val precisionDay =
        dateTimeDimension.precisionValue("day", newDate.asInstanceOf[JSerializable])
      val precisionMonth =
        dateTimeDimension.precisionValue("month", newDate.asInstanceOf[JSerializable])
      val precisionYear =
        dateTimeDimension.precisionValue("year", newDate.asInstanceOf[JSerializable])

      precision5s._1.id should be("5s")
      precision10s._1.id should be("10s")
      precision15s._1.id should be("15s")
      precisionSecond._1.id should be("second")
      precisionMinute._1.id should be("minute")
      precisionHour._1.id should be("hour")
      precisionDay._1.id should be("day")
      precisionMonth._1.id should be("month")
      precisionYear._1.id should be("year")
    }

    "Each precision dimension have their output type, second must be long, minute must be date, others datetime" in {
      dateTimeDimension.precision("5s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("10s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("15s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("second").typeOp should be(TypeOp.Long)
      dateTimeDimension.precision("minute").typeOp should be(TypeOp.Date)
      dateTimeDimension.precision("day").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("month").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("year").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision(DateTimeField.TimestampPrecision.id).typeOp should be(TypeOp.Timestamp)
    }
  }
} 
Example 36
Source File: DefaultFieldTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.cube.field.defaultField

import com.stratio.sparta.plugin.default.DefaultField
import com.stratio.sparta.sdk.pipeline.aggregation.cube.{DimensionType, Precision}
import com.stratio.sparta.sdk.pipeline.schema.TypeOp
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class DefaultFieldTest extends WordSpecLike with Matchers {

  val defaultDimension: DefaultField = new DefaultField(Map("typeOp" -> "int"))

  "A DefaultDimension" should {
    "In default implementation, get one precisions for a specific time" in {
      val precision: (Precision, Any) = defaultDimension.precisionValue("", "1".asInstanceOf[Any])

      precision._2 should be(1)

      precision._1.id should be(DimensionType.IdentityName)
    }

    "The precision must be int" in {
      defaultDimension.precision(DimensionType.IdentityName).typeOp should be(TypeOp.Int)
    }
  }
} 
Example 37
Source File: MorphlinesParserTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.transformation.morphline

import java.io.Serializable

import com.stratio.sparta.sdk.pipeline.input.Input
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Matchers, WordSpecLike}


@RunWith(classOf[JUnitRunner])
class MorphlinesParserTest extends WordSpecLike with Matchers with BeforeAndAfter with BeforeAndAfterAll {

  val morphlineConfig = """
          id : test1
          importCommands : ["org.kitesdk.**"]
          commands: [
          {
              readJson {},
          }
          {
              extractJsonPaths {
                  paths : {
                      col1 : /col1
                      col2 : /col2
                  }
              }
          }
          {
            java {
              code : "return child.process(record);"
            }
          }
          {
              removeFields {
                  blacklist:["literal:_attachment_body"]
              }
          }
          ]
                        """
  val inputField = Some(Input.RawDataKey)
  val outputsFields = Seq("col1", "col2")
  val props: Map[String, Serializable] = Map("morphline" -> morphlineConfig)

  val schema = StructType(Seq(StructField("col1", StringType), StructField("col2", StringType)))

  val parser = new MorphlinesParser(1, inputField, outputsFields, schema, props)

  "A MorphlinesParser" should {

    "parse a simple json" in {
      val simpleJson =
        """{
            "col1":"hello",
            "col2":"word"
            }
        """
      val input = Row(simpleJson)
      val result = parser.parse(input)

      val expected = Seq(Row(simpleJson, "hello", "world"))

      result should be eq(expected)
    }

    "parse a simple json removing raw" in {
      val simpleJson =
        """{
            "col1":"hello",
            "col2":"word"
            }
        """
      val input = Row(simpleJson)
      val result = parser.parse(input)

      val expected = Seq(Row("hello", "world"))

      result should be eq(expected)
    }

    "exclude not configured fields" in {
      val simpleJson =
        """{
            "col1":"hello",
            "col2":"word",
            "col3":"!"
            }
        """
      val input = Row(simpleJson)
      val result = parser.parse(input)

      val expected = Seq(Row(simpleJson, "hello", "world"))

      result should be eq(expected)
    }
  }
} 
Example 38
Source File: DateTimeParserTest.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.transformation.datetime

import com.stratio.sparta.sdk.properties.JsoneyString
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class DateTimeParserTest extends WordSpecLike with Matchers {

  val inputField = Some("ts")
  val outputsFields = Seq("ts")

  //scalastyle:off
  "A DateTimeParser" should {
    "parse unixMillis to string" in {
      val input = Row(1416330788000L)
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "unixMillis"))
          .parse(input)

      val expected = Seq(Row(1416330788000L, "1416330788000"))

      assertResult(result)(expected)
    }

    "parse unix to string" in {
      val input = Row(1416330788)
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "unix"))
          .parse(input)

      val expected = Seq(Row(1416330788, "1416330788000"))

      assertResult(result)(expected)
    }

    "parse unix to string removing raw" in {
      val input = Row(1416330788)
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "unix",
          "removeInputField" -> JsoneyString.apply("true")))
          .parse(input)

      val expected = Seq(Row("1416330788000"))

      assertResult(result)(expected)
    }

    "not parse anything if the field does not match" in {
      val input = Row("1212")
      val schema = StructType(Seq(StructField("otherField", StringType)))

      an[IllegalStateException] should be thrownBy new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "unixMillis")).parse(input)
    }

    "not parse anything and generate a new Date" in {
      val input = Row("anything")
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "autoGenerated"))
          .parse(input)

      assertResult(result.head.size)(2)
    }

    "Auto generated if inputFormat does not exist" in {
      val input = Row("1416330788")
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map()).parse(input)

      assertResult(result.head.size)(2)
    }

    "parse dateTime in hive format" in {
      val input = Row("2015-11-08 15:58:58")
      val schema = StructType(Seq(StructField("ts", StringType)))

      val result =
        new DateTimeParser(1, inputField, outputsFields, schema, Map("inputFormat" -> "hive"))
          .parse(input)

      val expected = Seq(Row("2015-11-08 15:58:58", "1446998338000"))

      assertResult(result)(expected)
    }
  }
} 
Example 39
Source File: DagManagerSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.streaming.appmaster

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestProbe
import org.apache.gearpump.cluster.{TestUtil, UserConfig}
import org.apache.gearpump.streaming.partitioner.{HashPartitioner, Partitioner}
import org.apache.gearpump.streaming.appmaster.DagManager.{DAGOperationFailed, DAGOperationSuccess, GetLatestDAG, GetTaskLaunchData, LatestDAG, NewDAGDeployed, ReplaceProcessor, TaskLaunchData, WatchChange}
import org.apache.gearpump.streaming.task.{Subscriber, TaskActor}
import org.apache.gearpump.streaming._
import org.apache.gearpump.util.Graph
import org.apache.gearpump.util.Graph._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Await
import scala.concurrent.duration.Duration

class DagManagerSpec extends WordSpecLike with Matchers with BeforeAndAfterAll {

  val hash = Partitioner[HashPartitioner]
  val task1 = ProcessorDescription(id = 1, taskClass = classOf[TaskActor].getName, parallelism = 1)
  val task2 = ProcessorDescription(id = 2, taskClass = classOf[TaskActor].getName, parallelism = 1)
  val graph = Graph(task1 ~ hash ~> task2)
  val dag = DAG(graph)
  implicit var system: ActorSystem = null
  val appId = 0
  lazy val userConfig = UserConfig.empty.withValue(StreamApplication.DAG, graph)

  "DagManager" should {
    import org.apache.gearpump.streaming.appmaster.ClockServiceSpec.Store
    "maintain the dags properly" in {
      val store = new Store

      val dagManager = system.actorOf(Props(new DagManager(appId, userConfig, store, Some(dag))))
      val client = TestProbe()
      client.send(dagManager, GetLatestDAG)
      client.expectMsg(LatestDAG(dag))

      client.send(dagManager, GetTaskLaunchData(dag.version, task1.id, null))
      val task1LaunchData = TaskLaunchData(task1, Subscriber.of(task1.id, dag))
      client.expectMsg(task1LaunchData)

      val task2LaunchData = TaskLaunchData(task2, Subscriber.of(task2.id, dag))
      client.send(dagManager, GetTaskLaunchData(dag.version, task2.id, null))
      client.expectMsg(task2LaunchData)

      val watcher = TestProbe()
      client.send(dagManager, WatchChange(watcher.ref))
      val task3 = task2.copy(id = 3, life = LifeTime(100, Long.MaxValue))

      client.send(dagManager, ReplaceProcessor(task2.id, task3, inheritConf = false))
      client.expectMsg(DAGOperationSuccess)

      client.send(dagManager, GetLatestDAG)
      val newDag = client.expectMsgPF() {
        case LatestDAG(latestDag) => latestDag
      }
      assert(newDag.processors.contains(task3.id))
      watcher.expectMsgType[LatestDAG]

      val task4 = task3.copy(id = 4)
      client.send(dagManager, ReplaceProcessor(task3.id, task4, inheritConf = false))
      client.expectMsgType[DAGOperationFailed]

      client.send(dagManager, NewDAGDeployed(newDag.version))
      client.send(dagManager, ReplaceProcessor(task3.id, task4, inheritConf = false))
      client.expectMsg(DAGOperationSuccess)
    }

    "retrieve last stored dag properly" in {
      val store = new Store
      val newGraph = Graph(task1 ~ hash ~> task2)
      val newDag = DAG(newGraph)
      val dagManager = system.actorOf(Props(new DagManager(appId, userConfig, store, Some(newDag))))
      dagManager ! NewDAGDeployed(0)
      val client = TestProbe()
      client.send(dagManager, GetLatestDAG)
      client.expectMsgType[LatestDAG].dag shouldBe newDag
    }
  }

  override def afterAll(): Unit = {
    system.terminate()
    Await.result(system.whenTerminated, Duration.Inf)
  }

  override def beforeAll(): Unit = {
    this.system = ActorSystem("DagManagerSpec", TestUtil.DEFAULT_CONFIG)
  }
} 
Example 40
Source File: TimeOutSchedulerSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.util

import scala.concurrent.duration._

import akka.actor._
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import org.slf4j.Logger

import org.apache.gearpump.cluster.TestUtil

class TimeOutSchedulerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("WorkerSpec", TestUtil.DEFAULT_CONFIG))
  val mockActor = TestProbe()

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "The TimeOutScheduler" should {
    "handle the time out event" in {
      val testActorRef = TestActorRef(Props(classOf[TestActor], mockActor.ref))
      val testActor = testActorRef.underlyingActor.asInstanceOf[TestActor]
      testActor.sendMsgToIgnore()
      mockActor.expectMsg(30.seconds, MessageTimeOut)
    }
  }
}

case object Echo
case object MessageTimeOut

class TestActor(mock: ActorRef) extends Actor with TimeOutScheduler {
  private val LOG: Logger = LogUtil.getLogger(getClass)

  val target = context.actorOf(Props(classOf[EchoActor]))

  override def receive: Receive = {
    case _ =>
  }

  def sendMsgToIgnore(): Unit = {
    sendMsgWithTimeOutCallBack(target, Echo, 2000, sendMsgTimeOut())
  }

  private def sendMsgTimeOut(): Unit = {
    mock ! MessageTimeOut
  }
}

class EchoActor extends Actor {
  override def receive: Receive = {
    case _ =>
  }
} 
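
The test relies on sendMsgWithTimeOutCallBack invoking the callback when no reply arrives within the given milliseconds. The sketch below is only an assumption about how such a helper could be built on akka's ask pattern; Gearpump's actual TimeOutScheduler may differ.

import akka.actor.{Actor, ActorRef}
import akka.pattern.ask
import akka.util.Timeout

import scala.concurrent.duration._
import scala.util.{Failure, Success}

// Hypothetical implementation sketch, not the Gearpump trait itself.
trait TimeOutSchedulerSketch { this: Actor =>
  import context.dispatcher

  def sendMsgWithTimeOutCallBack(target: ActorRef, msg: Any, milliSeconds: Long, onTimeout: => Unit): Unit = {
    implicit val timeout: Timeout = Timeout(milliSeconds.millis)
    (target ? msg).onComplete {
      case Success(_) => // a reply arrived in time, nothing more to do
      case Failure(_) => onTimeout // an AskTimeoutException (or any failure) triggers the callback
    }
  }
}
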
Example 41
Source File: ProxyMultiJvm.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.channels

// start with: multi-jvm:test-only aia.channels.ReliableProxySampleSpec

import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import akka.testkit.ImplicitSender
import akka.actor.{Props, Actor}




import akka.remote.testkit.MultiNodeSpecCallbacks
import akka.remote.testkit.MultiNodeConfig
import akka.remote.testkit.MultiNodeSpec

trait STMultiNodeSpec
  extends MultiNodeSpecCallbacks
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterAll {

  override def beforeAll() = multiNodeSpecBeforeAll()

  override def afterAll() = multiNodeSpecAfterAll()
}


object ReliableProxySampleConfig extends MultiNodeConfig {
  val client = role("Client")
  val server = role("Server")
  testTransport(on = true)
}

class ReliableProxySampleSpecMultiJvmNode1 extends ReliableProxySample
class ReliableProxySampleSpecMultiJvmNode2 extends ReliableProxySample

import akka.remote.transport.ThrottlerTransportAdapter.Direction
import scala.concurrent.duration._
import concurrent.Await
import akka.contrib.pattern.ReliableProxy

class ReliableProxySample
  extends MultiNodeSpec(ReliableProxySampleConfig)
  with STMultiNodeSpec
  with ImplicitSender {

  import ReliableProxySampleConfig._

  def initialParticipants = roles.size

  "A MultiNodeSample" must {

    "wait for all nodes to enter a barrier" in {
      enterBarrier("startup")
    }

    "send to and receive from a remote node" in {
      runOn(client) {
        enterBarrier("deployed")
        val pathToEcho = node(server) / "user" / "echo"
        val echo = system.actorSelection(pathToEcho)
        val proxy = system.actorOf(
          ReliableProxy.props(pathToEcho, 500.millis), "proxy")

        proxy ! "message1"
        expectMsg("message1")
        Await.ready(
          testConductor.blackhole( client, server, Direction.Both),
          1 second)

        echo ! "DirectMessage"
        proxy ! "ProxyMessage"
        expectNoMsg(3 seconds)

        Await.ready(
          testConductor.passThrough( client, server, Direction.Both),
          1 second)

        expectMsg("ProxyMessage")

        echo ! "DirectMessage2"
        expectMsg("DirectMessage2")
      }

      runOn(server) {
        system.actorOf(Props(new Actor {
          def receive = {
            case msg: AnyRef => {
              sender() ! msg
            }
          }
        }), "echo")
        enterBarrier("deployed")
      }

      enterBarrier("finished")
    }
  }
} 
Example 42
Source File: DeadLetterTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.channels

import akka.testkit.{ ImplicitSender, TestProbe, TestKit }
import akka.actor.{ PoisonPill, Props, DeadLetter, ActorSystem }
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import java.util.Date

class DeadLetterTest extends TestKit(ActorSystem("DeadLetterTest"))
  with WordSpecLike with BeforeAndAfterAll with MustMatchers
  with ImplicitSender {

  override def afterAll()  {
    system.terminate()
  }

  "DeadLetter" must {
    "catch messages send to deadLetters" in {
      val deadLetterMonitor = TestProbe()

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new StateEvent(new Date(), "Connected")
      system.deadLetters ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(system.deadLetters)
    }
    "catch deadLetter messages send to deadLetters" in {

      val deadLetterMonitor = TestProbe()
      val actor = system.actorOf(Props[EchoActor], "echo")

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new Order("me", "Akka in Action", 1)
      val dead = DeadLetter(msg, testActor, actor)
      system.deadLetters ! dead

      deadLetterMonitor.expectMsg(dead)

      system.stop(actor)

    }

    "catch messages send to terminated Actor" in {

      val deadLetterMonitor = TestProbe()

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val actor = system.actorOf(Props[EchoActor], "echo")
      actor ! PoisonPill
      val msg = new Order("me", "Akka in Action", 1)
      actor ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(actor)

    }

  }
} 
Example 43
Source File: ThroughputCPUTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.performance.throughput

import akka.testkit.TestProbe
import akka.actor.{Props, ActorSystem}
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import akka.routing.RoundRobinPool
import com.typesafe.config.ConfigFactory
import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest}
import concurrent.duration._


class ThroughputCPUTest extends WordSpecLike
  with BeforeAndAfterAll
  with MustMatchers {

  val configuration = ConfigFactory.load("performance/through")
  implicit val system = ActorSystem("ThroughputTest", configuration)

  "System" must {
    "fails to with cpu" in {
      val nrWorkers = 40
      val nrMessages = nrWorkers * 40

      val end = TestProbe()
      val workers = system.actorOf(
        RoundRobinPool(nrWorkers).props(
          Props(new ProcessCPURequest(250 millis, end.ref)).withDispatcher("my-dispatcher")),
        "Workers-cpu")

      val startTime = System.currentTimeMillis()
      for (i <- 0 until nrMessages) {
        workers ! new SystemMessage(startTime, 0, "")
      }
      val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]]
      val endTime = System.currentTimeMillis()
      val total = endTime - startTime
      println("total process time %d Average=%d".format(total, total / nrMessages))
      val grouped = msg.groupBy(_.id)
      grouped.map {
        case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) })
      }.foreach(println(_))

      Thread.sleep(1000)

      system.stop(workers)
      
    }
  }
} 
Example 44
Source File: ThroughputTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.performance.throughput

import akka.testkit.TestProbe
import akka.actor.{Props, ActorSystem}
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import akka.routing.RoundRobinPool
import com.typesafe.config.ConfigFactory
import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest}
import concurrent.duration._

class ThroughputTest extends WordSpecLike
  with BeforeAndAfterAll
  with MustMatchers {

  val configuration = ConfigFactory.load("performance/through")
  implicit val system = ActorSystem("ThroughputTest", configuration)

  "System" must {
    "fails to perform" in {
      val nrMessages = 99
      val nrWorkers = 3
      val statDuration = 2000 millis //((nrMessages * 10)+1000)/4 millis

      val end = TestProbe()
      val workers = system.actorOf(
        RoundRobinPool(nrWorkers).props(Props(new ProcessRequest(1 second, end.ref)).withDispatcher("my-dispatcher")),
        "Workers")

      val startTime = System.currentTimeMillis()
      for (i <- 0 until nrMessages) {
        workers ! new SystemMessage(startTime, 0, "")
      }
      val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]]
      val endTime = System.currentTimeMillis()
      val total = endTime - startTime
      println("total process time %d Average=%d".format(total, total / nrMessages))
      val grouped = msg.groupBy(_.id)
      grouped.map {
        case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) })
      }.foreach(println(_))

      Thread.sleep(1000)

      system.stop(workers)
      
    }
  }
} 
Example 45
Source File: MonitorMailboxTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.performance.monitor

import akka.testkit.TestProbe
import akka.actor.{ Props, Actor, ActorSystem }
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import concurrent.duration._
import com.typesafe.config.ConfigFactory

class MonitorMailboxTest extends WordSpecLike with BeforeAndAfterAll
  with MustMatchers {

  val configuration = ConfigFactory.load("monitor/mailbox")
  implicit val system = ActorSystem("MonitorMailboxTest", configuration)

  override protected def afterAll(): Unit = {
    system.terminate()
    super.afterAll()
  }

  "mailbox" must {

    "send statistics with dispatcher" in {
      val statProbe = TestProbe()
      system.eventStream.subscribe(
        statProbe.ref,
        classOf[MailboxStatistics])
      val testActor = system.actorOf(Props(
        new ProcessTestActor(1.second))
        .withDispatcher("my-dispatcher"), "monitorActor")
      statProbe.send(testActor, "message")
      statProbe.send(testActor, "message2")
      statProbe.send(testActor, "message3")

      val stat = statProbe.expectMsgType[MailboxStatistics]
      println(stat)
      stat.queueSize must be(1)
      val stat2 = statProbe.expectMsgType[MailboxStatistics]
      println(stat2)
      stat2.queueSize must (be(2) or be(1))
      val stat3 = statProbe.expectMsgType[MailboxStatistics]
      println(stat3)
      stat3.queueSize must (be(3) or be(2))

      Thread.sleep(2000)
      system.stop(testActor)
      system.eventStream.unsubscribe(statProbe.ref)
    }

    "send statistics with default" in {

      val statProbe = TestProbe()
      system.eventStream.subscribe(
        statProbe.ref,
        classOf[MailboxStatistics])
      val testActor = system.actorOf(Props(
        new ProcessTestActor(1.second)), "monitorActor2")
      statProbe.send(testActor, "message")
      statProbe.send(testActor, "message2")
      statProbe.send(testActor, "message3")
      val stat = statProbe.expectMsgType[MailboxStatistics]

      stat.queueSize must be(1)
      val stat2 = statProbe.expectMsgType[MailboxStatistics]

      stat2.queueSize must (be(2) or be(1))
      val stat3 = statProbe.expectMsgType[MailboxStatistics]

      stat3.queueSize must (be(3) or be(2))


      Thread.sleep(2000)
      system.stop(testActor)
      system.eventStream.unsubscribe(statProbe.ref)
    }
  }
}

class ProcessTestActor(serviceTime: Duration) extends Actor {
  def receive = {
    case _ => {
      Thread.sleep(serviceTime.toMillis)
    }
  }
} 
Example 46
Source File: MonitorActorTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.performance.monitor

import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import akka.testkit.{ TestProbe, TestKit }
import akka.actor.{ Props, ActorSystem }
import concurrent.duration._


class MonitorActorTest extends TestKit(ActorSystem("MonitorActorTest"))
  with WordSpecLike
  with BeforeAndAfterAll
  with MustMatchers {

  "Actor" must {
    "send statistics" in {
      val statProbe = TestProbe()
      system.eventStream.subscribe(
        statProbe.ref,
        classOf[ActorStatistics])
      val testActor = system.actorOf(Props(
        new ProcessTestActor(1.second) with MonitorActor), "monitorActor")
      statProbe.send(testActor, "message")
      statProbe.send(testActor, "message2")
      statProbe.send(testActor, "message3")

      val stat = statProbe.expectMsgType[ActorStatistics]
      println(stat)
      stat.exitTime - stat.entryTime must be(1000L +- 20)
      val stat2 = statProbe.expectMsgType[ActorStatistics]
      println(stat2)
      stat2.exitTime - stat2.entryTime must be(1000L +- 20)
      val stat3 = statProbe.expectMsgType[ActorStatistics]
      println(stat3)
      stat3.exitTime - stat3.entryTime must be(1000L +- 20)

      Thread.sleep(2000)
      system.stop(testActor)
      system.eventStream.unsubscribe(statProbe.ref)
    }
  }
} 
Example 47
Source File: TicketSellerSpec.scala    From 006877   with MIT License 5 votes vote down vote up
package com.goticks

import akka.actor.{Props, ActorSystem}

import akka.testkit.{ImplicitSender, TestKit}

import org.scalatest.{WordSpecLike, MustMatchers}

class TickerSellerSpec extends TestKit(ActorSystem("testTickets"))
                         with WordSpecLike
                         with MustMatchers
                         with ImplicitSender
                         with StopSystemAfterAll {
  "The TicketSeller" must {
    "Sell tickets until they are sold out" in {
      import TicketSeller._

      def mkTickets = (1 to 10).map(i=>Ticket(i)).toVector
      val event = "RHCP"
      val ticketingActor = system.actorOf(TicketSeller.props(event))

      ticketingActor ! Add(mkTickets)
      ticketingActor ! Buy(1)

      expectMsg(Tickets(event, Vector(Ticket(1))))

      val nrs = (2 to 10)
      nrs.foreach(_ => ticketingActor ! Buy(1))

      val tickets = receiveN(9)
      tickets.zip(nrs).foreach { case (Tickets(event, Vector(Ticket(id))), ix) => id must be(ix) }

      ticketingActor ! Buy(1)
      expectMsg(Tickets(event))
    }

    "Sell tickets in batches until they are sold out" in {
      import TicketSeller._

      val firstBatchSize = 10

      def mkTickets = (1 to (10 * firstBatchSize)).map(i=>Ticket(i)).toVector

      val event = "Madlib"
      val ticketingActor = system.actorOf(TicketSeller.props(event))

      ticketingActor ! Add(mkTickets)
      ticketingActor ! Buy(firstBatchSize)
      val bought = (1 to firstBatchSize).map(Ticket).toVector

      expectMsg(Tickets(event, bought))

      val secondBatchSize = 5
      val nrBatches = 18

      val batches = (1 to nrBatches * secondBatchSize)
      batches.foreach(_ => ticketingActor ! Buy(secondBatchSize))

      val tickets = receiveN(nrBatches)

      tickets.zip(batches).foreach {
        case (Tickets(event, bought), ix) =>
          bought.size must equal(secondBatchSize)
          val last = ix * secondBatchSize + firstBatchSize
          val first = ix * secondBatchSize + firstBatchSize - (secondBatchSize - 1)
          bought.map(_.id) must equal((first to last).toVector)
		case _ => 
      }

      ticketingActor ! Buy(1)
      expectMsg(Tickets(event))

      ticketingActor ! Buy(10)
      expectMsg(Tickets(event))
    }
  }
} 
Example 48
Source File: BoxOfficeSpec.scala    From 006877   with MIT License 5 votes vote down vote up
package com.goticks

import akka.actor.{ ActorRef, ActorSystem, Props }
import akka.testkit.{ DefaultTimeout, ImplicitSender, TestKit }
import com.goticks.BoxOffice._
import com.goticks.TicketSeller._
import org.scalatest.{ MustMatchers, WordSpecLike }

class BoxOfficeSpec extends TestKit(ActorSystem("testBoxOffice"))
    with WordSpecLike
    with MustMatchers
    with ImplicitSender
    with DefaultTimeout
    with StopSystemAfterAll {
  "The BoxOffice" must {

    "Create an event and get tickets from the correct Ticket Seller" in {

      val boxOffice = system.actorOf(BoxOffice.props)
      val eventName = "RHCP"
      boxOffice ! CreateEvent(eventName, 10)
      expectMsg(EventCreated(Event(eventName, 10)))

      boxOffice ! GetEvents
      expectMsg(Events(Vector(Event(eventName, 10))))

      boxOffice ! BoxOffice.GetEvent(eventName)
      expectMsg(Some(Event(eventName, 10)))

      boxOffice ! GetTickets(eventName, 1)
      expectMsg(Tickets(eventName, Vector(Ticket(1))))

      boxOffice ! GetTickets("DavidBowie", 1)
      expectMsg(Tickets("DavidBowie"))
    }

    "Create a child actor when an event is created and sends it a Tickets message" in {
      val boxOffice = system.actorOf(Props(
          new BoxOffice  {
            override def createTicketSeller(name: String): ActorRef = testActor
          }
        )
      )

      val tickets = 3
      val eventName = "RHCP"
      val expectedTickets = (1 to tickets).map(Ticket).toVector
      boxOffice ! CreateEvent(eventName, tickets)
      expectMsg(Add(expectedTickets))
      expectMsg(EventCreated(Event(eventName, tickets)))
    }

    "Get and cancel an event that is not created yet" in {
      val boxOffice = system.actorOf(BoxOffice.props)
      val noneExitEventName = "noExitEvent"
      boxOffice ! BoxOffice.GetEvent(noneExitEventName)
      expectMsg(None)

      boxOffice ! CancelEvent(noneExitEventName)
      expectMsg(None)
    }

    "Cancel a ticket which event is not created " in {
      val boxOffice = system.actorOf(BoxOffice.props)
      val noneExitEventName = "noExitEvent"

      boxOffice ! CancelEvent(noneExitEventName)
      expectMsg(None)
    }

    "Cancel a ticket which event is created" in {
      val boxOffice = system.actorOf(BoxOffice.props)
      val eventName = "RHCP"
      val tickets = 10
      boxOffice ! CreateEvent(eventName, tickets)
      expectMsg(EventCreated(Event(eventName, tickets)))

      boxOffice ! CancelEvent(eventName)
      expectMsg(Some(Event(eventName, tickets)))
    }
  }


} 
Example 49
Source File: HelloWorldTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.deploy

import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import org.scalatest.MustMatchers
import akka.testkit.{TestActorRef, ImplicitSender, TestKit}
import akka.actor.ActorSystem

class HelloWorldTest extends TestKit(ActorSystem("HelloWorldTest"))
    with ImplicitSender
    with WordSpecLike
    with MustMatchers
    with BeforeAndAfterAll {

  val actor = TestActorRef[HelloWorld]

  override def afterAll(): Unit = {
    system.terminate()
  }
  "HelloWorld" must {
    "reply when sending a string" in {
      actor ! "everybody"
      expectMsg("Hello everybody")
    }
  }
} 
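
The actor under test is not shown in this excerpt. A minimal sketch of an actor that would satisfy the expectation above, as an assumption based solely on the expected reply rather than the book's actual source:

import akka.actor.Actor

// Assumed shape: echoes a greeting back to the sender.
class HelloWorld extends Actor {
  def receive: Receive = {
    case name: String => sender() ! s"Hello $name"
  }
}
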
Example 50
Source File: ConfigTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.config

import akka.actor.ActorSystem
import org.scalatest.WordSpecLike
import com.typesafe.config.ConfigFactory
import org.scalatest.MustMatchers

class ConfigTest extends WordSpecLike with MustMatchers {

  "Configuration" must {
    "has configuration" in {
      val mySystem = ActorSystem("myTest")
      val config = mySystem.settings.config
      config.getInt("myTest.intParam") must be(20)
      config.getString("myTest.applicationDesc") must be("My Config Test")
    }
    "has defaults" in {
      val mySystem = ActorSystem("myDefaultsTest")
      val config = mySystem.settings.config
      config.getInt("myTestDefaults.intParam") must be(20)
      config.getString("myTestDefaults.applicationDesc") must be("My Current Test")
    }
    "can include file" in {
      val mySystem = ActorSystem("myIncludeTest")
      val config = mySystem.settings.config
      config.getInt("myTestIncluded.intParam") must be(20)
      config.getString("myTestIncluded.applicationDesc") must be("My Include Test")
    }
    "can be loaded by ourself" in {
      val configuration = ConfigFactory.load("load")
      val mySystem = ActorSystem("myLoadTest", configuration)
      val config = mySystem.settings.config
      config.getInt("myTestLoad.intParam") must be(20)
      config.getString("myTestLoad.applicationDesc") must be("My Load Test")
    }
    
  }

} 
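
The test reads its values from the regular configuration chain (application.conf and the files it includes). A minimal sketch of the equivalent configuration supplied inline, with the keys and values taken from the assertions in the first case; the object name is illustrative:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

object ConfigSketch extends App {
  // Keys and values mirror the first test case above; reference.conf is added as fallback by ActorSystem.
  val config = ConfigFactory.parseString(
    """
      |myTest {
      |  intParam = 20
      |  applicationDesc = "My Config Test"
      |}
    """.stripMargin)

  val mySystem = ActorSystem("myTest", config)
  assert(mySystem.settings.config.getInt("myTest.intParam") == 20)
  assert(mySystem.settings.config.getString("myTest.applicationDesc") == "My Config Test")
  mySystem.terminate()
}
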
Example 51
Source File: TicketSellerSpec.scala    From 006877   with MIT License 5 votes vote down vote up
package com.goticks

import akka.actor.{ ActorSystem }

import akka.testkit.{ImplicitSender, TestKit}

import org.scalatest.{WordSpecLike, MustMatchers}

class TickerSellerSpec extends TestKit(ActorSystem("testTickets"))
                         with WordSpecLike
                         with MustMatchers
                         with ImplicitSender
                         with StopSystemAfterAll {
  "The TicketSeller" must {
    "Sell tickets until they are sold out" in {
      import TicketSeller._

      def mkTickets = (1 to 10).map(i=>Ticket(i)).toVector
      val event = "RHCP"
      val ticketingActor = system.actorOf(TicketSeller.props(event))

      ticketingActor ! Add(mkTickets)
      ticketingActor ! Buy(1)

      expectMsg(Tickets(event, Vector(Ticket(1))))

      val nrs = (2 to 10)
      nrs.foreach(_ => ticketingActor ! Buy(1))

      val tickets = receiveN(9)
      tickets.zip(nrs).foreach { case (Tickets(event, Vector(Ticket(id))), ix) => id must be(ix) }

      ticketingActor ! Buy(1)
      expectMsg(Tickets(event))
    }

    "Sell tickets in batches until they are sold out" in {
      import TicketSeller._

      val firstBatchSize = 10

      def mkTickets = (1 to (10 * firstBatchSize)).map(i=>Ticket(i)).toVector

      val event = "Madlib"
      val ticketingActor = system.actorOf(TicketSeller.props(event))

      ticketingActor ! Add(mkTickets)
      ticketingActor ! Buy(firstBatchSize)
      val bought = (1 to firstBatchSize).map(Ticket).toVector

      expectMsg(Tickets(event, bought))

      val secondBatchSize = 5
      val nrBatches = 18

      val batches = (1 to nrBatches * secondBatchSize)
      batches.foreach(_ => ticketingActor ! Buy(secondBatchSize))

      val tickets = receiveN(nrBatches)

      tickets.zip(batches).foreach {
        case (Tickets(event, bought), ix) =>
          bought.size must equal(secondBatchSize)
          val last = ix * secondBatchSize + firstBatchSize
          val first = ix * secondBatchSize + firstBatchSize - (secondBatchSize - 1)
          bought.map(_.id) must equal((first to last).toVector)
		case _ =>
      }

      ticketingActor ! Buy(1)
      expectMsg(Tickets(event))

      ticketingActor ! Buy(10)
      expectMsg(Tickets(event))
    }
  }
} 
Example 52
Source File: BoxOfficeSpec.scala    From 006877   with MIT License 5 votes vote down vote up
package com.goticks

import akka.actor.{ ActorRef, Props, ActorSystem }

import akka.testkit.{ TestKit, ImplicitSender, DefaultTimeout }

import org.scalatest.{ WordSpecLike, MustMatchers }

class BoxOfficeSpec extends TestKit(ActorSystem("testBoxOffice"))
    with WordSpecLike
    with MustMatchers
    with ImplicitSender
    with DefaultTimeout
    with StopSystemAfterAll {
  "The BoxOffice" must {

    "Create an event and get tickets from the correct Ticket Seller" in {
      import BoxOffice._
      import TicketSeller._

      val boxOffice = system.actorOf(BoxOffice.props)
      val eventName = "RHCP"
      boxOffice ! CreateEvent(eventName, 10)
      expectMsg(EventCreated(Event(eventName, 10)))

      boxOffice ! GetTickets(eventName, 1)
      expectMsg(Tickets(eventName, Vector(Ticket(1))))

      boxOffice ! GetTickets("DavidBowie", 1)
      expectMsg(Tickets("DavidBowie"))
    }

    "Create a child actor when an event is created and sends it a Tickets message" in {
      import BoxOffice._
      import TicketSeller._

      val boxOffice = system.actorOf(Props(
          new BoxOffice  {
            override def createTicketSeller(name: String): ActorRef = testActor
          }
        )
      )

      val tickets = 3
      val eventName = "RHCP"
      val expectedTickets = (1 to tickets).map(Ticket).toVector
      boxOffice ! CreateEvent(eventName, tickets)
      expectMsg(Add(expectedTickets))
      expectMsg(EventCreated(Event(eventName, tickets)))
    }
  }
} 
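Both goticks specs above depend on the TicketSeller and BoxOffice message protocols, which are defined elsewhere in the project. The following is a hypothetical reconstruction inferred purely from the test code (names and field types are assumptions), shown only to make the assertions easier to follow:

object TicketSeller {
  // messages exchanged with the seller responsible for a single event
  case class Ticket(id: Int)
  case class Tickets(event: String, entries: Vector[Ticket] = Vector.empty)
  case class Add(tickets: Vector[Ticket])
  case class Buy(tickets: Int)
}

object BoxOffice {
  // messages understood by the box office, which creates one ticket seller per event
  case class Event(name: String, tickets: Int)
  case class CreateEvent(name: String, tickets: Int)
  case class EventCreated(event: Event)
  case class GetTickets(event: String, tickets: Int)
}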
Example 53
Source File: FilteringActorTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven

import akka.testkit.TestKit
import akka.actor.{ Actor, Props, ActorRef, ActorSystem }
import org.scalatest.{MustMatchers, WordSpecLike }

class FilteringActorTest extends TestKit(ActorSystem("testsystem"))
  with WordSpecLike
  with MustMatchers
  with StopSystemAfterAll {
  "A Filtering Actor" must {

    "filter out particular messages" in {
      import FilteringActor._
      val props = FilteringActor.props(testActor, 5)
      val filter = system.actorOf(props, "filter-1")
      filter ! Event(1)
      filter ! Event(2)
      filter ! Event(1)
      filter ! Event(3)
      filter ! Event(1)
      filter ! Event(4)
      filter ! Event(5)
      filter ! Event(5)
      filter ! Event(6)
      val eventIds = receiveWhile() {
        case Event(id) if id <= 5 => id
      }
      eventIds must be(List(1, 2, 3, 4, 5))
      expectMsg(Event(6))
    }


    "filter out particular messages using expectNoMsg" in {
      import FilteringActor._
      val props = FilteringActor.props(testActor, 5)
      val filter = system.actorOf(props, "filter-2")
      filter ! Event(1)
      filter ! Event(2)
      expectMsg(Event(1))
      expectMsg(Event(2))
      filter ! Event(1)
      expectNoMsg
      filter ! Event(3)
      expectMsg(Event(3))
      filter ! Event(1)
      expectNoMsg
      filter ! Event(4)
      filter ! Event(5)
      filter ! Event(5)
      expectMsg(Event(4))
      expectMsg(Event(5))
      expectNoMsg()
    }

  }
}

object FilteringActor {
  def props(nextActor: ActorRef, bufferSize: Int) =
    Props(new FilteringActor(nextActor, bufferSize))
  case class Event(id: Long)
}

class FilteringActor(nextActor: ActorRef,
                     bufferSize: Int) extends Actor {
  import FilteringActor._
  var lastMessages = Vector[Event]()
  def receive = {
    case msg: Event =>
      if (!lastMessages.contains(msg)) {
        lastMessages = lastMessages :+ msg
        nextActor ! msg
        if (lastMessages.size > bufferSize) {
          // discard the oldest message
          lastMessages = lastMessages.tail
        }
      }
  }
} 
Example 54
Source File: Greeter02Test.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven

import akka.testkit.{ TestKit }
import org.scalatest.WordSpecLike
import akka.actor._



class Greeter02Test extends TestKit(ActorSystem("testsystem"))
  with WordSpecLike
  with StopSystemAfterAll {

  "The Greeter" must {
    "say Hello World! when a Greeting(\"World\") is sent to it" in {
      val props = Greeter02.props(Some(testActor))
      val greeter = system.actorOf(props, "greeter02-1")
      greeter ! Greeting("World")
      expectMsg("Hello World!")
    }
    "say something else and see what happens" in {
      val props = Greeter02.props(Some(testActor))
      val greeter = system.actorOf(props, "greeter02-2")
      system.eventStream.subscribe(testActor, classOf[UnhandledMessage])
      greeter ! "World"
      expectMsg(UnhandledMessage("World", system.deadLetters, greeter))
    }
  }
}


object Greeter02 {
  def props(listener: Option[ActorRef] = None) =
    Props(new Greeter02(listener))
}
class Greeter02(listener: Option[ActorRef])
  extends Actor with ActorLogging {
  def receive = {
    case Greeting(who) =>
      val message = "Hello " + who + "!"
      log.info(message)
      listener.foreach(_ ! message)
  }
} 
Example 55
Source File: EchoActorTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven

import akka.testkit.{ TestKit, ImplicitSender }
import akka.actor.{ Props, Actor, ActorSystem }
import org.scalatest.WordSpecLike

import akka.util.Timeout
import scala.concurrent.Await
import scala.util.{ Success, Failure }

import scala.language.postfixOps


class EchoActorTest extends TestKit(ActorSystem("testsystem"))
  with WordSpecLike
  with ImplicitSender
  with StopSystemAfterAll {


  "An EchoActor" must {
    "Reply with the same message it receives" in {

      import akka.pattern.ask
      import scala.concurrent.duration._
      implicit val timeout = Timeout(3 seconds)
      implicit val ec = system.dispatcher
      val echo = system.actorOf(Props[EchoActor], "echo1")
      val future = echo.ask("some message")
      future.onComplete {
        case Failure(_)   => // handle failure
        case Success(msg) => // handle success
      }

      Await.ready(future, timeout.duration)
    }

    "Reply with the same message it receives without ask" in {
      val echo = system.actorOf(Props[EchoActor], "echo2")
      echo ! "some message"
      expectMsg("some message")

    }

  }
}


class EchoActor extends Actor {
  def receive = {
    case msg =>
      sender() ! msg
  }
} 
Example 56
Source File: SendingActorTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven

import scala.util.Random
import akka.testkit.TestKit
import akka.actor.{ Props, ActorRef, Actor, ActorSystem }
import org.scalatest.{WordSpecLike, MustMatchers}

class SendingActorTest extends TestKit(ActorSystem("testsystem"))
  with WordSpecLike
  with MustMatchers
  with StopSystemAfterAll {

  "A Sending Actor" must {
    "send a message to another actor when it has finished processing" in {
      import SendingActor._
      val props = SendingActor.props(testActor) 
      val sendingActor = system.actorOf(props, "sendingActor")
      
      val size = 1000
      val maxInclusive = 100000

      def randomEvents() = (0 until size).map{ _ => 
        Event(Random.nextInt(maxInclusive))
      }.toVector

      val unsorted = randomEvents()
      val sortEvents = SortEvents(unsorted)
      sendingActor ! sortEvents

      expectMsgPF() {
        case SortedEvents(events) =>
          events.size must be(size)
          unsorted.sortBy(_.id) must be(events)
      }
    }
  }
}

object SendingActor {
  def props(receiver: ActorRef) =
    Props(new SendingActor(receiver))
  case class Event(id: Long)  
  case class SortEvents(unsorted: Vector[Event])  
  case class SortedEvents(sorted: Vector[Event])
}

class SendingActor(receiver: ActorRef) extends Actor {
  import SendingActor._
  def receive = {
    case SortEvents(unsorted) =>
      receiver ! SortedEvents(unsorted.sortBy(_.id))
  }
} 
Example 57
Source File: SilentActorNextTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven

import org.scalatest.WordSpecLike
import org.scalatest.MustMatchers
import akka.testkit.{ TestActorRef, TestKit }
import akka.actor._

package silentactor02 {

  class SilentActorTest extends TestKit(ActorSystem("testsystem"))
    with WordSpecLike
    with MustMatchers
    with StopSystemAfterAll {

    "A Silent Actor" must {

      "change internal state when it receives a message, single" in {
        import SilentActor._

        val silentActor = TestActorRef[SilentActor]
        silentActor ! SilentMessage("whisper")
        silentActor.underlyingActor.state must (contain("whisper"))
      }

    }
  }


  object SilentActor {
    case class SilentMessage(data: String)
    case class GetState(receiver: ActorRef)
  }

  class SilentActor extends Actor {
    import SilentActor._
    var internalState = Vector[String]()

    def receive = {
      case SilentMessage(data) =>
        internalState = internalState :+ data
    }

    def state = internalState
  }
}

package silentactor03 {

  class SilentActorTest extends TestKit(ActorSystem("testsystem"))
    with WordSpecLike
    with MustMatchers
    with StopSystemAfterAll {

    "A Silent Actor" must {

      "change internal state when it receives a message, multi" in {
        import SilentActor._

        val silentActor = system.actorOf(Props[SilentActor], "s3")
        silentActor ! SilentMessage("whisper1")
        silentActor ! SilentMessage("whisper2")
        silentActor ! GetState(testActor)
        expectMsg(Vector("whisper1", "whisper2"))
      }

    }

  }



  object SilentActor {
    case class SilentMessage(data: String)
    case class GetState(receiver: ActorRef)
  }

  class SilentActor extends Actor {
    import SilentActor._
    var internalState = Vector[String]()

    def receive = {
      case SilentMessage(data) =>
        internalState = internalState :+ data
      case GetState(receiver) => receiver ! internalState
    }
  }

} 
Example 58
Source File: Greeter01Test.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.testdriven
import akka.testkit.{ CallingThreadDispatcher, EventFilter, TestKit }
import akka.actor.{ Props, ActorSystem }
import com.typesafe.config.ConfigFactory
import org.scalatest.WordSpecLike


import Greeter01Test._

class Greeter01Test extends TestKit(testSystem)
  with WordSpecLike
  with StopSystemAfterAll {

  "The Greeter" must {
    "say Hello World! when a Greeting(\"World\") is sent to it" in {
      val dispatcherId = CallingThreadDispatcher.Id
      val props = Props[Greeter].withDispatcher(dispatcherId)
      val greeter = system.actorOf(props)
      EventFilter.info(message = "Hello World!",
        occurrences = 1).intercept {
          greeter ! Greeting("World")
        }
    }
  }
}

object Greeter01Test {
  val testSystem = {
    val config = ConfigFactory.parseString(
      """
         akka.loggers = [akka.testkit.TestEventListener]
      """)
    ActorSystem("testsystem", config)
  }
} 
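Greeter01Test above (and Greeter02Test in Example 54) rely on a Greeting message and a Greeter actor that are not part of this listing. A minimal sketch consistent with what the tests assert; the exact definitions are assumptions inferred from the expected log output:

import akka.actor.{Actor, ActorLogging}

// Sketch only: the test intercepts an info-level "Hello World!" log entry,
// so the actor merely needs to log the greeting it receives.
case class Greeting(message: String)

class Greeter extends Actor with ActorLogging {
  def receive = {
    case Greeting(who) => log.info("Hello " + who + "!")
  }
}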
Example 59
Source File: HashRoutingTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.routing

import scala.concurrent.duration._

import akka.actor._
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import akka.routing._
import akka.routing.ConsistentHashingRouter._

import akka.testkit.{TestProbe, TestKit}

class HashRoutingTest
  extends TestKit(ActorSystem("PerfRoutingTest"))
  with WordSpecLike with BeforeAndAfterAll {

  override def afterAll() = {
    system.terminate()
  }

  "The HashRouting" must {
    "work using mapping" in {
      val endProbe = TestProbe()

      def hashMapping: ConsistentHashMapping = {
        case msg: GatherMessage => msg.id
      }

      val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10, hashMapping = hashMapping).
          props(Props(new SimpleGather(endProbe.ref))), name = "routerMapping")

      router ! GatherMessageNormalImpl("1", Seq("msg1"))
      endProbe.expectNoMsg(100.millis)
      router ! GatherMessageNormalImpl("1", Seq("msg2"))
      endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2")))

      router ! GatherMessageNormalImpl("10", Seq("msg1"))
      endProbe.expectNoMsg(100.millis)
      router ! GatherMessageNormalImpl("10", Seq("msg2"))
      endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2")))
      system.stop(router)
    }
    "work using messages" in {
      val endProbe = TestProbe()

      val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10).
        props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage")

      router ! GatherMessageWithHash("1", Seq("msg1"))
      endProbe.expectNoMsg(100.millis)
      router ! GatherMessageWithHash("1", Seq("msg2"))
      endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2")))

      router ! GatherMessageWithHash("10", Seq("msg1"))
      endProbe.expectNoMsg(100.millis)
      router ! GatherMessageWithHash("10", Seq("msg2"))
      endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2")))
      system.stop(router)
    }
    "work using Envelope" in {
      val endProbe = TestProbe()

      val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10).
        props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage")

      router ! ConsistentHashableEnvelope(
        message = GatherMessageNormalImpl("1", Seq("msg1")),
        hashKey = "someHash")

      endProbe.expectNoMsg(100.millis)
      router ! ConsistentHashableEnvelope(
        message = GatherMessageNormalImpl("1", Seq("msg2")),
        hashKey = "someHash")
      endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2")))

      router ! ConsistentHashableEnvelope(
        message = GatherMessageNormalImpl("10", Seq("msg1")),
        hashKey = "10")
      endProbe.expectNoMsg(100.millis)
      router ! ConsistentHashableEnvelope(
        message = GatherMessageNormalImpl("10", Seq("msg2")),
        hashKey = "10")
      endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2")))
      system.stop(router)
    }
    "fail without using hash" in {
      val endProbe = TestProbe()

      val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10).
        props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage")

      router ! GatherMessageNormalImpl("1", Seq("msg1"))
      endProbe.expectNoMsg(100.millis)
      router ! GatherMessageNormalImpl("1", Seq("msg2"))
      endProbe.expectNoMsg(1000.millis)

      system.stop(router)
    }
  }

} 
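HashRoutingTest references a GatherMessage protocol and a SimpleGather actor that are defined elsewhere. A rough sketch of what the assertions imply, assuming SimpleGather buffers the first message per id and emits the combined message when a second one with the same id arrives; the real implementation may differ:

import akka.actor.{Actor, ActorRef}
import akka.routing.ConsistentHashingRouter.ConsistentHashable

trait GatherMessage {
  def id: String
  def values: Seq[String]
}
case class GatherMessageNormalImpl(id: String, values: Seq[String]) extends GatherMessage
case class GatherMessageWithHash(id: String, values: Seq[String])
    extends GatherMessage with ConsistentHashable {
  override def consistentHashKey: Any = id
}

// Sketch only: keeps the first message per id and, on the second message with
// the same id, forwards the concatenated values to the next actor.
class SimpleGather(nextStep: ActorRef) extends Actor {
  var buffered = Map.empty[String, GatherMessage]
  def receive = {
    case msg: GatherMessage =>
      buffered.get(msg.id) match {
        case Some(previous) =>
          nextStep ! GatherMessageNormalImpl(msg.id, previous.values ++ msg.values)
          buffered -= msg.id
        case None =>
          buffered += msg.id -> msg
      }
  }
}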
Example 60
Source File: LifeCycleHooksTest.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.faulttolerance

import aia.faulttolerance.LifeCycleHooks.{ForceRestart, SampleMessage}
import akka.actor._
import akka.testkit.TestKit
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}

class LifeCycleHooksTest extends TestKit(ActorSystem("LifCycleTest")) with WordSpecLike with BeforeAndAfterAll {

  override def afterAll(): Unit = {
    system.terminate()
  }

  "The Child" must {
    "log lifecycle hooks" in {
      val testActorRef = system.actorOf(
        Props[LifeCycleHooks], "LifeCycleHooks")
      watch(testActorRef)
      testActorRef ! ForceRestart
      testActorRef.tell(SampleMessage, testActor)
      expectMsg(SampleMessage)
      system.stop(testActorRef)
      expectTerminated(testActorRef)

    }
  }
} 
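The LifeCycleHooks actor exercised above is not included in this listing. A speculative sketch that is consistent with the test (echoes SampleMessage to its sender and restarts itself on ForceRestart by throwing); the real actor's hook bodies may differ:

import akka.actor.{Actor, ActorLogging}

object LifeCycleHooks {
  case object ForceRestart
  case object SampleMessage
}

// Sketch only: logs each lifecycle hook, triggers a restart on ForceRestart,
// and echoes SampleMessage back to the sender.
class LifeCycleHooks extends Actor with ActorLogging {
  import LifeCycleHooks._

  override def preStart(): Unit = log.info("preStart")
  override def postStop(): Unit = log.info("postStop")
  override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
    log.info("preRestart")
    super.preRestart(reason, message)
  }
  override def postRestart(reason: Throwable): Unit = {
    log.info("postRestart")
    super.postRestart(reason)
  }

  def receive = {
    case ForceRestart  => throw new IllegalStateException("force restart")
    case SampleMessage => sender() ! SampleMessage
  }
}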
Example 61
Source File: ServiceSpec.scala    From mqtt-mongo   with MIT License 5 votes vote down vote up
package com.izmailoff.mm.service

import akka.actor.ActorSystem
import akka.testkit.{TestProbe, DefaultTimeout, ImplicitSender, TestKit}
import com.izmailoff.mm.config.GlobalAppConfig
import com.sandinh.paho.akka.MqttPubSub.{Subscribe, SubscribeAck, Message}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._
import scala.collection.JavaConversions._


class ServiceSpec
  extends TestKit(ActorSystem("test-mqtt-mongo-system", GlobalAppConfig.config))
  with DefaultTimeout
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with TestMqttMongoServiceImpl
  with TestHelpers {

  override def afterAll {
    shutdown()
  }

  "Subscription between MQTT Broker and Consumer" should {
    "get established when consumer is started" in {
      val mqttBroker = startMqttIntermediary()
      val probe = TestProbe()
      val mqttConsumer = startMqttConsumer(probe.ref)

      probe.expectMsg(Subscribe(testTopic, mqttConsumer))
      probe.forward(mqttBroker, Subscribe(testTopic, probe.ref))
      probe.expectMsg(SubscribeAck(Subscribe(testTopic, probe.ref)))
      probe.forward(mqttConsumer, SubscribeAck(Subscribe(testTopic, mqttConsumer)))
      probe.expectNoMsg()
    }
  }

  "Sending a message to MQTT Broker" should {
    "forward it to MQTT Consumer and get saved in DB in proper JSON format" in {
      val collection = getCollectionName(testTopic).head
      db.getCollection(collection).count() should be(0)
      val mqttBroker = startMqttIntermediary()
      val mqttConsumer = startMqttConsumer(mqttBroker)
      expectNoMsg(1 second)

      mqttBroker ! new Message(testTopic, "test content".getBytes)
      mqttBroker ! new Message(testTopic, """{ "field1" : "str val", "field2" : 123 }""".getBytes)
      expectNoMsg(1 second)

      db.getCollection(collection).count() should be(2)
      val allDocsDb = db.getCollection(collection).find().iterator.toList
      allDocsDb.exists { d =>
        val fields: Map[Any, Any] = d.toMap.toMap
        fields.size == 2 &&
          fields("payload") == "test content"
      } should be(true)
      allDocsDb.exists { d =>
        val fields: Map[Any, Any] = d.toMap.toMap
        fields.size == 3 &&
          fields("field1") == "str val" &&
          fields("field2") == 123
      } should be(true)
    }
  }


} 
Example 62
Source File: SharedSparkSessionSuite.scala    From spark-tensorflow-connector   with Apache License 2.0 5 votes vote down vote up
package org.trustedanalytics.spark.datasources.tensorflow

import java.io.File

import org.apache.commons.io.FileUtils
import org.apache.spark.SharedSparkSession
import org.junit.{After, Before}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}


trait BaseSuite extends WordSpecLike with Matchers with BeforeAndAfterAll

class SharedSparkSessionSuite extends SharedSparkSession with BaseSuite {
  val TF_SANDBOX_DIR = "tf-sandbox"
  val file = new File(TF_SANDBOX_DIR)

  @Before
  override def beforeAll() = {
    super.setUp()
    FileUtils.deleteQuietly(file)
    file.mkdirs()
  }

  @After
  override def afterAll() = {
    FileUtils.deleteQuietly(file)
    super.tearDown()
  }
} 
Example 63
Source File: LeaseContentionSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.coordination.lease.kubernetes

import java.util.concurrent.Executors

import akka.actor.ActorSystem
import akka.coordination.lease.TimeoutSettings
import akka.coordination.lease.kubernetes.internal.KubernetesApiImpl
import akka.coordination.lease.scaladsl.LeaseProvider
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.immutable
import scala.concurrent.{ExecutionContext, Future}


class LeaseContentionSpec extends TestKit(ActorSystem("LeaseContentionSpec", ConfigFactory.parseString(
  """
    akka.loglevel = INFO
    akka.coordination.lease.kubernetes {
      api-service-host = localhost
      api-service-port = 8080
      namespace = "lease"
      namespace-path = ""
      secure-api-server = false
    }

  """
))) with WordSpecLike with Matchers with ScalaFutures with BeforeAndAfterAll {

  implicit val patience: PatienceConfig = PatienceConfig(testKitSettings.DefaultTimeout.duration)

  // for cleanup
  val k8sApi = new KubernetesApiImpl(system, KubernetesSettings(system, TimeoutSettings(system.settings.config.getConfig("akka.coordination.lease.kubernetes"))))

  val lease1 = "contended-lease"
  val lease2 = "contended-lease-2"


  override protected def beforeAll(): Unit = {
    k8sApi.removeLease(lease1).futureValue
    k8sApi.removeLease(lease2).futureValue
  }

  override protected def afterAll(): Unit ={
    TestKit.shutdownActorSystem(system)
  }

  "A lease under contention" should {

    "only allow one client to get acquire lease" in {
      val underTest = LeaseProvider(system)
      val nrClients = 30
      implicit val ec = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(nrClients)) // too many = HTTP request queue of pool fills up
      // could make this more contended with a countdown latch so they all start at the same time
      val leases: immutable.Seq[(String, Boolean)] = Future.sequence((0 until nrClients).map(i => {
        val clientName = s"client$i"
        val lease = underTest.getLease(lease1, KubernetesLease.configPath, clientName)
        Future {
          lease.acquire()
        }.flatMap(identity).map(granted => (clientName, granted))
      })).futureValue

      val numberGranted = leases.count { case (_, granted) => granted }
      withClue(s"More than one lease granted $leases") {
        numberGranted shouldEqual 1
      }
    }
  }

} 
Example 64
Source File: HttpContactPointRoutesSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster.bootstrap.contactpoint

import akka.cluster.{ Cluster, ClusterEvent }
import akka.event.NoLogging
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.cluster.bootstrap.ClusterBootstrapSettings
import akka.testkit.{ SocketUtil, TestProbe }
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.{ Matchers, WordSpecLike }

class HttpContactPointRoutesSpec
    extends WordSpecLike
    with Matchers
    with ScalatestRouteTest
    with HttpBootstrapJsonProtocol
    with Eventually {

  implicit override val patienceConfig: PatienceConfig =
    PatienceConfig(timeout = scaled(Span(3, Seconds)), interval = scaled(Span(50, Millis)))

  override def testConfigSource =
    s"""
    akka {
      remote {
        netty.tcp {
          hostname = "127.0.0.1"
          port = ${SocketUtil.temporaryServerAddress("127.0.0.1").getPort}
        }
      }
    }
    """.stripMargin

  "Http Bootstrap routes" should {

    val settings = ClusterBootstrapSettings(system.settings.config, NoLogging)
    val httpBootstrap = new HttpClusterBootstrapRoutes(settings)

    "empty list if node is not part of a cluster" in {
      ClusterBootstrapRequests.bootstrapSeedNodes("") ~> httpBootstrap.routes ~> check {
        responseAs[String] should include(""""seedNodes":[]""")
      }
    }

    "include seed nodes when part of a cluster" in {
      val cluster = Cluster(system)
      cluster.join(cluster.selfAddress)

      val p = TestProbe()
      cluster.subscribe(p.ref, ClusterEvent.InitialStateAsEvents, classOf[ClusterEvent.MemberUp])
      val up = p.expectMsgType[ClusterEvent.MemberUp]
      up.member should ===(cluster.selfMember)

      eventually {
        ClusterBootstrapRequests.bootstrapSeedNodes("") ~> httpBootstrap.routes ~> check {
          val response = responseAs[HttpBootstrapJsonProtocol.SeedNodes]
          response.seedNodes should !==(Set.empty)
          response.seedNodes.map(_.node) should contain(cluster.selfAddress)
        }
      }
    }
  }

} 
Example 65
Source File: ConsulDiscoverySpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.cluster.bootstrap.discovery

import java.net.InetAddress

import akka.actor.ActorSystem
import akka.discovery.ServiceDiscovery.ResolvedTarget
import akka.discovery.consul.ConsulServiceDiscovery
import akka.testkit.TestKitBase
import com.google.common.net.HostAndPort
import com.orbitz.consul.Consul
import com.orbitz.consul.model.catalog.ImmutableCatalogRegistration
import com.orbitz.consul.model.health.ImmutableService
import com.pszymczyk.consul.{ ConsulProcess, ConsulStarterBuilder }
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import scala.concurrent.duration._

class ConsulDiscoverySpec extends WordSpecLike with Matchers with BeforeAndAfterAll with TestKitBase with ScalaFutures {

  private val consul: ConsulProcess = ConsulStarterBuilder.consulStarter().withHttpPort(8500).build().start()

  "Consul Discovery" should {
    "work for defaults" in {
      val consulAgent =
        Consul.builder().withHostAndPort(HostAndPort.fromParts(consul.getAddress, consul.getHttpPort)).build()
      consulAgent
        .catalogClient()
        .register(
          ImmutableCatalogRegistration
            .builder()
            .service(
              ImmutableService
                .builder()
                .addTags(s"system:${system.name}", "akka-management-port:1234")
                .address("127.0.0.1")
                .id("test")
                .service("test")
                .port(1235)
                .build()
            )
            .node("testNode")
            .address("localhost")
            .build()
        )

      val lookupService = new ConsulServiceDiscovery(system)
      val resolved = lookupService.lookup("test", 10.seconds).futureValue
      resolved.addresses should contain(
        ResolvedTarget(
          host = "127.0.0.1",
          port = Some(1234),
          address = Some(InetAddress.getByName("127.0.0.1"))
        )
      )
    }
  }

  override def afterAll(): Unit = {
    super.afterAll()
    consul.close()
  }

  override implicit lazy val system: ActorSystem = ActorSystem("test")

  implicit override val patienceConfig: PatienceConfig =
    PatienceConfig(timeout = scaled(Span(30, Seconds)), interval = scaled(Span(50, Millis)))

} 
Example 66
Source File: ClusterMembershipCheckSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster.scaladsl

import akka.actor.ActorSystem
import akka.cluster.MemberStatus
import akka.testkit.TestKit
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ Matchers, WordSpecLike }

class ClusterMembershipCheckSpec
    extends TestKit(ActorSystem("ClusterHealthCheck"))
    with WordSpecLike
    with Matchers
    with ScalaFutures {

  "Cluster Health" should {
    "be unhealthy if current state not one of healthy states" in {
      val chc = new ClusterMembershipCheck(
        system,
        () => MemberStatus.joining,
        new ClusterMembershipCheckSettings(Set(MemberStatus.Up)))

      chc().futureValue shouldEqual false
    }
    "be unhealthy if current state is one of healthy states" in {
      val chc =
        new ClusterMembershipCheck(
          system,
          () => MemberStatus.Up,
          new ClusterMembershipCheckSettings(Set(MemberStatus.Up)))

      chc().futureValue shouldEqual true
    }
  }
} 
Example 67
Source File: STMultiNodeSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.persistence.multinode

import akka.remote.testkit.MultiNodeSpecCallbacks
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike


trait STMultiNodeSpec extends MultiNodeSpecCallbacks
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  override def beforeAll(): Unit = {
    super.beforeAll()
    multiNodeSpecBeforeAll()
  }

  override def afterAll(): Unit = {
    multiNodeSpecAfterAll()
    super.afterAll()
  }
} 
Example 68
Source File: ActorSystemSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.persistence

import akka.actor.ActorSystem
import akka.actor.setup.ActorSystemSetup
import akka.event.{ Logging, LoggingAdapter }
import akka.testkit.{ ImplicitSender, TestKit }
import com.typesafe.config.{ Config, ConfigFactory }
import org.scalactic.{ CanEqual, TypeCheckedTripleEquals }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

object ActorSystemSpec {
  def getCallerName(clazz: Class[_]): String = {
    val s = (Thread.currentThread.getStackTrace map (_.getClassName) drop 1)
      .dropWhile(_ matches "(java.lang.Thread|.*ActorSystemSpec.?$)")
    val reduced = s.lastIndexWhere(_ == clazz.getName) match {
      case -1 ⇒ s
      case z ⇒ s drop (z + 1)
    }
    reduced.head.replaceFirst(""".*\.""", "").replaceAll("[^a-zA-Z_0-9]", "_")
  }

}

abstract class ActorSystemSpec(system: ActorSystem) extends TestKit(system)
  with WordSpecLike with Matchers with BeforeAndAfterAll with TypeCheckedTripleEquals
  with ImplicitSender {

  def this(testName: String, config: Config) =
    this(ActorSystem(testName, config))

  def this(config: Config) = this(ActorSystemSpec.getCallerName(getClass), config)

  def this(setup: ActorSystemSetup) = this(ActorSystem(ActorSystemSpec.getCallerName(getClass), setup))

  def this() = this(ConfigFactory.empty())

  override protected def afterAll(): Unit = {
    shutdown()
    super.afterAll()
  }

  val log: LoggingAdapter = Logging(system, this.getClass)

  // for ScalaTest === compare of Class objects
  implicit def classEqualityConstraint[A, B]: CanEqual[Class[A], Class[B]] =
    new CanEqual[Class[A], Class[B]] {
      def areEqual(a: Class[A], b: Class[B]) = a == b
    }

} 
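A concrete suite would extend ActorSystemSpec and pick one of its constructors. A hypothetical usage example (the suite name and configuration below are made up for illustration):

import com.typesafe.config.ConfigFactory

// Hypothetical example: supplies extra configuration through the
// (testName, config) constructor and uses the inherited TestKit helpers.
class ExamplePersistenceSpec extends ActorSystemSpec(
  "ExamplePersistenceSpec",
  ConfigFactory.parseString("akka.loglevel = DEBUG")
) {
  "the actor system" must {
    "carry the name passed to the constructor" in {
      system.name should ===("ExamplePersistenceSpec")
    }
  }
}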
Example 69
Source File: STMultiNodeSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.persistence.multinode

import akka.remote.testkit.MultiNodeSpecCallbacks
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike


trait STMultiNodeSpec extends MultiNodeSpecCallbacks
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  override def beforeAll(): Unit = {
    super.beforeAll()
    multiNodeSpecBeforeAll()
  }

  override def afterAll(): Unit = {
    multiNodeSpecAfterAll()
    super.afterAll()
  }
} 
Example 70
Source File: CouchbaseJournalIntegrationSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit, WithLogCapturing}
import com.typesafe.config.ConfigFactory
import org.scalatest.{Matchers, WordSpecLike}

import scala.concurrent.duration._

class CouchbaseJournalIntegrationSpec
    extends TestKit(
      ActorSystem(
        "CouchbaseJournalIntegrationSpec",
        ConfigFactory.parseString("""
        akka.loglevel = debug
        akka.loggers = ["akka.testkit.SilenceAllTestEventListener"]
      """).withFallback(ConfigFactory.load())
      )
    )
    with ImplicitSender
    with WordSpecLike
    with Matchers
    with CouchbaseBucketSetup
    with WithLogCapturing {
  "The Couchbase Journal" must {
    "always replay to the latest written event" in {
      // even with outstanding writes - covers #140, and also that replay of a longer journal works
      val ref1 = system.actorOf(TestActor.props("latest-written"))

      ref1 ! TestActor.PersistAllAsync((0 to 500).map(_.toString))
      expectMsg("PersistAllAsync-triggered") // large write to be in progress when replay happens _ =>

      watch(ref1)
      ref1 ! TestActor.Stop
      expectTerminated(ref1)

      // if write is still happening, recovery finding highest seqnr should still work
      val ref2 = system.actorOf(TestActor.props("latest-written"))
      ref2 ! TestActor.GetLastRecoveredEvent
      expectMsg(10.seconds, "500")
    }
  }
} 
Example 71
Source File: AbstractCouchbaseSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase.scaladsl

import akka.actor.{ActorRef, ActorSystem}
import akka.persistence.couchbase.{CouchbaseBucketSetup, TestActor}
import akka.persistence.query.PersistenceQuery
import akka.stream.{ActorMaterializer, Materializer}
import akka.testkit.{TestKit, TestProbe, WithLogCapturing}
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._

abstract class AbstractCouchbaseSpec(testName: String, config: Config)
    extends TestKit(
      ActorSystem(testName, config.withFallback(ConfigFactory.load()))
    )
    with WordSpecLike
    with BeforeAndAfterAll
    with Matchers
    with ScalaFutures
    with CouchbaseBucketSetup
    with WithLogCapturing {
  def this(testName: String) =
    this(
      testName,
      ConfigFactory.parseString("""
            couchbase-journal.read {
              page-size = 10
            }
            akka.loggers = ["akka.testkit.SilenceAllTestEventListener"]
            akka.loglevel=debug
          """)
    )

  var idCounter = 0
  def nextPersistenceId(): String = {
    idCounter += 1
    val id = Integer.toString(idCounter, 24)
    id.toString
  }

  // provides a unique persistence-id per test case and some initial persisted events
  protected trait Setup {
    lazy val probe = TestProbe()
    implicit def sender: ActorRef = probe.ref
    // note must be a def or lazy val or else it doesn't work (init order)
    def initialPersistedEvents: Int = 0
    def startPersistentActor(initialEvents: Int): (String, ActorRef) = {
      val pid = nextPersistenceId()
      system.log.debug("Starting actor with pid {}, and writing {} initial events", pid, initialPersistedEvents)
      val persistentActor = system.actorOf(TestActor.props(pid))
      if (initialEvents > 0) {
        for (i <- 1 to initialEvents) {
          persistentActor ! s"$pid-$i"
          probe.expectMsg(s"$pid-$i-done")
        }
      }
      (pid, persistentActor)
    }
    val (pid, persistentActor) = startPersistentActor(initialPersistedEvents)

    // no guarantee we can immediately read our own writes
    def readingOurOwnWrites[A](f: => A): A =
      awaitAssert(f, readOurOwnWritesTimeout, interval = 250.millis) // no need to bombard the db with retries
  }

  protected val noMsgTimeout = 100.millis
  protected val readOurOwnWritesTimeout = 10.seconds
  override implicit val patienceConfig: PatienceConfig = PatienceConfig(readOurOwnWritesTimeout)
  implicit val materializer: Materializer = ActorMaterializer()

  // #read-journal-access
  lazy val queries: CouchbaseReadJournal =
    PersistenceQuery(system).readJournalFor[CouchbaseReadJournal](CouchbaseReadJournal.Identifier)
  // #read-journal-access

  protected override def afterAll(): Unit = {
    super.afterAll()
    shutdown(system)
  }
} 
Example 72
Source File: CouchbaseSnapshotSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase
import akka.actor.{ActorSystem, PoisonPill}
import akka.persistence.couchbase.TestActor.{GetLastRecoveredEvent, SaveSnapshot}
import akka.stream.ActorMaterializer
import akka.testkit.{TestKit, TestProbe, WithLogCapturing}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class CouchbaseSnapshotSpec
    extends TestKit(
      ActorSystem(
        "CouchbaseSnapshotSpec",
        ConfigFactory.parseString("""
            |akka.loggers = ["akka.testkit.SilenceAllTestEventListener"]
          """.stripMargin).withFallback(ConfigFactory.load())
      )
    )
    with WordSpecLike
    with BeforeAndAfterAll
    with Matchers
    with CouchbaseBucketSetup
    with BeforeAndAfterEach
    with WithLogCapturing {
  protected override def afterAll(): Unit = {
    super.afterAll()
    shutdown(system)
  }

  val waitTime = 100.millis
  implicit val materializer = ActorMaterializer()

  "entity" should {
    "recover" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      {
        val pa1 = system.actorOf(TestActor.props("p1"))
        pa1 ! "p1-evt-1"
        senderProbe.expectMsg("p1-evt-1-done")

        senderProbe.watch(pa1)
        pa1 ! PoisonPill
        senderProbe.expectTerminated(pa1)
      }
      {
        val pa1 = system.actorOf(TestActor.props("p1"))

        pa1 ! GetLastRecoveredEvent
        senderProbe.expectMsg("p1-evt-1")
      }
    }
    "recover after snapshot" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      {
        val pa1 = system.actorOf(TestActor.props("p2"))
        pa1 ! "p2-evt-1"
        senderProbe.expectMsg("p2-evt-1-done")

        pa1 ! SaveSnapshot
        senderProbe.expectMsgType[Long]

        senderProbe.watch(pa1)
        pa1 ! PoisonPill
        senderProbe.expectTerminated(pa1)
      }
      {
        val pa1 = system.actorOf(TestActor.props("p2"))

        pa1 ! GetLastRecoveredEvent
        senderProbe.expectMsg("p2-evt-1")
      }
    }
  }
} 
Example 73
Source File: AkkaPersistenceEventLogSpec.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases.log

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.testkit.TestKit
import com.github.krasserm.ases._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpecLike}

import scala.collection.immutable.Seq

class AkkaPersistenceEventLogSpec extends TestKit(ActorSystem("test")) with WordSpecLike with Matchers with ScalaFutures with StreamSpec {
  val akkaPersistenceEventLog: AkkaPersistenceEventLog = new AkkaPersistenceEventLog(journalId = "akka.persistence.journal.inmem")

  "An Akka Persistence event log" must {
    "provide a sink for writing events and a source for delivering replayed events" in {
      val persistenceId = "1"
      val events = Seq("a", "b", "c").map(Emitted(_, emitterId))
      val expected = durables(events, offset = 1).map(Delivered(_)) :+ Recovered

      Source(events).runWith(akkaPersistenceEventLog.sink(persistenceId)).futureValue
      akkaPersistenceEventLog.source[String](persistenceId).runWith(Sink.seq).futureValue should be(expected)
    }
    "provide a flow with an input port for writing events and and output port for delivering replayed and live events" in {
      val persistenceId = "2"
      val events1 = Seq("a", "b", "c").map(Emitted(_, emitterId))
      val events2 = Seq("d", "e", "f").map(Emitted(_, emitterId))
      val expected = (durables(events1, offset = 1).map(Delivered(_)) :+ Recovered) ++ durables(events2, offset = 4).map(Delivered(_))

      Source(events1).runWith(akkaPersistenceEventLog.sink(persistenceId)).futureValue
      Source(events2).via(akkaPersistenceEventLog.flow(persistenceId)).runWith(Sink.seq).futureValue should be(expected)
    }
    "provide a source that only delivers events of compatible types" in {
      val persistenceId = "3"
      val events = Seq("a", "b", 1, 2).map(Emitted(_, emitterId))
      val expected = durables(events, offset = 1).drop(2).map(Delivered(_)) :+ Recovered

      Source(events).runWith(akkaPersistenceEventLog.sink(persistenceId)).futureValue
      akkaPersistenceEventLog.source[Int](persistenceId).runWith(Sink.seq).futureValue should be(expected)
    }
  }
} 
Example 74
Source File: KafkaEventLogSpec.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases.log

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.testkit.TestKit
import com.github.krasserm.ases._
import org.apache.kafka.common.TopicPartition
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{Matchers, WordSpecLike}

import scala.collection.immutable.Seq

class KafkaEventLogSpec extends TestKit(ActorSystem("test")) with WordSpecLike with Matchers with ScalaFutures with StreamSpec with KafkaSpec {
  implicit val pc = PatienceConfig(timeout = Span(5, Seconds), interval = Span(10, Millis))

  val kafkaEventLog: KafkaEventLog = new KafkaEventLog(host, port)

  "A Kafka event log" must {
    "provide a sink for writing events and a source for delivering replayed events" in {
      val topicPartition = new TopicPartition("p-1", 0)
      val events = Seq("a", "b", "c").map(Emitted(_, emitterId))
      val expected = durables(events).map(Delivered(_)) :+ Recovered

      Source(events).runWith(kafkaEventLog.sink(topicPartition)).futureValue
      kafkaEventLog.source[String](topicPartition).take(4).runWith(Sink.seq).futureValue should be(expected)
    }
    "provide a flow with an input port for writing events and and output port for delivering replayed and live events" in {
      val topicPartition = new TopicPartition("p-2", 0)
      val events1 = Seq("a", "b", "c").map(Emitted(_, emitterId))
      val events2 = Seq("d", "e", "f").map(Emitted(_, emitterId))
      val expected = (durables(events1).map(Delivered(_)) :+ Recovered) ++ durables(events2, offset = 3).map(Delivered(_))

      Source(events1).runWith(kafkaEventLog.sink(topicPartition)).futureValue
      Source(events2).via(kafkaEventLog.flow(topicPartition)).take(7).runWith(Sink.seq).futureValue should be(expected)
    }
    "provide a source that only delivers events of compatible types" in {
      val topicPartition = new TopicPartition("p-3", 0)
      val events = Seq("a", "b", 1, 2).map(Emitted(_, emitterId))
      val expected = durables(events).drop(2).map(Delivered(_)) :+ Recovered

      Source(events).runWith(kafkaEventLog.sink(topicPartition)).futureValue
      kafkaEventLog.source[Int](topicPartition).take(3).runWith(Sink.seq).futureValue should be(expected)
    }
  }
} 
Example 75
Source File: RequestRoutingSpec.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, Sink, Source}
import akka.testkit.TestKit
import com.github.krasserm.ases.log.AkkaPersistenceEventLog
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpecLike}
import scala.collection.immutable.Seq

object RequestRoutingSpec {
  import EventSourcing._

  sealed trait Request {
    def aggregateId: String
  }
  case class GetState(aggregateId: String) extends Request              // Query
  case class Increment(aggregateId: String, delta: Int) extends Request // Command
  case class Incremented(aggregateId: String, delta: Int)               // Event
  case class Response(aggregateId: String, state: Int)

  val requestHandler: RequestHandler[Int, Incremented, Request, Response] = {
    case (s, GetState(aggregateId))     => respond(Response(aggregateId, s))
    case (_, Increment(aggregateId, d)) => emit(Seq(Incremented(aggregateId, d)), Response(aggregateId, _))
  }

  val eventHandler: EventHandler[Int, Incremented] =
    (s, e) => s + e.delta
}

class RequestRoutingSpec extends TestKit(ActorSystem("test")) with WordSpecLike with Matchers with ScalaFutures with StreamSpec {
  import RequestRoutingSpec._

  val akkaPersistenceEventLog: AkkaPersistenceEventLog =
    new log.AkkaPersistenceEventLog(journalId = "akka.persistence.journal.inmem")

  def processor(aggregateId: String): Flow[Request, Response, NotUsed] =
    EventSourcing(aggregateId, 0, requestHandler, eventHandler).join(akkaPersistenceEventLog.flow(aggregateId))

  def router: Flow[Request, Response, NotUsed] =
    Router(_.aggregateId, processor)

  "A request router" when {
    "configured to route based on aggregate id" must {
      "dynamically create a request processor for each aggregate id" in {
        val aggregateId1 = "a1"
        val aggregateId2 = "a2"

        val (pub, sub) = probes(router)

        pub.sendNext(Increment(aggregateId1, 3))
        sub.requestNext(Response(aggregateId1, 3))

        pub.sendNext(Increment(aggregateId2, 1))
        sub.requestNext(Response(aggregateId2, 1))

        pub.sendNext(Increment(aggregateId1, 2))
        sub.requestNext(Response(aggregateId1, 5))

        pub.sendNext(Increment(aggregateId2, -4))
        sub.requestNext(Response(aggregateId2, -3))
      }
      "handle single command using Source.single" in {
        val request = Increment("a3", 3)
        val expected = Response("a3", 3)
        Source.single(request)
          .via(router)
          .runWith(Sink.head)
          .futureValue should be(expected)
      }
      "handle single command using Source.apply(Seq)" in {
        val request = Increment("a4", 3)
        val expected = Response("a4", 3)
        Source(Seq(request))
          .via(router)
          .runWith(Sink.head)
          .futureValue should be(expected)
      }
      "handle multiple commands" in {
        Source(Seq(Increment("a5", 1), Increment("a5", 2), Increment("a5", 3)))
          .via(router)
          .runWith(Sink.seq)
          .futureValue should be(Seq(Response("a5", 1), Response("a5", 3), Response("a5", 6)))
      }
    }
  }
} 
Example 76
Source File: EventCollaborationSpec.scala    From akka-stream-eventsourcing   with Apache License 2.0 5 votes vote down vote up
package com.github.krasserm.ases

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, Sink}
import akka.testkit.TestKit
import com.github.krasserm.ases.log.{KafkaEventLog, KafkaSpec}
import org.apache.kafka.common.TopicPartition
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{Matchers, WordSpecLike}

import scala.collection.immutable.Seq

class EventCollaborationSpec extends TestKit(ActorSystem("test")) with WordSpecLike with Matchers with ScalaFutures with StreamSpec with KafkaSpec {
  import EventSourcingSpec._

  implicit val pc = PatienceConfig(timeout = Span(5, Seconds), interval = Span(10, Millis))

  val emitterId1 = "processor1"
  val emitterId2 = "processor2"

  val kafkaEventLog: KafkaEventLog =
    new log.KafkaEventLog(host, port)

  def processor(emitterId: String, topicPartition: TopicPartition): Flow[Request, Response, NotUsed] =
    EventSourcing(emitterId, 0, requestHandler, eventHandler).join(kafkaEventLog.flow(topicPartition))

  "A group of EventSourcing stages" when {
    "joined with a shared event log" can {
      "collaborate via publish-subscribe" in {
        val topicPartition = new TopicPartition("p-1", 0)    // shared topic partition
        val (pub1, sub1) = probes(processor(emitterId1, topicPartition)) // processor 1
        val (pub2, sub2) = probes(processor(emitterId2, topicPartition)) // processor 2

        pub1.sendNext(Increment(3))
        // Both processors receive event but
        // only processor 1 creates response
        sub1.requestNext(Response(3))

        pub2.sendNext(Increment(-4))
        // Both processors receive event but
        // only processor 2 creates response
        sub2.requestNext(Response(-1))

        // consume and verify events emitted by both processors
        kafkaEventLog.source[Incremented](topicPartition).via(log.replayed).map {
          case Durable(event, eid, _, sequenceNr) => (event, eid, sequenceNr)
        }.runWith(Sink.seq).futureValue should be(Seq(
          (Incremented(3), emitterId1, 0L),
          (Incremented(-4), emitterId2, 1L)
        ))
      }
    }
  }
} 
Example 77
Source File: ReservationViewEndpointSpec.scala    From ddd-leaven-akka-v2   with MIT License 5 votes vote down vote up
package ecommerce.sales.app

import java.sql.Date

import akka.http.scaladsl.model.StatusCodes.NotFound
import akka.http.scaladsl.server._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import com.typesafe.config.ConfigFactory
import ecommerce.sales.view.{ReservationDao, ReservationView, ViewTestSupport}
import ecommerce.sales.{ReservationStatus, SalesSerializationHintsProvider}
import org.joda.time.DateTime._
import org.json4s.Formats
import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike}
import pl.newicom.dddd.serialization.JsonSerHints._
import pl.newicom.dddd.utils.UUIDSupport.uuid7

class ReservationViewEndpointSpec extends WordSpecLike with Matchers with ScalatestRouteTest with ViewTestSupport with BeforeAndAfter {

  override lazy val config = ConfigFactory.load
  implicit val formats: Formats = new SalesSerializationHintsProvider().hints()

  lazy val dao = new ReservationDao
  val reservationId = uuid7

  before {
    viewStore.run {
      dao.createOrUpdate(ReservationView(reservationId, "client-1", ReservationStatus.Opened, new Date(now.getMillis)))
    }.futureValue
  }

  after {
    viewStore.run {
      dao.remove(reservationId)
    }.futureValue
  }

  "Reservation view endpoint" should {

    def response = responseAs[String]

    val route: Route = new ReservationViewEndpoint().route(viewStore)

    "respond to /reservation/all with all reservations" in {
      Get("/reservation/all") ~> route ~> check {
        response should include (reservationId)
      }
    }

    "respond to /reservation/{reservationId} with requested reservation" in {
      Get(s"/reservation/$reservationId") ~> route ~> check {
        response should include (reservationId)
      }
    }

    "respond to /reservation/{reservationId} with NotFound if reservation unknown" in {
      Get(s"/reservation/invalid") ~> route ~> check {
        status shouldBe NotFound
      }
    }

  }

  def ensureSchemaDropped = dao.ensureSchemaDropped
  def ensureSchemaCreated = dao.ensureSchemaCreated

} 
Example 78
Source File: ShipmentViewEndpointSpec.scala    From ddd-leaven-akka-v2   with MIT License 5 votes vote down vote up
package ecommerce.shipping.app

import akka.http.scaladsl.model.StatusCodes.NotFound
import akka.http.scaladsl.server._
import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest}
import akka.testkit.TestDuration
import com.typesafe.config.ConfigFactory
import ecommerce.sales.view.ViewTestSupport
import ecommerce.shipping.view.{ShipmentDao, ShipmentView}
import ecommerce.shipping.{ShippingSerializationHintsProvider, ShippingStatus}
import org.json4s.Formats
import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike}
import pl.newicom.dddd.serialization.JsonSerHints._
import pl.newicom.dddd.utils.UUIDSupport.uuid7

import scala.concurrent.duration.DurationInt

class ShipmentViewEndpointSpec extends WordSpecLike with Matchers with ScalatestRouteTest
  with ViewTestSupport with BeforeAndAfter {

  override lazy val config = ConfigFactory.load
  implicit val formats: Formats = new ShippingSerializationHintsProvider().hints()

  implicit val routeTimeout = RouteTestTimeout(3.seconds dilated)

  lazy val dao = new ShipmentDao
  val shipmentId = uuid7

  before {
    viewStore.run {
      dao.createOrUpdate(ShipmentView(shipmentId, "order-1", ShippingStatus.Delivered))
    }.futureValue
  }

  after {
    viewStore.run {
      dao.remove(shipmentId)
    }.futureValue
  }

  "Shipment view endpoint" should {

    def response = responseAs[String]

    val route: Route = new ShipmentViewEndpoint().route(viewStore)

    "respond to /shipment/all with all shipments" in {
      Get("/shipment/all") ~> route ~> check {
        response should include (shipmentId)
      }
    }

    "respond to /shipment/{shipmentId} with requested shipment" in {
      Get(s"/shipment/$shipmentId") ~> route ~> check {
        response should include (shipmentId)
      }
    }

    "respond to /shipment/{shipmentId} with NotFound if shipment unknown" in {
      Get(s"/shipment/invalid") ~> route ~> check {
        status shouldBe NotFound
      }
    }

  }

  def ensureSchemaDropped = dao.ensureSchemaDropped
  def ensureSchemaCreated = dao.ensureSchemaCreated

} 
Example 79
Source File: TestDriver.scala    From ddd-leaven-akka-v2   with MIT License 5 votes vote down vote up
package ecommerce.tests.e2e

import io.restassured.RestAssured._
import io.restassured.builder.RequestSpecBuilder
import io.restassured.config.HttpClientConfig
import io.restassured.config.HttpClientConfig.HttpClientFactory
import io.restassured.filter.log.LogDetail
import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse
import io.restassured.response.ValidatableResponse
import io.restassured.specification.RequestSpecification
import org.apache.http.client.HttpClient
import org.apache.http.impl.client.SystemDefaultHttpClient
import org.apache.http.params.HttpConnectionParams
import org.json4s.Formats
import org.json4s.native.Serialization.write
import org.scalatest.{Matchers, WordSpecLike}
import pl.newicom.dddd.aggregate.Command

trait TestDriver extends WordSpecLike with Matchers {

  val clientConfig: HttpClientConfig = config.getHttpClientConfig
    .httpClientFactory(new HttpClientFactory() {
      override def createHttpClient: HttpClient = {
        val rv         = new SystemDefaultHttpClient
        val httpParams = rv.getParams
        HttpConnectionParams.setConnectionTimeout(httpParams, 2 * 1000) // Wait up to 2s for a connection
        HttpConnectionParams.setSoTimeout(httpParams, 60 * 1000)        // Allow up to 60s of socket inactivity
        rv
      }
    })
    .reuseHttpClientInstance()

  def using[R](endpoint: EndpointConfig)(testBody: RequestSpecBuilder => R): R = {
    testBody(
      new RequestSpecBuilder()
        .setConfig(config.httpClient(clientConfig))
        .setBaseUri(endpoint.toUrl)
        .setContentType("application/json")
        .log(LogDetail.ALL)
    )
  }

  def POST(implicit builder: RequestSpecBuilder): POSTOps =
    new POSTOps(builder.build())

  def GET(implicit builder: RequestSpecBuilder): GETOps =
    new GETOps(builder.build())

  class POSTOps(reqSpec: RequestSpecification) {

    def command(c: Command)(implicit formats: Formats): ValidatableResponse =
      given(reqSpec)
        .body(write(c))
        .header("Command-Type", c.getClass.getName)
      .post()
      .Then()
        .log().all()
        .statusCode(200)
  }

  class GETOps(reqSpec: RequestSpecification) {

    def /(subPath: String): ValidatableResponse =
      given(reqSpec)
        .get(subPath)
        .Then()
        .log().all()
        .statusCode(200)
  }

} 
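A hypothetical test built on this driver might look like the sketch below; the salesEndpoint value and the request path are placeholders, and EndpointConfig itself is defined elsewhere in the project:

// Hypothetical usage sketch: `salesEndpoint` must be supplied by the real test
// configuration; GET / "..." expects an HTTP 200 via the GETOps helper above.
class ReservationE2ETest extends TestDriver {

  def salesEndpoint: EndpointConfig = ??? // provided by the surrounding test setup

  "The sales service" should {
    "expose all reservations through its view endpoint" in {
      using(salesEndpoint) { implicit builder =>
        GET / "reservation/all"
      }
    }
  }
}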
Example 80
Source File: NonBlankStringSpec.scala    From play-ui   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.binders

import org.scalatest.{Matchers, WordSpecLike}
import play.api.libs.json.{JsError, JsString, JsSuccess, Json}

class NonBlankStringSpec extends WordSpecLike with Matchers {

  "Creating a NonBlankString" should {
    "throw an exception for a blank string" in {
      an[IllegalArgumentException] should be thrownBy NonBlankString("")
    }

    "give an error for a null string" in {
      an[IllegalArgumentException] should be thrownBy NonBlankString(null)
    }

    "give a success for a non-blank string" in {
      NonBlankString("x")
    }
  }

  "Reading a NonBlankString" should {
    "give an error for a blank string" in {
      validating("") shouldBe a[JsError]
    }

    "give an error for a null string" in {
      validating(null) shouldBe a[JsError]
    }

    "give a success for a non-blank string" in {
      validating("x") should be(JsSuccess(NonBlankString("x")))
    }

    def validating(s: String) = JsString(s).validate[NonBlankString]
  }

  "Writing a NonBlankString" should {
    "just include the value" in {
      Json.toJson(NonBlankString("x")) should be(JsString("x"))
    }
  }

  "Binding from a query string" should {
    "give an error for a blank string" in {
      binding("") shouldBe Some(Left("String was blank"))
    }

    "give an error for a null string" in {
      binding(null) shouldBe Some(Left("String was blank"))
    }

    "give a success for a non-blank string" in {
      binding("x") shouldBe Some(Right(NonBlankString("x")))
    }
    def binding(s: String) = NonBlankString.stringToNonBlankString.bind("v", Map("v" -> Seq(s)))
  }

  "Unbinding to a query string" should {
    "extract the value" in {
      unbinding(NonBlankString("something")) shouldBe "something"
    }
    def unbinding(n: NonBlankString) = NonBlankString.stringToNonBlankString.unbind("v", n)
  }

} 
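For context, a hypothetical NonBlankString implementation that would satisfy the spec above might look roughly like this; the actual play-ui code may differ in structure and error messages beyond what the assertions pin down:

import play.api.libs.json._
import play.api.mvc.QueryStringBindable

// Sketch only, reconstructed from the assertions in the spec above.
case class NonBlankString(value: String) {
  require(value != null && value.trim.nonEmpty, "String was blank")
}

object NonBlankString {
  implicit val format: Format[NonBlankString] = new Format[NonBlankString] {
    def reads(json: JsValue): JsResult[NonBlankString] = json match {
      case JsString(s) if s != null && s.trim.nonEmpty => JsSuccess(NonBlankString(s))
      case _                                           => JsError("String was blank")
    }
    def writes(n: NonBlankString): JsValue = JsString(n.value)
  }

  implicit val stringToNonBlankString: QueryStringBindable[NonBlankString] =
    new QueryStringBindable[NonBlankString] {
      def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, NonBlankString]] =
        params.get(key).flatMap(_.headOption).map {
          case s if s != null && s.trim.nonEmpty => Right(NonBlankString(s))
          case _                                 => Left("String was blank")
        }
      def unbind(key: String, value: NonBlankString): String = value.value
    }
}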
Example 81
Source File: ContinueUrlSpec.scala    From play-ui   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.binders

import org.scalatest.{EitherValues, Matchers, OptionValues, WordSpecLike}
import uk.gov.hmrc.play.binders.ContinueUrl._

class ContinueUrlSpec extends WordSpecLike with Matchers with EitherValues with OptionValues {

  "isAbsoluteUrl" should {
    "return true for an absolute URL" in {
      ContinueUrl("http://www.example.com").isAbsoluteUrl shouldBe true
    }

    "return false for a relative URL" in {
      ContinueUrl("/service/page").isAbsoluteUrl shouldBe false
    }
  }

  "isRelativeUrl" should {
    "return false for an absolute URL" in {
      ContinueUrl("http://www.example.com").isRelativeUrl shouldBe false
    }

    "return true for a relative URL" in {
      ContinueUrl("/service/page").isRelativeUrl shouldBe true
    }
  }

  "not work for protocol-relative urls" in {
    an[IllegalArgumentException] should be thrownBy ContinueUrl("//some/value?with=query")
    an[IllegalArgumentException] should be thrownBy ContinueUrl("///some/value?with=query")
    an[IllegalArgumentException] should be thrownBy ContinueUrl("////some/value?with=query")
  }

  "not work for urls with @" in {
    an[IllegalArgumentException] should be thrownBy ContinueUrl("/some/value?with=query@meh")
  }

  "not work for urls with /\\" in {
    an[IllegalArgumentException] should be thrownBy ContinueUrl("/\\www.example.com")
  }

  "not work for path-relative urls" in {
    an[IllegalArgumentException] should be thrownBy ContinueUrl("some/value?with=query")
  }

  "not work for non-urls" in {
    an[IllegalArgumentException] should be thrownBy ContinueUrl("someasdfasdfa")
  }

  "encodedUrl should produce the expected result" in {
    ContinueUrl("/some/value?with=query").encodedUrl shouldBe "%2Fsome%2Fvalue%3Fwith%3Dquery"
  }

  "Binding a continue URL" should {
    "work for host-relative URLs" in {
      val url = "/some/value"
      queryBinder.bind("continue", Map("continue" -> Seq(url))).value.right.value should be(ContinueUrl(url))
    }

    "work for host-relative URLs with query Params" in {
      val url = "/some/value?with=query"
      queryBinder.bind("continue", Map("continue" -> Seq(url))).value.right.value should be(ContinueUrl(url))
    }

    "not work for path-relative urls" in {
      val url = "some/value?with=query"
      queryBinder.bind("continue", Map("continue" -> Seq(url))).value.left.value should be(
        s"'$url' is not a valid continue URL")
    }

    "not work for non-urls" in {
      val url = "::"
      queryBinder.bind("continue", Map("continue" -> Seq(url))).value.left.value should be(
        s"'$url' is not a valid continue URL")
    }
  }

  "Unbinding a continue URL" should {
    "return the value" in {
      queryBinder.unbind("continue", ContinueUrl("/some/url")) should be("continue=%2Fsome%2Furl")
    }
  }

} 
Example 82
Source File: OriginSpec.scala    From play-ui   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.binders

import org.scalatest.{EitherValues, Matchers, OptionValues, WordSpecLike}
import uk.gov.hmrc.play.binders.Origin._

class OriginSpec extends WordSpecLike with Matchers with EitherValues with OptionValues {

  "Origin" should {

    "be valid" in {
      Origin("testing1").origin                   shouldBe "testing1"
      Origin("Testing1").origin                   shouldBe "Testing1"
      Origin("test-ing1").origin                  shouldBe "test-ing1"
      Origin("tesA.ing1").origin                  shouldBe "tesA.ing1"
      Origin(List.fill(100)('0').mkString).origin shouldBe List.fill(100)('0').mkString
    }

    "be invalid" in {
      an[IllegalArgumentException] should be thrownBy Origin("withInvalidCharacters!")
      an[IllegalArgumentException] should be thrownBy Origin("with white spaces")
      an[IllegalArgumentException] should be thrownBy Origin("")
      an[IllegalArgumentException] should be thrownBy Origin(List.fill(101)('0').mkString)
    }
  }

  "Origin binder" should {

    "default when origin has invalid characters" in {
      queryBinder.bind("origin", Map("origin" -> Seq("!asdasd"))).value.right.value should be(Origin("unknown"))
    }

    "default when no origin supplied" in {
      queryBinder.bind("origin", Map("origin" -> Seq.empty)).value.right.value should be(Origin("unknown"))
    }

    "take the first when two origins supplied" in {
      queryBinder.bind("origin", Map("origin" -> Seq("origin1", "origin2"))).value.right.value should be(
        Origin("origin1"))
    }

    "create origin" in {
      queryBinder.bind("origin", Map("origin" -> Seq("theOrigin"))).value.right.value should be(Origin("theOrigin"))
    }

  }

  "Unbinding a continue URL" should {
    "return the value" in {
      queryBinder.unbind("origin", Origin("tax-account-router")) should be("origin=tax-account-router")
    }
  }

} 
Example 83
Source File: PositiveIntegerSpec.scala    From play-ui   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.binders

import org.scalatest.{Matchers, WordSpecLike}

class PositiveIntegerSpec extends WordSpecLike with Matchers {

  "Creating a PositiveInteger" should {
    "throw an exception for a negative number" in {
      an[IllegalArgumentException] should be thrownBy PositiveInteger(-1)
    }

    "throw an exception for zero" in {
      an[IllegalArgumentException] should be thrownBy PositiveInteger(0)
    }

    "give a success for a positive number" in {
      PositiveInteger(1).value shouldBe 1
    }
  }

  "Binding from a query string" should {
    "give an error for a negative number" in {
      binding("-1") shouldBe Some(Left("number was <= 0"))
    }

    "give an error for zero" in {
      binding("0") shouldBe Some(Left("number was <= 0"))
    }

    "give a success for a positive number" in {
      binding("1") shouldBe Some(Right(PositiveInteger(1)))
    }
    def binding(s: String) = PositiveInteger.stringToPositiveInteger.bind("v", Map("v" -> Seq(s)))
  }

  "Unbinding to a query string" should {
    "extract the value" in {
      unbinding(PositiveInteger(5)) shouldBe "5"
    }
    def unbinding(n: PositiveInteger) = PositiveInteger.stringToPositiveInteger.unbind("v", n)
  }

} 
Example 84
Source File: ConsumerSpec.scala    From akka-cluster-load-balancing   with MIT License 5 votes vote down vote up
package kamkor.actor

import scala.concurrent.duration.DurationInt

import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import com.typesafe.config.ConfigFactory

import akka.actor.ActorSystem
import akka.testkit.{ EventFilter, ImplicitSender, TestKit }

class ConsumerSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  def this() = this(
    ActorSystem("ClusterSystem",
      ConfigFactory.parseString("""        
        akka.loggers = ["akka.testkit.TestEventListener"]
        akka.loglevel = "DEBUG"        
        """)))

  override def afterAll: Unit = TestKit.shutdownActorSystem(system)

  "A Customer actor that processes 1 message for 200 millis" must {
    "log endedProcessing with debug level 5 times within 1-1.3 seconds" in {
      val consumer = system.actorOf(Consumer.props(processingTimeMillis = 200))
      val data: Array[Int] = Array(0, 1, 2)

      // akka scheduling is not 100% accurate http://doc.akka.io/docs/akka/snapshot/scala/scheduler.html
      within(999.millis, 1300.millis) {
        EventFilter.debug(pattern = "endProcessing", occurrences = 5) intercept {
          for (_ <- 0 until 5) {
            consumer ! data
          }
        }
      }
    }
  }

} 
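The consumer itself is not part of this listing. A minimal, hedged sketch of an actor that this spec could drive is shown below: it sleeps for the configured processing time per batch and logs "endProcessing" at DEBUG level, which is the entry the EventFilter intercepts; five messages at 200 ms each then finish inside the asserted 1-1.3 second window. The real kamkor.actor.Consumer may do more than sleep.

import akka.actor.{Actor, ActorLogging, Props}

class ConsumerSketch(processingTimeMillis: Int) extends Actor with ActorLogging {
  def receive: Receive = {
    case _: Array[Int] =>
      Thread.sleep(processingTimeMillis.toLong) // simulate fixed-cost processing of one batch
      log.debug("endProcessing")                // intercepted by EventFilter.debug(pattern = "endProcessing")
  }
}

object ConsumerSketch {
  def props(processingTimeMillis: Int): Props = Props(new ConsumerSketch(processingTimeMillis))
}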
Example 85
Source File: DefaultSaverITCase.scala    From flink-tensorflow   with Apache License 2.0 5 votes vote down vote up
package org.apache.flink.contrib.tensorflow.io

import org.apache.flink.contrib.tensorflow.models.savedmodel.DefaultSavedModelLoader
import org.apache.flink.contrib.tensorflow.util.{FlinkTestBase, RegistrationUtils}
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.{Session, Tensor}

import scala.collection.JavaConverters._

@RunWith(classOf[JUnitRunner])
class DefaultSaverITCase extends WordSpecLike
  with Matchers
  with FlinkTestBase {

  override val parallelism = 1

  "A DefaultSaver" should {
    "run the save op" in {
      val env = StreamExecutionEnvironment.getExecutionEnvironment
      RegistrationUtils.registerTypes(env.getConfig)

      val loader = new DefaultSavedModelLoader(new Path("../models/half_plus_two"), "serve")
      val bundle = loader.load()
      val saverDef = loader.metagraph.getSaverDef
      val saver = new DefaultSaver(saverDef)

      def getA = getVariable(bundle.session(), "a").floatValue()
      def setA(value: Float) = setVariable(bundle.session(), "a", Tensor.create(value))

      val initialA = getA
      println("Initial value: " + initialA)

      setA(1.0f)
      val savePath = tempFolder.newFolder("model-0").getAbsolutePath
      val path = saver.save(bundle.session(), savePath)
      val savedA = getA
      savedA shouldBe (1.0f)
      println("Saved value: " + getA)

      setA(2.0f)
      val updatedA = getA
      updatedA shouldBe (2.0f)
      println("Updated value: " + updatedA)

      saver.restore(bundle.session(), path)
      val restoredA = getA
      restoredA shouldBe (savedA)
      println("Restored value: " + restoredA)
    }

    def getVariable(sess: Session, name: String): Tensor = {
      val result = sess.runner().fetch(name).run().asScala
      result.head
    }

    def setVariable(sess: Session, name: String, value: Tensor): Unit = {
      sess.runner()
        .addTarget(s"$name/Assign")
        .feed(s"$name/initial_value", value)
        .run()
    }
  }
} 
Example 86
Source File: ArraysTest.scala    From flink-tensorflow   with Apache License 2.0 5 votes vote down vote up
package org.tensorflow.contrib.scala

import com.twitter.bijection.Conversion._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.contrib.scala.Arrays._
import org.tensorflow.contrib.scala.Rank._
import resource._

@RunWith(classOf[JUnitRunner])
class ArraysTest extends WordSpecLike
  with Matchers {

  "Arrays" when {
    "Array[Float]" should {
      "convert to Tensor[`1D`,Float]" in {
        val expected = Array(1f,2f,3f)
        managed(expected.as[TypedTensor[`1D`,Float]]).foreach { t =>
          t.shape shouldEqual Array(expected.length)
          val actual = t.as[Array[Float]]
          actual shouldEqual expected
        }
      }
    }
  }
} 
Example 87
Source File: WarmupSpec.scala    From cloudstate   with Apache License 2.0 5 votes vote down vote up
package io.cloudstate.proxy

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}

class WarmupSpec
    extends TestKit(ActorSystem("WarmupSpec", ConfigFactory.load("test-in-memory")))
    with WordSpecLike
    with BeforeAndAfterAll
    with ImplicitSender {

  override protected def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  "The Warmup Actor" should {
    "successfully complete warmup when needsWarmup is true" in {
      val warmup = system.actorOf(Warmup.props(true))
      awaitCond({
        warmup ! Warmup.Ready
        expectMsgType[Boolean]
      })
    }

    "successfully complete warmup when needsWarmup is false" in {
      val warmup = system.actorOf(Warmup.props(false))
      awaitCond({
        warmup ! Warmup.Ready
        expectMsgType[Boolean]
      })
    }
  }
} 
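The Warmup actor itself is not included here. Read from the spec alone, any actor that answers Warmup.Ready with a Boolean reply that eventually becomes true satisfies the awaitCond loops; the hedged sketch below has only that shape, whereas the real io.cloudstate.proxy.Warmup performs actual proxy warm-up before reporting readiness.

import akka.actor.{Actor, Props}

object WarmupSketch {
  case object Ready
  def props(needsWarmup: Boolean): Props = Props(new WarmupSketch(needsWarmup))
}

class WarmupSketch(needsWarmup: Boolean) extends Actor {
  // Report not-ready at most once when warm-up is needed, then flip to ready;
  // awaitCond in the spec keeps polling until the reply is true.
  private var warmedUp = !needsWarmup

  def receive: Receive = {
    case WarmupSketch.Ready =>
      sender() ! warmedUp
      warmedUp = true
  }
}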
Example 88
Source File: PubSubRouterAltSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.pubsub.inmemory

import akka.actor.Props
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import cool.graph.akkautil.SingleThreadedActorSystem
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouterAlt
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class PubSubRouterAltSpec
    extends TestKit(SingleThreadedActorSystem("pubsub-router-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  override def afterAll = shutdown(verifySystemShutdown = true)

  "The PubSubRouter implementation" should {
    "subscribe subscribers correctly and route messages" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Publish(topic, "test")
      probe.expectMsg("test")
      probe.expectNoMsg(max = 1.second)

      routerActor ! Publish("testTopic2", "test2")
      probe.expectNoMsg(max = 1.second)
    }

    "unsubscribe subscribers correctly" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Unsubscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 0

      routerActor ! Publish(topic, "test")
      probe.expectNoMsg(max = 1.second)
    }

    "handle actor terminations" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      system.stop(probe.ref)
      Thread.sleep(50)
      router.router.routees.length shouldEqual 0
    }
  }
} 
Example 89
Source File: PubSubRouterSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.pubsub.inmemory

import akka.actor.Props
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import cool.graph.akkautil.SingleThreadedActorSystem
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouter
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class PubSubRouterSpec
    extends TestKit(SingleThreadedActorSystem("pubsub-router-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  override def afterAll = shutdown(verifySystemShutdown = true)

  "The PubSubRouter implementation" should {
    "subscribe subscribers correctly and route messages" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      routerActor ! Publish(topic, "test")
      probe.expectMsg("test")
      probe.expectNoMsg(max = 1.second)

      routerActor ! Publish("testTopic2", "test2")
      probe.expectNoMsg(max = 1.second)
    }

    "unsubscribe subscribers correctly" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      routerActor ! Unsubscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 0

      routerActor ! Publish(topic, "test")
      probe.expectNoMsg(max = 1.second)
    }

    "handle actor terminations" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      system.stop(probe.ref)
      Thread.sleep(50)
      router.subscribers.values.map(_.size).sum shouldEqual 0
    }
  }
} 
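The two router specs above pin down the contract: subscriptions are tracked per topic, a published message reaches only that topic's subscribers, and terminated subscribers are removed. The assertions also show the implementation difference, PubSubRouterAlt holding an Akka routing Router (router.routees) while PubSubRouter holds a subscribers map. The hedged sketch below implements the same contract with a plain map and deathwatch; it is not the graphcool implementation.

import akka.actor.{Actor, ActorRef, Terminated}
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}

class PubSubRouterSketch extends Actor {
  var subscribers: Map[String, Set[ActorRef]] = Map.empty

  def receive: Receive = {
    case Subscribe(topic, ref) =>
      context.watch(ref) // deathwatch so terminated subscribers are cleaned up
      subscribers += topic -> (subscribers.getOrElse(topic, Set.empty[ActorRef]) + ref)

    case Unsubscribe(topic, ref) =>
      subscribers += topic -> (subscribers.getOrElse(topic, Set.empty[ActorRef]) - ref)

    case Publish(topic, message) =>
      subscribers.getOrElse(topic, Set.empty[ActorRef]).foreach(_ ! message)

    case Terminated(ref) =>
      subscribers = subscribers.map { case (t, refs) => t -> (refs - ref) }
  }
}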
Example 90
Source File: InMemoryAkkaQueueSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.queue.inmemory

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.{TestKit, TestProbe}
import cool.graph.messagebus.QueuePublisher
import cool.graph.messagebus.queue.{BackoffStrategy, ConstantBackoff}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.Future
import scala.concurrent.duration._

class InMemoryAkkaQueueSpec
    extends TestKit(ActorSystem("queueing-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with ScalaFutures {

  implicit val materializer = ActorMaterializer()

  def withInMemoryQueue[T](backoff: BackoffStrategy = ConstantBackoff(100.millis))(testFn: (InMemoryAkkaQueue[T], TestProbe) => Unit) = {
    val inMemoryQueue = InMemoryAkkaQueue[T](backoff)
    val testProbe     = TestProbe()

    try {
      testFn(inMemoryQueue, testProbe)
    } finally {
      inMemoryQueue.shutdown
    }
  }

  override def afterAll = shutdown(verifySystemShutdown = true)

  "Queue" should {
    "call the onMsg function if a valid message arrives" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) })
        queue.publish("test")
        probe.expectMsg("test")
      }
    }

    "increment the message tries correctly on failure" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        queue.withConsumer((str: String) => { probe.ref ! str; Future.failed(new Exception("Kabooom")) })
        queue.publish("test")

        // 5 tries, 5 times the same message (can't check for the tries explicitly here)
        probe.expectMsgAllOf(2.seconds, Vector.fill(5) { "test" }: _*)
        probe.expectNoMsg(1.second)
      }
    }

    "map a type correctly with a MappingQueueConsumer" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        val mapped = queue.map[Int]((str: String) => str.toInt)

        mapped.withConsumer((int: Int) => { probe.ref ! int; Future.successful(()) })
        queue.publish("123")

        probe.expectMsg(123)
      }
    }

    "map a type correctly with a MappingQueuePublisher" in {
      withInMemoryQueue[String]() { (queue: InMemoryAkkaQueue[String], probe) =>
        val mapped: QueuePublisher[Int] = queue.map[Int]((int: Int) => int.toString)

        queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) })
        mapped.publish(123)

        probe.expectMsg("123")
      }
    }
  }
} 
Example 91
Source File: RabbitAkkaPubSubTestKitSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.testkits

import cool.graph.bugsnag.BugSnagger
import cool.graph.messagebus.Conversions
import cool.graph.messagebus.pubsub.{Message, Only}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}
import play.api.libs.json.Json

class RabbitAkkaPubSubTestKitSpec extends WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with ScalaFutures {

  case class TestMessage(id: String, testOpt: Option[Int], testSeq: Seq[String])

  implicit val bugSnagger: BugSnagger = null
  implicit val testMessageFormat      = Json.format[TestMessage]
  implicit val testMarshaller         = Conversions.Marshallers.FromJsonBackedType[TestMessage]()
  implicit val testUnmarshaller       = Conversions.Unmarshallers.ToJsonBackedType[TestMessage]()

  val amqpUri = sys.env.getOrElse("RABBITMQ_URI", sys.error("RABBITMQ_URI required for testing"))
  val testRK  = Only("SomeRoutingKey")

  var testKit: RabbitAkkaPubSubTestKit[TestMessage] = _

  override def beforeEach = {
    testKit = RabbitAkkaPubSubTestKit[TestMessage](amqpUri, "test")
    testKit.start.futureValue
  }

  override def afterEach(): Unit = testKit.stop.futureValue

  "The rabbit pubsub testing kit" should {

    
    "should expect a message correctly" in {
      val testMsg = TestMessage("someId1", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)
      testKit.expectMsg(Message[TestMessage](testRK.topic, testMsg))
    }

    "should blow up it expects a message and none arrives" in {
      val testMsg = TestMessage("someId2", None, Seq("1", "2"))

      an[AssertionError] should be thrownBy {
        testKit.expectMsg(Message[TestMessage](testRK.topic, testMsg))
      }
    }

    "should expect no message correctly" in {
      testKit.expectNoMsg()
    }

    "should blow up if no message was expected but one arrives" in {
      val testMsg = TestMessage("someId3", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)

      an[AssertionError] should be thrownBy {
        testKit.expectNoMsg()
      }
    }

    "should expect a message count correctly" in {
      val testMsg  = TestMessage("someId4", None, Seq("1", "2"))
      val testMsg2 = TestMessage("someId5", Some(123), Seq("2", "1"))

      testKit.publish(testRK, testMsg)
      testKit.publish(testRK, testMsg2)

      testKit.expectMsgCount(2)
    }

    "should blow up if it expects a message count and less arrive" in {
      val testMsg = TestMessage("someId6", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)

      an[AssertionError] should be thrownBy {
        testKit.expectMsgCount(2)
      }
    }

    "should blow up if it expects a message count and more arrive" in {
      val testMsg  = TestMessage("someId7", None, Seq("1", "2"))
      val testMsg2 = TestMessage("someId8", Some(123), Seq("2", "1"))

      testKit.publish(testRK, testMsg)
      testKit.publish(testRK, testMsg2)

      an[AssertionError] should be thrownBy {
        testKit.expectMsgCount(1)
      }
    }
  }
} 
Example 92
Source File: WebsocketSessionSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.subscriptions.websockets

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestProbe
import cool.graph.messagebus.testkits.spechelpers.InMemoryMessageBusTestKits
import cool.graph.websockets.WebsocketSession
import cool.graph.websockets.protocol.Request
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class WebsocketSessionSpec
    extends InMemoryMessageBusTestKits(ActorSystem("websocket-session-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with ScalaFutures {

  override def afterAll = shutdown()

  "The WebsocketSession" should {
    "send a message with the body STOP to the requests queue AND a Poison Pill to the outActor when it is stopped" in {
      withQueueTestKit[Request] { testKit =>
        val projectId = "projectId"
        val sessionId = "sessionId"
        val outgoing  = TestProbe().ref
        val probe     = TestProbe()

        probe.watch(outgoing)

        val session = system.actorOf(Props(WebsocketSession(projectId, sessionId, outgoing, testKit, bugsnag = null)))

        system.stop(session)
        probe.expectTerminated(outgoing)
        testKit.expectPublishedMsg(Request(sessionId, projectId, "STOP"))
      }
    }
  }
} 
Example 93
Source File: JsonYQLParserSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.actors

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit._
import edu.neu.coe.csye7200.hedge_fund.model.Model
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.io.Source
import scala.concurrent.duration._
import spray.http._
import org.scalatest.Inside

import scala.language.postfixOps
import spray.http.ContentType.apply


class JsonYQLParserSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("JsonYQLParserSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  import scala.language.postfixOps
  val json = Source.fromFile(getClass.getResource("/yqlExample.json").getPath) mkString

  "json conversion" in {
    val body = HttpEntity(MediaTypes.`application/json`, json.getBytes())
    val ok = JsonYQLParser.decode(body) match {
      case Right(x) =>
        val count = x.query.count
        count should equal(4)
        x.query.results.quote.length should equal(count)
        x.query.results.get(count - 1, "symbol") should matchPattern { case Some("MSFT") => }

      case Left(x) =>
        fail("decoding error: " + x)
    }
  }

  "send back" in {
    val blackboard = system.actorOf(Props.create(classOf[MockYQLBlackboard], testActor), "blackboard")
    val entityParser = _system.actorOf(Props.create(classOf[EntityParser], blackboard), "entityParser")
    val entity = HttpEntity(MediaTypes.`application/json`, json.getBytes())
    entityParser ! EntityMessage("json:YQL", entity)
    val msg = expectMsgClass(3.seconds, classOf[QueryResponse])
    println("msg received: " + msg)
    msg should matchPattern {
      case QueryResponse("MSFT", _) =>
    }
    inside(msg) {
      case QueryResponse(symbol, attributes) => attributes.get("Ask") should matchPattern { case Some("46.17") => }
    }
  }

}

import akka.pattern.ask
import scala.concurrent.Await

class MockYQLUpdateLogger(blackboard: ActorRef) extends UpdateLogger(blackboard) {
  override def processStock(identifier: String, model: Model) = {
    model.getKey("price") match {
      case Some(p) => {
        // sender is the MarketData actor
        val future = sender ? SymbolQuery(identifier, List(p))
        val result = Await.result(future, timeout.duration).asInstanceOf[QueryResponse]
        result.attributes map {
          case (k, v) =>
            log.info(s"$identifier attribute $k has been updated to: $v")
            blackboard ! result
        }
      }
      case None => log.warning(s"'price' not defined in model")
    }
  }
}

class MockYQLBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[MockYQLUpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => msg match {
        // Cut down on the volume of messages
        case Confirmation("MSFT", _, _) => super.receive(msg)
        case _ =>
      }
      case msg: QueryResponse => testActor forward msg

      case msg => super.receive(msg)
    }
} 
Example 94
Source File: OptionAnalyzerSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.actors

import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpecLike}
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit._

import scala.concurrent.duration._
import org.scalatest.Inside
import akka.actor.actorRef2Scala
import edu.neu.coe.csye7200.hedge_fund.model.GoogleOptionModel
import org.scalatest.tagobjects.Slow


class OptionAnalyzerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("OptionAnalyzerSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "send back" taggedAs(Slow) in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockAnalyzerBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", true, Map("strike" -> "45.2"), Map("underlying_id" -> "1234", "Sharpe" -> 0.45))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, model, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(model, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponse])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponse(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockAnalyzerBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[UpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => testActor forward msg
      case msg: QueryResponse => testActor forward msg
      case msg => super.receive(msg)
    }
} 
Example 95
Source File: JsonGoogleOptionParserSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.actors

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit._
import edu.neu.coe.csye7200.hedge_fund.model.Model
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.io.Source
import scala.concurrent.duration._
import spray.http._
import spray.http.MediaTypes._
import org.scalatest.Inside
import org.scalatest.tagobjects.Slow

import scala.language.postfixOps
import spray.json.pimpString


class JsonGoogleOptionParserSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("JsonGoogleParserSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  import scala.language.postfixOps
  val json = Source.fromFile(getClass.getResource("/googleOptionExample.json").getPath) mkString

  "json read" in {
    import spray.json._
    val obj = JsonGoogleOptionParser.fix(json).parseJson
  }

  "json conversion" in {
    val contentType = ContentType(MediaTypes.`application/json`, HttpCharsets.`UTF-8`)
    val entity = HttpEntity(contentType, json.getBytes())
    val ok = JsonGoogleOptionParser.decode(entity) match {
      case Right(x) =>
        x.puts.length should equal(20)
        val puts = x.puts
        puts(0).get("s") should matchPattern { case Some("MSFT150731P00042500") => }

      case Left(x) =>
        fail("decoding error: " + x)
    }
  }

  "send back" taggedAs(Slow) in {
    val blackboard = system.actorOf(Props.create(classOf[MockGoogleOptionBlackboard], testActor), "blackboard")
    val entityParser = _system.actorOf(Props.create(classOf[EntityParser], blackboard))
    val contentType = ContentType(MediaTypes.`application/json`, HttpCharsets.`UTF-8`)
    val entity = HttpEntity(contentType, json.getBytes()) 
    entityParser ! EntityMessage("json:GO", entity)
    val msg = expectMsgClass(3.seconds, classOf[QueryResponse])
    println("msg received: " + msg)
    msg should matchPattern {
      case QueryResponse(_, _) =>
    }
  }
}

import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import scala.concurrent.Await


class MockGoogleOptionUpdateLogger(blackboard: ActorRef) extends UpdateLogger(blackboard) {
  override def processOption(identifier: String, model: Model, attributes: Map[String, Any]) = {
    val keys = model mapKeys (List("underlying", "strikePrice", "expiry"))
    println(s"$keys")
    val future = blackboard ? OptionQuery(identifier, keys)
    val result = Await.result(future, timeout.duration).asInstanceOf[QueryResponse]
    blackboard ! result
  }
}

class MockGoogleOptionBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[MockGoogleOptionUpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => msg match {
        // Cut down on the volume of messages
        case Confirmation("MSFT150731P00045000", _, _) => super.receive(msg)
        case _ =>
      }
      case msg: QueryResponse => testActor forward msg
      case msg => super.receive(msg)
    }
} 
Example 96
Source File: PortfolioSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.portfolio

import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpecLike}
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit._

import scala.concurrent.duration._
import org.scalatest.Inside
import akka.actor.actorRef2Scala
import com.typesafe.config.ConfigFactory
import edu.neu.coe.csye7200.hedge_fund.HedgeFund
import edu.neu.coe.csye7200.hedge_fund.actors._
import edu.neu.coe.csye7200.hedge_fund.model.GoogleOptionModel
import org.scalatest.tagobjects.Slow


class PortfolioSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("MockPortfolioBlackboard"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }
  
  "read portfolio" taggedAs(Slow) in {
    val config = ConfigFactory.load
    val portfolio = HedgeFund.getPortfolio(config)
    portfolio.name shouldEqual "Test Portfolio"
    println(s"portfolio: $portfolio")
  }

  "send back" taggedAs(Slow) in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockPortfolioBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", true, Map("strike" -> "45.2"), Map("underlying_id" -> "1234"))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, model, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(model, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponse])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponse(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockPortfolioBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[PortfolioUpdate] -> "updateLogger", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[UpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => testActor forward msg
      case msg: QueryResponse => testActor forward msg
//      case msg: CandidateOption => testActor forward msg
      case msg => super.receive(msg)
    }
} 
Example 97
Source File: QuerySpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.http

import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpecLike}
import edu.neu.coe.csye7200.hedge_fund.model.{GoogleQuery, YQLQuery}


class QuerySpec extends WordSpecLike with Matchers with Inside {

  "YQL tech query" in {
    val symbols = List("YHOO", "AAPL", "GOOG", "MSFT")
    val uri = YQLQuery("json", true).createQuery(symbols)
    println(uri.toString)
    uri.toString shouldEqual "https://query.yahooapis.com/v1/public/yql?format=json&callback=&q=select+*+from+yahoo.finance.quotes+where+symbol+in+(%22YHOO%22,%22AAPL%22,%22GOOG%22,%22MSFT%22)&diagnostics=true&env=http://datatables.org/alltables.env"
  }

  "Google tech query" in {
    val symbols = List("AAPL", "YHOO")
    val uri = GoogleQuery("NASDAQ").createQuery(symbols)
    println(uri.toString)
    // TODO this is actually incorrect (and so is code being tested)--fix it
    uri.toString shouldEqual "https://finance.google.com/finance/info?q=NASDAQ:AAPL,YHOO&client=ig"
  }

} 
Example 98
Source File: RuleSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.rules

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import spray.http._


class RuleSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = Rule(predicate)
    rule.apply(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Right(true) =>
    }
    rule.apply(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Right(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = Rule(predicate)
    inside(rule.apply(MapCandidate("test", Map("y" -> "2")))) {
      case Left(x) => println(x)
    }
    inside(rule.apply(MapCandidate("test", Map("x" -> "y")))) {
      case Left(x) => println(x)
    }
  }

  "Simple Rule" in {
    val predicate = Rule("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
  }
  "Compound Rule" in {
    val predicate = Rule("(x < 3) & (y > 1)")
    predicate should matchPattern {
      case And(NumberPredicate("x", LessThan(), 3), NumberPredicate("y", GreaterThan(), 1)) =>
    }
  }
  "Nested Rule" in {
    val predicate = Rule("(x < 3) & ((y > 1) | (z = 0))")
    predicate should matchPattern {
      case And(
        NumberPredicate("x", LessThan(), 3),
        Or(
          NumberPredicate("y", GreaterThan(), 1),
          NumberPredicate("z", Equals(), 0))) =>
    }
  }
} 
Example 99
Source File: PredicateSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.rules

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import spray.http._


class PredicateSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    predicate.apply(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Right(true) =>
    }
    predicate.apply(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Right(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    inside(predicate.apply(MapCandidate("test", Map("y" -> "2")))) {
      case Left(x) => println(x)
    }
    inside(predicate.apply(MapCandidate("test", Map("x" -> "y")))) {
      case Left(x) => println(x)
    }
  }

  "String Predicate" in {
    val predicate = Predicate("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
    predicate shouldEqual NumberPredicate("x", "<", 3)
  }

  "Text Predicate" in {
    val predicate = Predicate("x == Hello")
    predicate.apply(MapCandidate("test", Map("x" -> "Hello"))) should matchPattern {
      case Right(true) =>
    }
  }
} 
Example 100
Source File: OptionAnalyzerSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.actors

import akka.actor.{ActorRef, ActorSystem, Props, actorRef2Scala}
import akka.testkit._
import edu.neu.coe.csye7200.model.GoogleOptionModel
import org.scalatest.tagobjects.Slow
import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpecLike}

import scala.concurrent.duration._


class OptionAnalyzerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("OptionAnalyzerSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "send back" taggedAs Slow in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockAnalyzerBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", put = true, Map("strike" -> "45.2"), Map("underlying_id" -> "1234", "Sharpe" -> 0.45))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, m, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(m, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponse])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponse(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockAnalyzerBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[UpdateLogger])) {

  override def receive: PartialFunction[Any, Unit] = {
    case msg: Confirmation => testActor forward msg
    case msg: QueryResponse => testActor forward msg
    case msg => super.receive(msg)
  }
} 
Example 101
Source File: SimpleRuleSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.oldrules

import org.scalatest.{Inside, Matchers, WordSpecLike}

import scala.util.{Failure, Success}


class SimpleRuleSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = SimpleRule(predicate)
    rule(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Success(true) =>
    }
    rule(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Success(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = SimpleRule(predicate)
    inside(rule(MapCandidate("test", Map("y" -> "2")))) {
      case Failure(x) => println(x)
    }
    inside(rule(MapCandidate("test", Map("x" -> "y")))) {
      case Failure(x) => println(x)
    }
  }

  "Simple Rule" in {
    val predicate = SimpleRule("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
  }
  "Parenthesized Rule" in {
    val predicate = SimpleRule("(x < 3)")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
  }
  "Compound Rule" in {
    val predicate = SimpleRule("(x < 3) & (y > 1)")
    predicate should matchPattern {
      case ComposedPredicate(NumberPredicate("x", LessThan(), 3), NumberPredicate("y", GreaterThan(), 1), Predicate.and) =>
    }
  }

  // FIXME: reimplement rules in terms of Rule class

  //  "Nested Rule" in {
  //    val predicate = SimpleRule("(x < 3) & ((y > 1) | (z = 0))")
  //    predicate should matchPattern {
  //      case
  //      ComposedPredicate(NumberPredicate("x", LessThan(), 3), ComposedPredicate (
  //      NumberPredicate("y", GreaterThan(), 1),
  //      NumberPredicate("z", Equals(), 0), Predicate.or), Predicate.and) =>
  //    }
  //  }
} 
Example 102
Source File: PredicateSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.oldrules

import org.scalatest.{Inside, Matchers, WordSpecLike}

import scala.util.{Failure, Success}


class PredicateSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    predicate(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Success(true) =>
    }
    predicate(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Success(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    inside(predicate(MapCandidate("test", Map("y" -> "2")))) {
      case Failure(x) => println(x)
    }
    inside(predicate(MapCandidate("test", Map("x" -> "y")))) {
      case Failure(x) => println(x)
    }
  }

  "String Predicate" in {
    val predicate = Predicate("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
    predicate shouldEqual NumberPredicate("x", "<", 3)
  }

  "Text Predicate" in {
    val predicate = Predicate("x == Hello")
    predicate(MapCandidate("test", Map("x" -> "Hello"))) should matchPattern {
      case Success(true) =>
    }
  }
} 
Example 103
Source File: PortfolioSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.portfolio

import akka.actor.{ActorRef, ActorSystem, Props, actorRef2Scala}
import akka.testkit._
import com.typesafe.config.ConfigFactory
import edu.neu.coe.csye7200.HedgeFund
import edu.neu.coe.csye7200.actors._
import edu.neu.coe.csye7200.model.GoogleOptionModel
import org.scalatest.tagobjects.Slow
import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpecLike}

import scala.concurrent.duration._
import scala.util.Success


class PortfolioSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("MockPortfolioBlackboard"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "read portfolio" taggedAs Slow in {
    val config = ConfigFactory.load
    val portfolio = HedgeFund.getPortfolio(config)
    portfolio should matchPattern { case Some(_) => }
    portfolio.get.name shouldEqual "Test Portfolio"
    println(s"portfolio: $portfolio")
  }

  "send back" taggedAs Slow in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockPortfolioBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", put = true, Map("strike" -> "45.2"), Map("underlying_id" -> "1234"))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, m, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(m, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponse])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponse(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockPortfolioBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[PortfolioUpdate] -> "updateLogger", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[UpdateLogger])) {

  override def receive: PartialFunction[Any, Unit] = {
    case msg: Confirmation => testActor forward msg
    case msg: QueryResponse => testActor forward msg
    //      case msg: CandidateOption => testActor forward msg
    case msg => super.receive(msg)
  }
} 
Example 104
Source File: QuerySpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.http

import edu.neu.coe.csye7200.model.{GoogleQuery, YQLQuery}
import org.scalatest.{Inside, Matchers, WordSpecLike}


class QuerySpec extends WordSpecLike with Matchers with Inside {

  "YQL tech query" in {
    val symbols = List("YHOO", "AAPL", "GOOG", "MSFT")
    val uri = YQLQuery("json", diagnostics = true).createQuery(symbols)
    println(uri.toString)
    uri.toString shouldEqual "https://query.yahooapis.com/v1/public/yql?format=json&callback=&q=select+*+from+yahoo.finance.quotes+where+symbol+in+(%22YHOO%22,%22AAPL%22,%22GOOG%22,%22MSFT%22)&diagnostics=true&env=http://datatables.org/alltables.env"
  }

  "Google tech query" in {
    val symbols = List("AAPL", "YHOO")
    val uri = GoogleQuery("NASDAQ").createQuery(symbols)
    println(uri.toString)
    // TODO this is actually incorrect (and so is code being tested)--fix it
    uri.toString shouldEqual "https://finance.google.com/finance/info?q=NASDAQ:AAPL,YHOO&client=ig"
  }

} 
Example 105
Source File: HelloWorldActorSpec.scala    From Scala-Reactive-Programming   with MIT License 5 votes vote down vote up
package com.packt.publishing.reactive.hello.actor.v2

import akka.actor.{ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import com.packt.publishing.reactive.hello.model.HelloWorld
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._

class HelloWorldActorSpec(actorSystem: ActorSystem) extends TestKit(actorSystem)
             with Matchers with WordSpecLike with BeforeAndAfterAll {

  def this() = this(ActorSystem("AkkaHelloWorld"))

  "HelloWorld Actor" should {
      "pass on a HelloWorld message" in {
        val testProbe = TestProbe()
        val helloWorldActor = system.actorOf(Props(new HelloWorldActor(testProbe.ref)), "HelloWorldActor")
        helloWorldActor ! HelloWorld
        testProbe.expectMsg(500 millis, HelloWorld)
      }
  }
  override def afterAll: Unit = {
    shutdown(system)
  }

} 
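For reference, the v2 actor under test only has to pass the HelloWorld message on to the injected target for the probe expectation to hold; a hedged sketch is below (the actor in the book's sources may also log the greeting or do further work).

import akka.actor.{Actor, ActorRef}

class HelloWorldActorSketch(target: ActorRef) extends Actor {
  // forward whatever arrives (here: the HelloWorld message) to the injected target
  def receive: Receive = { case msg => target ! msg }
}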
Example 106
Source File: HelloWorldActorSpec.scala    From Scala-Reactive-Programming   with MIT License 5 votes vote down vote up
package com.packt.publishing.reactive.hello.actor

import akka.actor.{ActorSystem, Props}
import akka.testkit.{EventFilter, TestKit, TestProbe}
import com.packt.publishing.reactive.hello.actor.HelloWorldActorSpec._
import com.packt.publishing.reactive.hello.model.HelloWorld
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class HelloWorldActorSpec extends TestKit(system)
           with Matchers with WordSpecLike with BeforeAndAfterAll {

  "HelloWorld Actor" should {
      "pass on a HelloWorld message" in {
        val testProbe = TestProbe()
        val helloWorldActor = system.actorOf(Props[HelloWorldActor], "HelloWorldActor")
        EventFilter.info(message = "Hello World", occurrences = 1)
          .intercept(helloWorldActor ! HelloWorld)
      }
  }

  override def afterAll: Unit = {
    shutdown(system)
  }

}

object HelloWorldActorSpec {
  val system = {
    val loggerConfig = ConfigFactory.parseString("akka.loggers = [akka.testkit.TestEventListener]")
    ActorSystem("AkkaHelloWorld", loggerConfig)
  }
} 
Example 107
Source File: EntitySupport.scala    From akka-cqrs   with Apache License 2.0 5 votes vote down vote up
package com.productfoundry.akka.cqrs

import akka.actor.{ActorRef, ActorSystem, PoisonPill, Terminated}
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Millis, Second, Span}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._

abstract class EntitySupport(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with BeforeAndAfter
  with Eventually {

  
  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
} 
Example 108
Source File: PersistenceTestSupport.scala    From akka-cqrs   with Apache License 2.0 5 votes vote down vote up
package com.productfoundry.support

import java.util.UUID

import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Millis, Span}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

abstract class PersistenceTestSupport
  extends TestKit(TestConfig.testSystem)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with Eventually {

  def randomPersistenceId = UUID.randomUUID.toString

  implicit override val patienceConfig = PatienceConfig(
    timeout = scaled(Span(500, Millis)),
    interval = scaled(Span(10, Millis))
  )

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
} 
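A hypothetical usage sketch of the base class above: a concrete spec inherits WordSpecLike, Matchers, Eventually, ImplicitSender and the shared test actor system, so it only adds test cases. The spec name and assertion here are illustrative only.

package com.productfoundry.support

class RandomPersistenceIdSpec extends PersistenceTestSupport {

  "randomPersistenceId" should {
    "produce a fresh id on every call" in {
      randomPersistenceId should not equal randomPersistenceId
    }
  }
}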
Example 109
Source File: SpecBase.scala    From kafka-lag-exporter   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.kafkalagexporter.integration

import akka.actor.typed.ActorSystem
import akka.kafka.testkit.scaladsl.{EmbeddedKafkaLike, ScalatestKafkaSpec}
import com.lightbend.kafkalagexporter.MainApp
import com.lightbend.kafkalagexporter.KafkaClusterManager
import com.typesafe.config.{Config, ConfigFactory}
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.Await
import scala.concurrent.duration._

abstract class SpecBase(kafkaPort: Int, val exporterPort: Int)
  extends ScalatestKafkaSpec(kafkaPort)
    with WordSpecLike
    with BeforeAndAfterEach
    with EmbeddedKafkaLike
    with Matchers
    with ScalaFutures
    with Eventually
    with PrometheusUtils
    with LagSim {

  override def createKafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort,
      zooKeeperPort,
      Map(
        "offsets.topic.replication.factor" -> "1"
      ))

  var kafkaLagExporter: ActorSystem[KafkaClusterManager.Message] = _

  val clusterName = "default"

  val config: Config = ConfigFactory.parseString(s"""
                                            |kafka-lag-exporter {
                                            |  port: $exporterPort
                                            |  clusters = [
                                            |    {
                                            |      name: "$clusterName"
                                            |      bootstrap-brokers: "localhost:$kafkaPort"
                                            |    }
                                            |  ]
                                            |  poll-interval = 5 seconds
                                            |  lookup-table-size = 20
                                            |}""".stripMargin).withFallback(ConfigFactory.load())

  override def beforeEach(): Unit = {
    kafkaLagExporter = MainApp.start(config)
  }

  override def afterEach(): Unit = {
    kafkaLagExporter ! KafkaClusterManager.Stop
    Await.result(kafkaLagExporter.whenTerminated, 10 seconds)
  }
} 
Example 110
Source File: RegionSpec.scala    From affinity   with Apache License 2.0 5 votes vote down vote up
package io.amient.affinity.core.actor

import akka.actor.{ActorPath, ActorSystem, PoisonPill, Props}
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import io.amient.affinity.AffinityActorSystem
import io.amient.affinity.core.cluster.Coordinator
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.{Matchers, WordSpecLike}

import scala.concurrent.duration._
import scala.language.postfixOps


class RegionSpec extends WordSpecLike with Matchers with Eventually with IntegrationPatience {

  val system: ActorSystem = AffinityActorSystem.create(ConfigFactory.load("regionspec"))

  val testPartition = Props(new Partition {
    override def preStart(): Unit = {
      Thread.sleep(100)
      super.preStart()
    }

    override def handle: Receive = {
      case _: IllegalStateException => context.stop(self)
      case _ =>
    }
  })


  "A Region Actor" must {
    "must keep Coordinator Updated during partition failure & restart scenario" in {
      //      val zk = new EmbeddedZookeperServer {}
      try {
        val coordinator = Coordinator.create(system, "region")
        try {
          val d = 1 second
          implicit val timeout = Timeout(d)

          val region = system.actorOf(Props(new Container("region") {
            val partitions = List(0, 1, 2, 3)
            for (partition <- partitions) {
              context.actorOf(testPartition, name = partition.toString)
            }
          }), name = "region")
          eventually {
            coordinator.members.size should be(4)
          }

          //first stop Partition explicitly - it shouldn't be restarted
          import system.dispatcher
          system.actorSelection(ActorPath.fromString(coordinator.members.head._2)).resolveOne.foreach {
            case actorRef => system.stop(actorRef)
          }
          eventually {
            coordinator.members.size should be(3)
          }

          //now simulate error in one of the partitions
          val partitionToFail = coordinator.members.head._2
          system.actorSelection(ActorPath.fromString(partitionToFail)).resolveOne.foreach {
            case actorRef => actorRef ! new IllegalStateException("Exception expected by the Test")
          }
          eventually {
            coordinator.members.size should be(2)
          }
          eventually {
            coordinator.members should not contain (partitionToFail)
          }

          region ! PoisonPill

        } finally {
          coordinator.close
        }
      } finally {
        //        zk.close()
      }
    }
  }

}

class RegionSpecPartition extends Partition {
  override def preStart(): Unit = {
    Thread.sleep(100)
    super.preStart()
  }

  override def handle: Receive = {
    case _: IllegalStateException => context.stop(self)
    case _ =>
  }
} 
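RegionSpec leans on ScalaTest's Eventually with IntegrationPatience to wait for the coordinator membership to settle. The following self-contained sketch shows that retry pattern in isolation; the background thread and all names are illustrative only:

// Self-contained sketch of the eventually { ... } retry pattern used by RegionSpec
// (IntegrationPatience widens the default ScalaTest timeouts for slower assertions).
import java.util.concurrent.atomic.AtomicInteger

import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.{Matchers, WordSpecLike}

class EventuallySketch extends WordSpecLike with Matchers with Eventually with IntegrationPatience {

  "eventually" should {
    "retry an assertion until asynchronous work has completed" in {
      val members = new AtomicInteger(0)
      val registration = new Thread(new Runnable {
        def run(): Unit = { Thread.sleep(200); members.set(4) }
      })
      registration.start()

      // Keeps re-evaluating the block until it passes or the patience timeout expires.
      eventually {
        members.get should be(4)
      }
    }
  }
}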
Example 111
Source File: ApiKeyAuthSpec.scala    From shield   with MIT License 5 votes vote down vote up
package shield.actors.middleware

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import org.scalatest.{MustMatchers, WordSpecLike}
import shield.actors.{DownstreamRequest, ForwardRequestCmd, ForwardResponseCmd, ResponseDetails}
import shield.config.Settings
import shield.proxying.FailBalancer
import shield.routing._
import spray.http.HttpHeaders.RawHeader
import spray.http._

class ApiKeyAuthSpec extends TestKit(ActorSystem("testSystem"))
  with WordSpecLike
  with MustMatchers
  with ImplicitSender {

  "ApiKeyAuthTest middleware actor" must {
    val stage = "myStage"
    val settings = Settings(system)
    val location = settings.DefaultServiceLocation
    val getEndpoint = EndpointTemplate(HttpMethods.GET, Path("/foobar"))
    val routingDestination = RoutingDestination(getEndpoint, List(), List(), FailBalancer)

    def httpRequest(headers: List[HttpHeader]): HttpRequest = HttpRequest(HttpMethods.GET, "/v4/mobile/stores", headers)

    def forwardResponseCmd(response: HttpResponse) = {
      ForwardResponseCmd(
        stage,
        ResponseDetails(
          location,
          settings.LocalServiceName,
          getEndpoint,
          None,
          response)
      )
    }

    "reply with Forbidden when created with bad parameters" in {
      val actor = TestActorRef(ApiKeyAuth.props("", Set(""), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List()))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Forbidden)))
    }

    "reply with Forbidden when given no headers" in {
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List()))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Forbidden)))
    }

    "reply with Unauthorized when given an incorrect header" in {
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List(RawHeader("pid","asdasdada"))))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Unauthorized)))
    }

    "succeed with a downstream request when given the correct header and value" in {
      val request = httpRequest(List(RawHeader("pid","BA914464-C559-4F81-A37E-521B830F1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "succeed with a downstream request when given a correct but capitalized header" in {
      val request = httpRequest(List(RawHeader("PID","BA914464-C559-4F81-A37E-521B830F1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "succeed with a downstream request when given a case-insensitive value and case sensitivity is off" in {
      val request = httpRequest(List(RawHeader("pid","ba914464-c559-4f81-a37e-521b830f1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), false, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "reply with Unauthorized when given a case-insensitive value and case sensitivity is on" in {
      val request = httpRequest(List(RawHeader("pid","ba914464-c559-4f81-a37e-521b830f1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Unauthorized)))
    }

  }
} 
Example 112
Source File: StaticDomainWatcherSpec.scala    From shield   with MIT License 5 votes vote down vote up
package shield.actors.config.domain

import akka.actor.{ActorSystem, Props}
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpecLike}
import shield.actors.ShieldActorMsgs
import shield.config.Settings

class StaticDomainWatcherSpec extends TestKit(ActorSystem("testSystem"))
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterAll {

  val settings = Settings(system)

  "StaticDomainWatcher" should {

    "notify shield about domains found" in {
      val parent = TestProbe()
      TestActorRef(Props(new StaticDomainWatcher()), parent.ref, "static-domain-watcher")

      val msg: ShieldActorMsgs.DomainsUpdated = parent.expectMsgClass(classOf[ShieldActorMsgs.DomainsUpdated])
      msg.domains.size must equal (settings.config.getConfigList("shield.domains").size)
    }
  }

} 
Example 113
Source File: LogCollectorSpec.scala    From shield   with MIT License 5 votes vote down vote up
package shield.actors.listeners

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.WordSpecLike
import org.specs2.matcher.MustMatchers
import shield.config.{DomainSettings, Settings}
import spray.http.HttpHeaders.RawHeader
import spray.http.HttpRequest
import spray.json.JsString

class LogCollectorSpec extends TestKit(ActorSystem("testSystem"))
  // Using the ImplicitSender trait will automatically set `testActor` as the sender
  with ImplicitSender
  with WordSpecLike
  with MustMatchers {

  import akka.testkit.TestActorRef
  val settings = Settings(system)
  val domainSettings = new DomainSettings(settings.config.getConfigList("shield.domains").get(0), system)

  val actorRef = TestActorRef(new LogCollector("1",domainSettings,Seq[ActorRef](),5))
  val actor = actorRef.underlyingActor

  "LogCollector" should {
    "Extracts headers and adds them to access logs" in {
      val request = HttpRequest().withHeaders(
        RawHeader("sample", "header"),
        RawHeader("test", "test1"),
        RawHeader("test2", "123"),
        RawHeader("test-header-3", "abc"),
        RawHeader("hh", "aaa"),
        RawHeader("hhh", "bbb")
      )

      val extractedHeaders = actor.extractHeaders(request.headers, Set("test-header-3", "hh", "sample", "DNE"))

      extractedHeaders.keys.size must be equalTo 3
      extractedHeaders.get("hh").get must be equalTo JsString("aaa")
      extractedHeaders.get("test-header-3").get must be equalTo JsString("abc")
      extractedHeaders.get("sample").get must be equalTo JsString("header")
    }
  }
} 
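The comment in LogCollectorSpec explains the ImplicitSender trait: mixing it in makes testActor the implicit sender, so replies from the actor under test can be asserted with expectMsg directly. A minimal, self-contained sketch of that mechanism (the echo actor and names are illustrative only):

// Minimal sketch of what ImplicitSender buys you: replies from the actor under
// test come straight back to testActor, so expectMsg can assert on them.
import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class ImplicitSenderSketch extends TestKit(ActorSystem("implicit-sender-sketch"))
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  "an echo actor" should {
    "reply to the implicit sender" in {
      val echo = system.actorOf(Props(new Actor {
        def receive = { case msg => sender() ! msg }
      }))
      echo ! "ping"
      expectMsg("ping") // arrives at testActor because of ImplicitSender
    }
  }
}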
Example 114
Source File: ThroughputMeasurementFlowTest.scala    From akka-viz   with MIT License 5 votes vote down vote up
package akkaviz.events

import akka.actor.{ActorRef, ActorSystem}
import akka.pattern
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Keep, Sink, Source}
import akka.testkit.{TestActorRef, TestKit}
import akkaviz.events.types.{BackendEvent, ReceivedWithId, ThroughputMeasurement}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpecLike}

import scala.concurrent.Future

class ThroughputMeasurementFlowTest extends TestKit(ActorSystem("FlowTestSystem"))
    with WordSpecLike with Matchers with ScalaFutures {

  import scala.concurrent.duration._

  implicit val materializer = ActorMaterializer()(system)

  val firstRef = TestActorRef[SomeActor](new SomeActor, "first")
  val secondRef = TestActorRef[SomeActor](new SomeActor, "second")

  override implicit val patienceConfig = PatienceConfig(timeout = 5.seconds)

  "ThroughputMeasurementFlow" should {

    "not emit any measurements if there are no Received events" in {
      val src = Source.empty[BackendEvent]
      val sink: Sink[BackendEvent, Future[List[BackendEvent]]] = Sink.fold(List.empty[BackendEvent])((list, ev) => ev :: list)

      val materialized = ThroughputMeasurementFlow(1.second).runWith(src, sink)._2

      whenReady(materialized) { r =>
        r should be('empty)
      }
    }

    "emit proper measured value for one message" in {
      val src = Source.single(ReceivedWithId(1, ActorRef.noSender, firstRef, "sup", true))
      val mat = src.via(ThroughputMeasurementFlow(1.second))
        .toMat(Sink.head[ThroughputMeasurement])(Keep.right).run()

      whenReady(mat) { measurement =>
        measurement.actorRef should equal(firstRef)
        measurement.msgsPerSecond should equal(1.0)
      }
    }

    "emit measured value for one message and 0 for actors which didn't receive anything" in {
      import system.dispatcher
      val src = Source(List(
        ReceivedWithId(1, ActorRef.noSender, firstRef, "sup", true),
        ReceivedWithId(2, ActorRef.noSender, secondRef, "sup", true)
      )).concat(Source.fromFuture(pattern.after(2.seconds, system.scheduler) {
        Future.successful(ReceivedWithId(3, ActorRef.noSender, firstRef, "sup", true))
      }))

      val mat = src.via(ThroughputMeasurementFlow(1.second))
        .toMat(Sink.fold(List.empty[ThroughputMeasurement]) { (list, ev) => ev :: list })(Keep.right).run()

      whenReady(mat) { measurements =>
        val measurementsFor = measurements.groupBy(_.actorRef)
        measurementsFor(firstRef).map(_.msgsPerSecond) should not contain 0.0
        measurementsFor(secondRef).sortBy(_.timestamp).map(_.msgsPerSecond) should contain inOrder (1.0, 0.0)
      }
    }
  }
} 
Example 115
Source File: AkkaHttpActionAdapterTest.scala    From akka-http-pac4j   with Mozilla Public License 2.0 5 votes vote down vote up
package com.stackstate.pac4j

import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpRequest, HttpResponse}
import org.scalatest.{Matchers, WordSpecLike}
import akka.http.scaladsl.model.StatusCodes._
import akka.util.ByteString
import com.stackstate.pac4j.AkkaHttpActionAdapterTest.ActionInt
import com.stackstate.pac4j.http.AkkaHttpActionAdapter
import com.stackstate.pac4j.store.ForgetfulSessionStorage
import org.pac4j.core.exception.http.{
  BadRequestAction,
  ForbiddenAction,
  FoundAction,
  HttpAction,
  NoContentAction,
  OkAction,
  StatusAction,
  UnauthorizedAction
}
import org.scalatest.concurrent.ScalaFutures

class AkkaHttpActionAdapterTest extends WordSpecLike with Matchers with ScalaFutures {
  "AkkaHttpActionAdapter" should {
    "convert 200 to OK" in withContext { context =>
      AkkaHttpActionAdapter.adapt(new OkAction(""), context).futureValue.response shouldEqual HttpResponse(
        OK,
        Nil,
        HttpEntity(ContentTypes.`application/octet-stream`, ByteString(""))
      )
    }
    "convert 401 to Unauthorized" in withContext { context =>
      AkkaHttpActionAdapter.adapt(UnauthorizedAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(Unauthorized)
      context.getChanges.cookies.map(_.name) shouldBe List(AkkaHttpWebContext.DEFAULT_COOKIE_NAME)
    }
    "convert 302 to SeeOther (to support login flow)" in withContext { context =>
      val r = AkkaHttpActionAdapter.adapt(new FoundAction("/login"), context).futureValue.response
      r.status shouldEqual SeeOther
      r.headers.head.value() shouldEqual "/login"
      context.getChanges.cookies.map(_.name) shouldBe List(AkkaHttpWebContext.DEFAULT_COOKIE_NAME)
    }
    "convert 400 to BadRequest" in withContext { context =>
      AkkaHttpActionAdapter.adapt(BadRequestAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(BadRequest)
    }
    "convert 201 to Created" in withContext { context =>
      AkkaHttpActionAdapter.adapt(201.action(), context).futureValue.response shouldEqual HttpResponse(Created)
    }
    "convert 403 to Forbidden" in withContext { context =>
      AkkaHttpActionAdapter.adapt(ForbiddenAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(Forbidden)
    }
    "convert 204 to NoContent" in withContext { context =>
      AkkaHttpActionAdapter.adapt(NoContentAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(NoContent)
    }
    "convert 200 to OK with content set from the context" in withContext { context =>
      AkkaHttpActionAdapter.adapt(new OkAction("content"), context).futureValue.response shouldEqual HttpResponse
        .apply(OK, Nil, HttpEntity(ContentTypes.`application/octet-stream`, ByteString("content")))
    }
    "convert 200 to OK with content type set from the context" in withContext { context =>
      context.setResponseContentType("application/json")
      AkkaHttpActionAdapter.adapt(new OkAction(""), context).futureValue.response shouldEqual HttpResponse
        .apply(OK, Nil, HttpEntity(ContentTypes.`application/json`, ByteString("")))
    }
  }

  def withContext(f: AkkaHttpWebContext => Unit): Unit = {
    f(AkkaHttpWebContext(HttpRequest(), Seq.empty, new ForgetfulSessionStorage, AkkaHttpWebContext.DEFAULT_COOKIE_NAME))
  }
}

object AkkaHttpActionAdapterTest {
  implicit class ActionInt(val i: Int) extends AnyVal {
    def action(): HttpAction = new StatusAction(i)
  }
} 
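The ActionInt value class at the bottom of this example is what allows the 201.action() call in the "convert 201 to Created" case. The snippet below shows the same enrich-my-library pattern in isolation, with made-up names; it is not part of the akka-http-pac4j API:

// Generic enrich-my-library sketch mirroring ActionInt above (illustrative names only).
import org.scalatest.{Matchers, WordSpecLike}

object StatusSyntax {
  final case class Status(code: Int)

  implicit class StatusInt(val i: Int) extends AnyVal {
    // Lets callers write 201.status instead of Status(201)
    def status: Status = Status(i)
  }
}

class StatusSyntaxSpec extends WordSpecLike with Matchers {
  import StatusSyntax._

  "the StatusInt enrichment" should {
    "build a Status directly from an Int literal" in {
      201.status shouldEqual Status(201)
    }
  }
}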
Example 116
Source File: AkkaHttpSessionStoreTest.scala    From akka-http-pac4j   with Mozilla Public License 2.0 5 votes vote down vote up
package com.stackstate.pac4j.http

import java.util.Optional

import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.testkit.ScalatestRouteTest
import com.stackstate.pac4j.AkkaHttpWebContext
import com.stackstate.pac4j.store.{ForgetfulSessionStorage, InMemorySessionStorage}
import org.scalatest.{Matchers, WordSpecLike}

import scala.concurrent.duration._

class AkkaHttpSessionStoreTest extends WordSpecLike with Matchers with ScalatestRouteTest {
  "AkkaHttpSessionStore.get" should {
    "return null when the data is not available" in {
      new AkkaHttpSessionStore().get(
        new AkkaHttpWebContext(HttpRequest(), Seq.empty, new ForgetfulSessionStorage, AkkaHttpWebContext.DEFAULT_COOKIE_NAME),
        "mykey"
      ) shouldBe Optional.empty()
    }

    "return the data when available" in {
      val context = new AkkaHttpWebContext(HttpRequest(), Seq.empty, new InMemorySessionStorage(30.minutes), AkkaHttpWebContext.DEFAULT_COOKIE_NAME)
      new AkkaHttpSessionStore().set(context, "mykey", "yooo")
      new AkkaHttpSessionStore().get(context, "mykey") shouldBe Optional.of("yooo")
    }
  }
} 
Example 117
Source File: MinNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.nodes.unary.aggregation.{AggregationNode, StatefulMin}
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class MinNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Min" must {
    "do simple min 0" in {
      val changeSet = ChangeSet(
        positive = tupleBag(tuple("a", 1), tuple("a", 2), tuple("a", 1.1), tuple("b", 3))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val min = system.actorOf(Props(
        new AggregationNode(echoActor ! _, functionMask(0), () => Vector(new StatefulMin(1)), Vector(0, 1))))

      min ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("a", 1), tuple("b", 3))
      ))

      min ! ChangeSet(
        negative = tupleBag(tuple("a", 1))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("a", 1.1)),
        negative = tupleBag(tuple("a", 1))
      ))
    }
  }
} 
Example 118
Source File: MaxNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.nodes.unary.aggregation.{AggregationNode, StatefulMax}
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class MaxNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Max" must {
    "do simple max 0" in {
      val changeSet = ChangeSet(
        positive = tupleBag(tuple("a", 1), tuple("a", 2), tuple("a", 1.1), tuple("b", 3))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val max = system.actorOf(Props(new AggregationNode(
        echoActor ! _, functionMask(0), () => Vector(new StatefulMax(1)), Vector(0, 1))))

      max ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("a", 2), tuple("b", 3))
      ))

      max ! ChangeSet(
        negative = tupleBag(tuple("a", 2))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("a", 1.1)),
        negative = tupleBag(tuple("a", 2))
      ))
    }
  }
} 
Example 119
Source File: ConfigurationBuilderSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import org.scalatest.{Matchers, WordSpecLike}

class ConfigurationBuilderSpec extends WordSpecLike with Matchers {
	"ConfigurationBuilder" should {
		"read application.conf" in {
			val builder = new ConfigurationBuilder("test.builder")
			val properties = builder.properties
			properties.getProperty("someProperty") shouldBe "someValue"
			properties.getProperty("anInteger") shouldBe "3"
		}

		"allow to add properties" in {
			val builder = new ConfigurationBuilder("test.builder")
			val properties = builder.properties
			properties.setProperty("vis", "blub")
			properties.getProperty("someProperty") shouldBe "someValue"
			properties.getProperty("vis") shouldBe "blub"
			properties.size shouldBe 3
		}

		"allow to replace properties" in {
			val builder = new ConfigurationBuilder("test.builder")
			val properties = builder.properties
			properties.setProperty("someProperty", "someOtherValue")
			properties.getProperty("someProperty") shouldBe "someOtherValue"
		}

		"properties (Java) return null for missing keys" in {
			val builder = new ConfigurationBuilder("test.builder")
			val properties = builder.properties
			properties.getProperty("aNonExistingProperty") shouldBe null
		}
	}
} 
Example 120
Source File: MinMaxActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._

@RunWith(classOf[JUnitRunner])
class MinMaxActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	implicit val timeout = Timeout(100.millis)
	implicit val formats = org.json4s.DefaultFormats
	implicit val injector = new DefaultModule(system.settings.config)
	def this() = this(ActorSystem("ZscoreActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	"A MinMaxActor" must {
		val createJson = parse(
			"""{ "type": "minmax", "params": { "field": "field1", "min": 10.0, "max": 13.5 }}"""
				.stripMargin).asInstanceOf[JObject]

		implicit val injector = new DefaultModule(system.settings.config)

		val props = CoralActorFactory.getProps(createJson).get
		val threshold = TestActorRef[MinMaxActor](props)

		// subscribe the testprobe for emitting
		val probe = TestProbe()
		threshold.underlyingActor.emitTargets += probe.ref

		"Emit the minimum when lower than the min" in {
			val json = parse( """{"field1": 7 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{ "field1": 10.0 }"""))
		}

		"Emit the maximum when higher than the max" in {
			val json = parse( """{"field1": 15.3 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{"field1": 13.5 }"""))
		}

		"Emit the value itself when between the min and the max" in {
			val json = parse( """{"field1": 11.7 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{"field1": 11.7 }"""))
		}

		"Emit object unchanged when key is not present in triggering json" in {
			val json = parse( """{"otherfield": 15.3 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{"otherfield": 15.3 }"""))
		}
	}
} 
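The emitTargets += probe.ref line above wires a TestProbe in as the downstream consumer of the actor under test. The generic TestProbe pattern this relies on is sketched below, with illustrative names only:

// Self-contained sketch of the TestProbe-as-downstream-consumer pattern.
import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._

class TestProbeForwardingSketch extends TestKit(ActorSystem("testprobe-sketch"))
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  "a forwarding actor" should {
    "emit everything it receives to its registered target" in {
      val probe = TestProbe()
      // Stand-in for an actor with a configurable emit target.
      val forwarder = system.actorOf(Props(new Actor {
        def receive = { case msg => probe.ref ! msg }
      }))

      forwarder ! "event"
      probe.expectMsg("event")
      probe.expectNoMsg(100.millis)
    }
  }
}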
Example 121
Source File: HttpBroadcastActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.ActorSystem
import akka.testkit._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class HttpBroadcastActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll
	with ScalaFutures {
	def this() = this(ActorSystem("HttpBroadcastActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	"A HttpBroadcastActor" should {
		"Instantiate with any json" in {
			val createJson = parse( """{ "type": "httpbroadcast" }""")
			val props = HttpBroadcastActor(createJson)
			assert(props.isDefined)
		}

		"Emit the trigger contents" in {
			val props = HttpBroadcastActor(parse( """{ "type": "httpbroadcast" }"""))
			val actor = TestActorRef[HttpBroadcastActor](props.get).underlyingActor
			val json = parse( """{"emit":"whatever"}""")
			val result = actor.simpleEmitTrigger(json.asInstanceOf[JObject])
			result should be(Some(json))
		}
	}
} 
Example 122
Source File: LinearRegressionActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestProbe, TestActorRef, ImplicitSender, TestKit}
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import akka.util.Timeout
import org.json4s.native.Serialization.write
import scala.concurrent.duration._

class LinearRegressionActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("LinearRegressionActorSpec"))

	implicit val timeout = Timeout(100.millis)
	implicit val injector = new DefaultModule(system.settings.config)

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	def createLinearRegressionActor(intercept: Double, weights: Map[String, Double]) = {
		implicit val formats = DefaultFormats
		val str =
			s"""{ "type":"linearregression",
			   |"params": { "intercept": $intercept,
			   |"weights": ${write(weights)}
			   |}}""".stripMargin

		val createJson = parse(str).asInstanceOf[JObject]
		val props = CoralActorFactory.getProps(createJson).get
		val actorTestRef = TestActorRef[LinearRegressionActor](props)

		val probe = TestProbe()
		actorTestRef.underlyingActor.emitTargets += probe.ref
		(actorTestRef, probe)
	}

	"LinearRegressionActor" should {
		"Instantiate from companion object" in {
			val (actor, _) = createLinearRegressionActor(0, Map("salary" -> 2000))
			actor.underlyingActor.intercept should be(0)
			actor.underlyingActor.weights should be(Map("salary" -> 2000))
		}

		"process trigger data when all the features are available even with different order" in {
			val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 0.1))
			val message = parse( s"""{"salary": 4000, "age": 40}""").asInstanceOf[JObject]
			actor ! message

			probe.expectMsg(parse( s"""{"score": 408.0, "salary": 4000, "age": 40}"""))
		}

		"emit when score is calculated" in {
			val (actor, probe) = createLinearRegressionActor(0, Map("salary" -> 10))
			val message = parse( s"""{"salary": 2000}""").asInstanceOf[JObject]
			actor ! message

			probe.expectMsg(parse( s"""{"score": 20000.0, "salary": 2000}"""))
		}

		"not emit when keys are missing" in {
			val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 10))
			val message = parse( s"""{"salary": 2000}""").asInstanceOf[JObject]
			actor ! message

			probe.expectNoMsg
		}
	}
} 
Example 123
Source File: JsonActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import org.json4s.JsonAST.JValue
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._

class JsonActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("JsonActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	implicit val timeout = Timeout(100.millis)
	def createJsonActor(json: JValue): JsonActor = {
		val props = JsonActor(json).get
		val actorRef = TestActorRef[JsonActor](props)
		actorRef.underlyingActor
	}

	"JsonActor" should {
		"have a standard coral props supplier" in {
			val json = parse("""{ "type": "json", "params": { "template": {} } }""")
			val props = JsonActor(json).get
			props.actorClass shouldBe classOf[JsonActor]
		}

		"read the template parameter" in {
			val template = """{ "a": "someReference" }"""
			val json = parse(s"""{ "type": "json", "params": { "template": $template } }""")
			val actor = createJsonActor(json)
			actor.template.template shouldBe parse(template)
		}

		"emit the json based on template" in {
			val templateJson =
				"""{ "a": "ALPHA",
				  |  "b": "${beta}",
				  |  "c": { "d": 123,
				  |         "e": "${epsilon}"
				  |       },
				  |  "f": 1,
				  |  "g": 1.0
				  |}""".stripMargin
			val json = parse(s"""{ "type": "json", "params": { "template": ${templateJson} } }""")
			val actor = createJsonActor(json)
			val triggerJson = parse(
				"""{ "beta": "xyz",
				  |  "epsilon": 987
				  |}""".stripMargin)
			val expectedJson = parse(
				"""{ "a": "ALPHA",
				  |  "c": { "d": 123,
				  |         "e": 987
				  |       },
				  |  "f": 1,
				  |  "b": "xyz",
				  |  "g": 1.0
				  |}""".stripMargin)
			actor.simpleEmitTrigger(triggerJson.asInstanceOf[JObject]) shouldBe Some(expectedJson)
		}
	}
} 
Example 124
Source File: SampleActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import akka.util.Timeout
import io.coral.lib.{NotSoRandom, Random}
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._
import scala.language.postfixOps

class SampleActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll
	with ScalaFutures {
	def this() = this(ActorSystem("SampleActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	def arbitrarySampleActor(): SampleActor = {
		val json = parse(
			"""{ "type": "sample",
			  | "params": { "fraction": 0.707 } }
			""".stripMargin)
		val props = SampleActor(json).get
		TestActorRef[SampleActor](props).underlyingActor
	}

	def notSoRandomSampleActor(fraction: Double, randoms: Double*): SampleActor = {
		val json = parse(
			s"""{ "type": "sample", "params": { "fraction": ${fraction} } }
     		 """.stripMargin)
		val source = NotSoRandom(randoms: _*)
		val props = Props(classOf[SampleActor], json, Random(source))
		TestActorRef[SampleActor](props).underlyingActor
	}

	implicit val timeout = Timeout(100 millis)

	"A SampleActor" should {

		"Be instantiated with sample fraction" in {
			val json = parse("""{ "type": "sample", "params": { "fraction": 0.5 }}""".stripMargin)
			val props = SampleActor(json).get
			props.actorClass() should be(classOf[SampleActor])
			val actor = TestActorRef[SampleActor](props).underlyingActor
			actor.fraction should be(0.5)
		}

		"Not be instantiated without fraction or percentage" in {
			val json = parse("""{ "type": "sample", "params": { "bla": "blabla" }}""".stripMargin)
			SampleActor(json) should be(None)
		}

		"Be constructible with a io.coral.lib.Random for random boolean stream" in {
			val actor = notSoRandomSampleActor(fraction = 0.5, randoms = 0.1, 0.49, 0.50, 0.51, 0.8, 0.4)
			actor.next() should be(true)
			actor.next() should be(true)
			actor.next() should be(false)
			actor.next() should be(false)
			actor.next() should be(false)
			actor.next() should be(true)
		}

		"Should trigger true or false according to random binomial sequence" in {
			val actor = notSoRandomSampleActor(fraction = 0.7, randoms = 0.8, 0.6)
			val json = parse( """{ "something": "whatever" }""").asInstanceOf[JObject]

			val result1 = actor.simpleEmitTrigger(json)
			result1 should be(Some(JNothing))

			val result2 = actor.simpleEmitTrigger(json)
			result2 should be(Some(json))
		}

		"Should have trigger and emit cooperate" in {
			val actor = notSoRandomSampleActor(fraction = 0.7, randoms = 0.6, 0.8)
			val ref = actor.self
			val json = parse( """{ "something": "whatever" }""").asInstanceOf[JObject]
			val probe = TestProbe()
			actor.emitTargets += probe.ref
			ref ! json
			probe.expectMsg(json)
			ref ! json
			probe.expectNoMsg(100 millis)
		}
	}
} 
Example 125
Source File: GroupByActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import java.util.UUID

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import io.coral.actors.RuntimeActor
import io.coral.api.DefaultModule
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._
import scala.language.postfixOps

class GroupByActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll
	with ScalaFutures {
	def this() = this(ActorSystem("GroupByActorSpec"))
	implicit val ec = scala.concurrent.ExecutionContext.Implicits.global
	implicit val injector = new DefaultModule(system.settings.config)
	val name = "runtime1"
	val userUUID1 = UUID.randomUUID()
	implicit val runtime = system.actorOf(Props(new RuntimeActor(name, userUUID1)), "coral")
	implicit val timeout = Timeout(100.millis)
	implicit val formats = org.json4s.DefaultFormats

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	// This test depends on the real stats actor: with the current CoralActorFactory
	// it seems unavoidable for some tests to rely on an existing actor
	// instead of injecting a test actor.
	def statsGroupBy: GroupByActor = {
		val createJson = parse(
			"""{ "type": "stats",
			  |  "params": { "field": "amount" },
			  |  "group": { "by": "tag" }
			  | }""".stripMargin
		).asInstanceOf[JObject]
		TestActorRef[GroupByActor](GroupByActor(createJson).get).underlyingActor
	}

	"A GroupByActor" should {
		
	}
} 
Example 126
Source File: StatsActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.Await
import scala.concurrent.duration._

class StatsActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("StatsActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	implicit val timeout = Timeout(100.millis)
	implicit val injector = new DefaultModule(system.settings.config)

	def createStatsActor: StatsActor = {
		val createJson = parse( """{ "type": "stats", "params": { "field": "val" } }""")
			.asInstanceOf[JObject]
		val props = CoralActorFactory.getProps(createJson).get
		val actorRef = TestActorRef[StatsActor](props)
		actorRef.underlyingActor
	}

	val expectedInitialState = Map(
		("count", render(0L)),
		("avg", render(JNull)),
		("sd", render(JNull)),
		("min", render(JNull)),
		("max", render(JNull))
	)

	"StatsActor" should {
		"have a field corresponding to the json definition" in {
			val actor = createStatsActor
			actor.field should be("val")
		}

		"supply it's state" in {
			val actor = createStatsActor
			actor.state should be(expectedInitialState)
		}

		"accept a value as trigger" in {
			val actor = createStatsActor
			val triggerJson = parse( """{ "bla": 1.0, "val": 2.7 }""").asInstanceOf[JObject]
			actor.trigger(triggerJson)
			actor.state should be(
				Map(
					("count", render(1L)),
					("avg", render(2.7)),
					("sd", render(0.0)),
					("min", render(2.7)),
					("max", render(2.7))
				))
		}

		"have timer reset statistics" in {
			val actor = createStatsActor
			val triggerJson = parse( """{ "val": 2.7 }""").asInstanceOf[JObject]
			actor.trigger(triggerJson)
			actor.state should be(
				Map(
					("count", render(1L)),
					("avg", render(2.7)),
					("sd", render(0.0)),
					("min", render(2.7)),
					("max", render(2.7))
				))
			val future = actor.timer
			val json = Await.result(future, timeout.duration).get
			json should be(JNothing)
			actor.state should be(expectedInitialState)
		}
	}
} 
Example 127
Source File: ThresholdActorSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors.transform

import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.testkit._
import akka.util.Timeout
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class ThresholdActorSpec(_system: ActorSystem) extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	implicit val timeout = Timeout(100.millis)
	def this() = this(ActorSystem("ThresholdActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	"A ThresholdActor" must {
		val createJson = parse(
			"""{ "type": "threshold", "params": { "key": "key1", "threshold": 10.5 }}"""
				.stripMargin).asInstanceOf[JObject]

		implicit val injector = new DefaultModule(system.settings.config)

		// test invalid definition json as well !!!
		val props = CoralActorFactory.getProps(createJson).get
		val threshold = TestActorRef[ThresholdActor](props)

		// subscribe the testprobe for emitting
		val probe = TestProbe()
		threshold.underlyingActor.emitTargets += probe.ref

		"Emit when equal to the threshold" in {
			val json = parse( """{"key1": 10.5}""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{ "key1": 10.5 }"""))
		}

		"Emit when higher than the threshold" in {
			val json = parse( """{"key1": 10.7}""").asInstanceOf[JObject]
			threshold ! json
			probe.expectMsg(parse( """{"key1": 10.7 }"""))
		}

		"Not emit when lower than the threshold" in {
			val json = parse( """{"key1": 10.4 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectNoMsg()
		}

		"Not emit when key is not present in triggering json" in {
			val json = parse( """{"key2": 10.7 }""").asInstanceOf[JObject]
			threshold ! json
			probe.expectNoMsg()
		}
	}
} 
Example 128
Source File: DefaultModuleSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.actors

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import io.coral.api.DefaultModule
import org.json4s.JsonAST.JValue
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scaldi.Injectable._

class DefaultModuleSpec(_system: ActorSystem) extends TestKit(_system)
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("testSystem"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	"The DefaultModule" should {
		"have the DefaultActorPropFactory when no configuration is made" in {
			implicit val module = new DefaultModule(ConfigFactory.empty)
			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 1)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
		}

		"have the DefaultActorPropFactory when a configuration is made" in {
			val config = """injections.actorPropFactories = ["io.coral.actors.AdditionalActorPropFactoryOne"]"""
			implicit val module = new DefaultModule(ConfigFactory.parseString(config))

			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 2)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
			assert(actorPropFactories(1).getClass == classOf[AdditionalActorPropFactoryOne])
		}

		"should have the ActorPropFactories in the defined order" in {
			val config =
				"""injections.actorPropFactories = ["io.coral.actors.AdditionalActorPropFactoryOne",
				  |"io.coral.actors.AdditionalActorPropFactoryTwo"]""".stripMargin
			implicit val module = new DefaultModule(ConfigFactory.parseString(config))

			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 3)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
			assert(actorPropFactories(1).getClass == classOf[AdditionalActorPropFactoryOne])
			assert(actorPropFactories(2).getClass == classOf[AdditionalActorPropFactoryTwo])
		}
	}
}

class AdditionalActorPropFactoryOne extends ActorPropFactory {
	override def getProps(actorType: String, params: JValue): Option[Props] = None
}

class AdditionalActorPropFactoryTwo extends ActorPropFactory {
	override def getProps(actorType: String, params: JValue): Option[Props] = None
} 
Example 129
Source File: BootConfigSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.api

import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, WordSpecLike}
import org.scalatest.junit.JUnitRunner


@RunWith(classOf[JUnitRunner])
class BootConfigSpec
	extends WordSpecLike
	with BeforeAndAfterAll
	with BeforeAndAfterEach {
	"A Boot program actor" should {
		"Properly process given command line arguments for api and akka ports" in {
			val commandLine = CommandLineConfig(apiPort = Some(1234), akkaPort = Some(5345))
			val actual: CoralConfig = io.coral.api.Boot.getFinalConfig(commandLine)
			assert(actual.akka.remote.nettyTcpPort == 5345)
			assert(actual.coral.api.port == 1234)
		}

		"Properly process a given configuration file through the command line" in {
			val configPath = getClass().getResource("bootconfigspec.conf").getFile()
			val commandLine = CommandLineConfig(config = Some(configPath), apiPort = Some(4321))
			val actual: CoralConfig = io.coral.api.Boot.getFinalConfig(commandLine)
			// Overridden in bootconfigspec.conf
			assert(actual.akka.remote.nettyTcpPort == 6347)
			// Overridden by the command line parameter
			assert(actual.coral.api.port == 4321)
			// Not overridden on the command line or in bootconfigspec.conf
			assert(actual.coral.cassandra.port == 9042)
		}
	}
} 
Example 130
Source File: RuntimeStatisticsSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.api

import io.coral.TestHelper
import org.junit.runner.RunWith
import org.scalatest.WordSpecLike
import org.scalatest.junit.JUnitRunner
import org.json4s._
import org.json4s.jackson.JsonMethods._

@RunWith(classOf[JUnitRunner])
class RuntimeStatisticsSpec
	extends WordSpecLike {
	"A RuntimeStatistics class" should {
		"Properly sum multiple statistics objects together" in {
			val counters1 = Map(
				(("actor1", "stat1") -> 100L),
				(("actor1", "stat2") -> 20L),
				(("actor1", "stat3") -> 15L))
			val counters2 = Map(
				(("actor2", "stat1") -> 20L),
				(("actor2", "stat2") -> 30L),
				(("actor2", "stat3") -> 40L))
			val counters3 = Map(
				(("actor2", "stat1") -> 20L),
				(("actor2", "stat2") -> 30L),
				(("actor2", "stat3") -> 40L),
				(("actor2", "stat4") -> 12L))
			val stats1 = RuntimeStatistics(1, 2, 3, counters1)
			val stats2 = RuntimeStatistics(2, 3, 4, counters2)
			val stats3 = RuntimeStatistics(4, 5, 6, counters3)

			val actual = RuntimeStatistics.merge(List(stats1, stats2, stats3))

			val expected = RuntimeStatistics(7, 10, 13,
				Map(("actor1", "stat1") -> 100,
					("actor1", "stat2") -> 20,
					("actor1", "stat3") -> 15,
					("actor2", "stat1") -> 20,
					("actor2", "stat2") -> 30,
					("actor2", "stat3") -> 40,
					("actor2", "stat4") -> 12))

			assert(actual == expected)
		}

		"Create a JSON object from a RuntimeStatistics object" in {
			val input = RuntimeStatistics(1, 2, 3,
				Map((("actor1", "stat1") -> 10L),
					(("actor1", "stat2") -> 20L)))

			val expected = parse(
				s"""{
				   |  "totalActors": 1,
				   |  "totalMessages": 2,
				   |  "totalExceptions": 3,
				   |  "counters": {
				   |    "total": {
				   |      "stat1": 10,
				   |      "stat2": 20
				   |    }, "actor1": {
				   |      "stat1": 10,
				   |      "stat2": 20
				   |    }
				   |  }
				   |}
				 """.stripMargin).asInstanceOf[JObject]

			val actual = RuntimeStatistics.toJson(input)
			assert(actual == expected)
		}

		"Create a RuntimeStatistics object from a JSON object" in {
			val input = parse(
				s"""{
				   |  "totalActors": 1,
				   |  "totalMessages": 2,
				   |  "totalExceptions": 3,
				   |  "counters": {
				   |    "total": {
				   |      "stat1": 10,
				   |      "stat2": 20
				   |    }, "actor1": {
				   |      "stat1": 10,
				   |      "stat2": 20
				   |    }
				   |  }
				   |}
				 """.stripMargin).asInstanceOf[JObject]

			val actual = RuntimeStatistics.fromJson(input)

			val expected = RuntimeStatistics(1, 2, 3,
				Map((("actor1", "stat1") -> 10L),
					(("actor1", "stat2") -> 20L)))

			assert(actual == expected)
		}
	}
} 
Example 131
Source File: RandomSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import org.scalatest.{Matchers, WordSpecLike}

class RandomSpec extends WordSpecLike with Matchers {
	"The Random object" should {
		"provide a stream of uniformly distributed doubles" in {
			val source = NotSoRandom(0.3, 0.8)
			val random = new Random(source)
			val stream = random.uniform()
			stream.head should be(0.3) // as example of simple use case
			stream.take(2).toList should be(List(0.3, 0.8))
		}

		"provide a stream of uniformly distributed doubles with scale" in {
			val source = NotSoRandom(0.3, 0.8)
			val random = new Random(source)
			val stream = random.uniform(2.0, 6.0)
			stream.take(2).toList should be(List(3.2, 5.2))
		}

		"provide a stream of weighted true/false values (binomial values)" in {
			val source = NotSoRandom(0.3, 0.8, 0.299999, 0.0, 1.0)
			val random = new Random(source)
			val stream = random.binomial(0.3)
			stream.take(5).toList should be(List(false, false, true, true, false))
		}
	}
} 
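RandomSpec (like SampleActorSpec earlier) swaps the real random source for the deterministic NotSoRandom stub so the assertions become repeatable. The sketch below shows the same dependency-injection idea with hypothetical names; it is not the coral NotSoRandom API:

// Generic sketch of testing against a deterministic "random" source
// (NumberSource, FixedSource and Sampler are illustrative names only).
import org.scalatest.{Matchers, WordSpecLike}

class DeterministicRandomSketch extends WordSpecLike with Matchers {

  trait NumberSource { def nextDouble(): Double }

  // Replays a fixed sequence instead of real randomness.
  class FixedSource(values: Double*) extends NumberSource {
    private val it = values.iterator
    def nextDouble(): Double = it.next()
  }

  class Sampler(fraction: Double, source: NumberSource) {
    def accept(): Boolean = source.nextDouble() < fraction
  }

  "a sampler with a fixed source" should {
    "make its accept/reject decisions predictable" in {
      val sampler = new Sampler(0.7, new FixedSource(0.6, 0.8))
      sampler.accept() shouldBe true  // 0.6 < 0.7
      sampler.accept() shouldBe false // 0.8 >= 0.7
    }
  }
}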
Example 132
Source File: CheckerSpec.scala    From cave   with MIT License 5 votes vote down vote up
package worker

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestKit
import com.cave.metrics.data.evaluator.DataFetcher
import com.cave.metrics.data.influxdb.InfluxClientFactory
import com.cave.metrics.data.{AlertJsonData, Check}
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Try, Success}

class CheckerSpec extends TestKit(ActorSystem()) with WordSpecLike with BeforeAndAfterAll with AlertJsonData with MockitoSugar {

  override def afterAll() = {
    system.shutdown()
  }

  final val SomeReason = "BOOM!"
  val mockClientFactory = mock[InfluxClientFactory]

  def fakeChecker(check: Check): Props = Props(new Checker(check) {
    override def fetcher = new DataFetcher(mockClientFactory)

    override def run(check: Check)(implicit ec: ExecutionContext): Future[Try[Boolean]] = {
      if (check.schedule.alert.description == AlertDescription) Future.successful(Success(true))
      else if (check.schedule.alert.description == AlertFiveDescription) Future.successful(Success(false))
      else Future.failed(new RuntimeException(SomeReason))
    }
  })


  "A checker" must {
    "send Done(true) if an alarm condition has been detected" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(InsufficientOrders), testActor)), "alarm")

      expectMsg(Checker.Done(alarm = Success(true)))
      watch(checker)
      expectTerminated(checker)
    }

    "send Done(false) if no alarm condition has been detected" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(InsufficientOrdersFive), testActor)), "notAlarm")

      expectMsg(Checker.Done(alarm = Success(false)))
      watch(checker)
      expectTerminated(checker)
    }

    "properly finish in case of error" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(OrdersLessThanPredicted), testActor)), "error")

      expectMsg(Checker.Aborted(SomeReason))
      watch(checker)
      expectTerminated(checker)
    }
  }
} 
Example 133
Source File: JsonTemplateSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{Matchers, WordSpecLike}

class JsonTemplateSpec extends WordSpecLike with Matchers {
	"A JsonTemplate class" should {
		"instantiate from a template object" in {
			val templateJson = parse(
				"""{ "field1": "abc",
				  |  "field2": 123
				  |}""".stripMargin)
			val template = JsonTemplate(templateJson.asInstanceOf[JObject])
			template.interpret(parse("{}").asInstanceOf[JObject]) shouldBe templateJson
		}

		"substitute references (identified with a ${...}} construct" in {
			val templateJson = parse(
				"""{ "field1": "${abc}",
				  |  "field2": 123
				  |}""".stripMargin)
			val template = JsonTemplate(templateJson.asInstanceOf[JObject])
			val inputJson = parse(
				"""{ "def": 456,
				  |  "abc": 789
				  |}""".stripMargin)
			val outputJson = parse(
				"""{ "field1": 789,
				  |  "field2": 123
				  |}""".stripMargin)
			template.interpret(inputJson.asInstanceOf[JObject]) shouldBe outputJson
		}

		"handle nested structure" in {
			val templateJson = parse(
				"""{ "a": "ALPHA",
				  |  "b": "${beta}",
				  |  "c": { "d": 123,
				  |         "e": { "ee": "${epsilon}" }
				  |       },
				  |  "f": 1,
				  |  "g": 1.0
				  |}""".stripMargin)
			val template = JsonTemplate(templateJson.asInstanceOf[JObject])
			val inputJson = parse(
				"""{ "beta": "xyz",
				  |  "epsilon": 987
				  |}""".stripMargin)
			val outputJson = parse(
				"""{ "a": "ALPHA",
				  |  "c": { "d": 123,
				  |         "e": { "ee": 987 }
				  |       },
				  |  "f": 1,
				  |  "b": "xyz",
				  |  "g": 1.0
				  |}""".stripMargin)
			template.interpret(inputJson.asInstanceOf[JObject]) shouldBe outputJson
		}

		"handle expressions cf jsonExpressionParser" in {
			val templateJson = parse(
				"""{ "a": "${array[1]}",
				  |  "b": "${ field.sub.subsub }",
				  |  "c": 1.0
				  |}""".stripMargin)
			val template = JsonTemplate(templateJson.asInstanceOf[JObject])
			val inputJson = parse(
				"""{ "array": ["a0", "a1", "a2"],
				  |  "field": { "sub": { "subsub": 123, "bla": "bla" } },
				  |  "epsilon": 987
				  |}""".stripMargin)
			val outputJson = parse(
				"""{ "a": "a1",
				  |  "b": 123,
				  |  "c": 1.0
				  |}""".stripMargin)
			template.interpret(inputJson.asInstanceOf[JObject]) shouldBe outputJson
		}

		"use null when values are not found" in {
			val templateJson = parse(
				"""{ "field1": "${abc}",
				  |  "field2": 123
				  |}""".stripMargin)
			val template = JsonTemplate(templateJson.asInstanceOf[JObject])
			val inputJson = parse(
				"""{ "def": 456
				  |}""".stripMargin)
			val outputJson = parse(
				"""{ "field1": null,
				  |  "field2": 123
				  |}""".stripMargin)
			template.interpret(inputJson.asInstanceOf[JObject]) shouldBe outputJson
		}

	}

} 
Example 134
Source File: SharedSparkSessionSuite.scala    From ecosystem   with Apache License 2.0 5 votes vote down vote up
package org.tensorflow.spark.datasources.tfrecords

import java.io.File

import org.apache.commons.io.FileUtils
import org.apache.spark.SharedSparkSession
import org.junit.{After, Before}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}


trait BaseSuite extends WordSpecLike with Matchers with BeforeAndAfterAll

class SharedSparkSessionSuite extends SharedSparkSession with BaseSuite {
  val TF_SANDBOX_DIR = "tf-sandbox"
  val file = new File(TF_SANDBOX_DIR)

  @Before
  override def beforeAll() = {
    super.setUp()
    FileUtils.deleteQuietly(file)
    file.mkdirs()
  }

  @After
  override def afterAll() = {
    FileUtils.deleteQuietly(file)
    super.tearDown()
  }
} 
Example 135
Source File: JsonYQLParserSpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.actors

import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.testkit._
import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll }
import scala.io.Source
import scala.concurrent.duration._
import spray.http._
import spray.http.MediaTypes._
import org.scalatest.Inside
import scala.language.postfixOps
import spray.http.ContentType.apply


class JsonYQLParserSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("JsonYQLParserSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  import scala.language.postfixOps
  val json = Source.fromFile("src/test/resources/yqlExample.json") mkString

  "json conversion" in {
    val body = HttpEntity(MediaTypes.`application/json`, json.getBytes())
    val ok = JsonYQLParser.decode(body) match {
      case Right(x) =>
        val count = x.query.count
        count should equal(4)
        x.query.results.quote.length should equal(count)
        x.query.results.get(count - 1, "symbol") should matchPattern { case Some("MSFT") => }

      case Left(x) =>
        fail("decoding error: " + x)
    }
  }

  "send back" in {
    val blackboard = system.actorOf(Props.create(classOf[MockYQLBlackboard], testActor), "blackboard")
    val entityParser = _system.actorOf(Props.create(classOf[EntityParser], blackboard), "entityParser")
    val entity = HttpEntity(MediaTypes.`application/json`, json.getBytes())
    entityParser ! EntityMessage("json:YQL", entity)
    val msg = expectMsgClass(3.seconds, classOf[QueryResponseValid])
    println("msg received: " + msg)
    msg should matchPattern {
      case QueryResponseValid("MSFT", _) =>
    }
    inside(msg) {
      case QueryResponseValid(symbol, attributes) => attributes.get("Ask") should matchPattern { case Some("46.17") => }
    }
  }

}

import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import scala.concurrent.Await
import com.phasmid.hedge_fund.model.Model

class MockYQLUpdateLogger(blackboard: ActorRef) extends UpdateLogger(blackboard) {
  override def processStock(identifier: String, model: Model) = {
    model.getKey("price") match {
      case Some(p) => {
        // sender is the MarketData actor
        val future = sender ? SymbolQuery(identifier, List(p))
        val result = Await.result(future, timeout.duration).asInstanceOf[QueryResponseValid]
        result.attributes map {
          case (k, v) =>
            log.info(s"$identifier attribute $k has been updated to: $v")
            blackboard ! result
        }
      }
      case None => log.warning(s"'price' not defined in model")
    }
  }
}

class MockYQLBlackboard(testActor: ActorRef) extends Blackboard(Map(classOf[KnowledgeUpdate] -> "marketData", classOf[SymbolQuery] -> "marketData", classOf[OptionQuery] -> "marketData", classOf[CandidateOption] -> "optionAnalyzer", classOf[Confirmation] -> "updateLogger"),
  Map("marketData" -> classOf[MarketData], "optionAnalyzer" -> classOf[OptionAnalyzer], "updateLogger" -> classOf[MockYQLUpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => msg match {
        // Cut down on the volume of messages
        case Confirmation("MSFT", _, _) => super.receive(msg)
        case _ =>
      }
      case msg: QueryResponseValid => testActor forward msg

      case msg => super.receive(msg)
    }
} 
Example 136
Source File: OptionAnalyzerSpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.actors

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.testkit._
import scala.concurrent.duration._
import akka.actor.actorRef2Scala
import com.phasmid.hedge_fund.model._


class OptionAnalyzerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("OptionAnalyzerSpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "send back" in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockAnalyzerBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", true, Map("strike" -> "54.2"), Map("underlying_id" -> "1234", "Sharpe" -> 0.45, "EV" -> "37.132B", "EBITDA" -> "3.046B"))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, model, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(model, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponseValid])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponseValid(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockAnalyzerBlackboard(testActor: ActorRef) extends Blackboard(
  Map(
    classOf[KnowledgeUpdate] -> "marketData",
    classOf[SymbolQuery] -> "marketData",
    classOf[OptionQuery] -> "marketData",
    classOf[CandidateOption] -> "optionAnalyzer",
    classOf[Confirmation] -> "updateLogger"),
  Map(
    "marketData" -> classOf[MarketData],
    "optionAnalyzer" -> classOf[OptionAnalyzer],
    "updateLogger" -> classOf[UpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => testActor forward msg
      case msg: QueryResponseValid => testActor forward msg
      case msg => super.receive(msg)
    }
} 
Example 137
Source File: PortfolioSpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.portfolio

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.testkit._
import scala.concurrent.duration._
import akka.actor.actorRef2Scala
import com.phasmid.hedge_fund.HedgeFund
import com.phasmid.hedge_fund.actors._
import com.typesafe.config.ConfigFactory
import com.phasmid.hedge_fund.model.GoogleOptionModel


class PortfolioSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with Inside with BeforeAndAfterAll {

  def this() = this(ActorSystem("MockPortfolioBlackboard"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }
  
  "read portfolio" in {
    val config = ConfigFactory.load
    val portfolio = HedgeFund.getPortfolio(config)
    portfolio.name shouldEqual "Test Portfolio"
    println(s"portfolio: $portfolio")
  }

  "send back" in {
    val model = new GoogleOptionModel()
    val blackboard = system.actorOf(Props.create(classOf[MockPortfolioBlackboard], testActor), "blackboard")
    blackboard ! CandidateOption(model, "XX375", true, Map("strike" -> "45.2"), Map("underlying_id" -> "1234", "Sharpe" -> 0.45, "EV" -> 37132000000.0, "EBITDA" -> 3046000000.0))
    val confirmationMsg = expectMsgClass(3.seconds, classOf[Confirmation])
    println("confirmation msg received: " + confirmationMsg)
    inside(confirmationMsg) {
      case Confirmation(id, model, details) =>
        println(s"confirmation1 details: $details")
        id shouldEqual "XX375"
        blackboard ! KnowledgeUpdate(model, "XX", Map("id" -> "1234"))
        val confirmationMsg2 = expectMsgClass(3.seconds, classOf[Confirmation])
        println("confirmation msg2 received: " + confirmationMsg2)
        // Note that the key "id" is in the model for symbols, not options
        blackboard ! OptionQuery("id", "1234")
        val responseMsg = expectMsgClass(3.seconds, classOf[QueryResponse])
        println("msg received: " + responseMsg)
        inside(responseMsg) {
          case QueryResponseValid(symbol, attributes) =>
            symbol shouldEqual "XX"
            println(s"attributes: $attributes")
        }
    }
  }
}

class MockPortfolioBlackboard(testActor: ActorRef) extends Blackboard(
  Map(
    classOf[KnowledgeUpdate] -> "marketData",
    classOf[SymbolQuery] -> "marketData",
    classOf[OptionQuery] -> "marketData",
    classOf[CandidateOption] -> "optionAnalyzer",
    classOf[PortfolioUpdate] -> "updateLogger",
    classOf[Confirmation] -> "updateLogger"),
  Map(
    "marketData" -> classOf[MarketData],
    "optionAnalyzer" -> classOf[OptionAnalyzer],
    "updateLogger" -> classOf[UpdateLogger])) {

  override def receive =
    {
      case msg: Confirmation => testActor forward msg
      case msg: QueryResponse => testActor forward msg
      case msg => super.receive(msg)
    }
} 
Example 138
Source File: QuerySpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.http

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import spray.http._
import com.phasmid.hedge_fund.model.GoogleQuery
import com.phasmid.hedge_fund.model.YQLQuery


class QuerySpec extends WordSpecLike with Matchers with Inside {

  "YQL tech query" in {
    val symbols = List("YHOO", "AAPL", "GOOG", "MSFT")
    val uri = YQLQuery("json", true).createQuery(symbols)
    println(uri.toString)
    uri.toString shouldEqual "https://query.yahooapis.com/v1/public/yql?format=json&callback=&q=select+*+from+yahoo.finance.quotes+where+symbol+in+(%22YHOO%22,%22AAPL%22,%22GOOG%22,%22MSFT%22)&diagnostics=true&env=http://datatables.org/alltables.env"
  }

  "Google tech query" in {
    val symbols = List("AAPL", "YHOO")
    val uri = GoogleQuery("NASDAQ").createQuery(symbols)
    println(uri.toString)
    // TODO this is actually incorrect (and so is code being tested)--fix it
    uri.toString shouldEqual "https://finance.google.com/finance/info?q=NASDAQ:AAPL,YHOO&client=ig"
  }

} 
Example 139
Source File: RuleSpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.rules

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import spray.http._


class RuleSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = Rule(predicate)
    rule.apply(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Right(true) =>
    }
    rule.apply(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Right(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    val rule = Rule(predicate)
    inside(rule.apply(MapCandidate("test", Map("y" -> "2")))) {
      case Left(x) =>
    }
    inside(rule.apply(MapCandidate("test", Map("x" -> "y")))) {
      case Left(x) =>
    }
  }

  "Simple Rule" in {
    val predicate = Rule("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
  }
  "Compound Rule" in {
    val predicate = Rule("(x < 3) & (y > 1)")
    predicate should matchPattern {
      case And(NumberPredicate("x", LessThan(), 3), NumberPredicate("y", GreaterThan(), 1)) =>
    }
  }
  "Nested Rule" in {
    val predicate = Rule("(x < 3) & ((y > 1) | (z = 0))")
    predicate should matchPattern {
      case And(
        NumberPredicate("x", LessThan(), 3),
        Or(
          NumberPredicate("y", GreaterThan(), 1),
          NumberPredicate("z", Equals(), 0))) =>
    }
  }
} 
Example 140
Source File: PredicateSpec.scala    From Scalaprof   with GNU General Public License v2.0 5 votes vote down vote up
package com.phasmid.hedge_fund.rules

import org.scalatest.{ WordSpecLike, Matchers, BeforeAndAfterAll, Inside }
import spray.http._


class PredicateSpec extends WordSpecLike with Matchers with Inside {

  "Simple Predicate and Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    predicate.apply(MapCandidate("test", Map("x" -> "2"))) should matchPattern {
      case Right(true) =>
    }
    predicate.apply(MapCandidate("test", Map("x" -> "4"))) should matchPattern {
      case Right(false) =>
    }
  }

  "Simple Predicate, bad Candidate" in {
    val predicate = NumberPredicate("x", "<", 3)
    inside(predicate.apply(MapCandidate("test", Map("y" -> "2")))) {
      case Left(x) =>
    }
    inside(predicate.apply(MapCandidate("test", Map("x" -> "y")))) {
      case Left(x) =>
    }
  }

  "String Predicate" in {
    val predicate = Predicate("x < 3")
    predicate should matchPattern {
      case NumberPredicate("x", LessThan(), 3) =>
    }
    predicate shouldEqual NumberPredicate("x", "<", 3)
  }

  "Text Predicate" in {
    val predicate = Predicate("x == Hello")
    predicate.apply(MapCandidate("test", Map("x" -> "Hello"))) should matchPattern {
      case Right(true) =>
    }
  }
} 
Example 141
Source File: BotPluginTestKit.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.test.annotated

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import com.sumologic.sumobot.core.model.{IncomingMessage, InstantMessageChannel, OutgoingMessage, UserSender}
import org.junit.runner.RunWith
import org.scalatest.concurrent.Eventually
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import slack.models.User

import scala.concurrent.duration.{FiniteDuration, _}

@RunWith(classOf[JUnitRunner])
abstract class BotPluginTestKit(actorSystem: ActorSystem)
  extends TestKit(actorSystem)
    with WordSpecLike with Eventually with Matchers
    with BeforeAndAfterAll {

  protected val outgoingMessageProbe = TestProbe()
  system.eventStream.subscribe(outgoingMessageProbe.ref, classOf[OutgoingMessage])

  protected def confirmOutgoingMessage(test: OutgoingMessage => Unit, timeout: FiniteDuration = 1.second): Unit = {
    outgoingMessageProbe.expectMsgClass(timeout, classOf[OutgoingMessage]) match {
      case msg: OutgoingMessage =>
        test(msg)
    }
  }

  protected def instantMessage(text: String, user: User = mockUser("123", "jshmoe")): IncomingMessage = {
    IncomingMessage(text, true, InstantMessageChannel("125", user), "1527239216000090", sentBy = UserSender(user))
  }

  protected def mockUser(id: String, name: String): User = {
    User(id, name, None, None, None, None, None, None, None, None, None, None, None, None, None, None)
  }

  protected def send(message: IncomingMessage): Unit = {
    system.eventStream.publish(message)
  }

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
} 
Example 142
Source File: MatchTextUtil.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.test.annotated

import org.scalatest.WordSpecLike

import scala.util.matching.Regex


trait MatchTextUtil extends WordSpecLike {

  def shouldMatch(regex: Regex, text: String): Unit = {
    if (!doesMatch(regex, text)) {
      fail(s"$regex did not match $text but should")
    }
  }

  def shouldNotMatch(regex: Regex, text: String): Unit = {
    if (doesMatch(regex, text)) {
      fail(s"$regex matched $text but should not")
    }
  }

  private def doesMatch(regex: Regex, text: String): Boolean = {
    regex.pattern.matcher(text).find()
  }
} 
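As a quick illustration (not part of the sumobot sources), a spec mixing in MatchTextUtil might look like the sketch below; the class name and regex are hypothetical. Because MatchTextUtil already extends WordSpecLike, the should/in DSL and fail() are available directly.

import com.sumologic.sumobot.test.annotated.MatchTextUtil

import scala.util.matching.Regex

// Hypothetical spec exercising the two helpers shown above
class GreetingRegexSpec extends MatchTextUtil {

  "a greeting regex" should {
    "match and reject the expected inputs" in {
      val greeting: Regex = """hello\s+\w+""".r
      shouldMatch(greeting, "hello world")
      shouldNotMatch(greeting, "goodbye")
    }
  }
}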
Example 143
Source File: AliasTest.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.plugins.alias

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestProbe
import com.sumologic.sumobot.brain.InMemoryBrain
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.plugins.BotPlugin.InitializePlugin
import com.sumologic.sumobot.test.annotated.BotPluginTestKit
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import scala.concurrent.duration._
import org.scalatest.{Matchers, WordSpecLike}
import org.scalatest.concurrent.Eventually._

class AliasTest
  extends BotPluginTestKit(ActorSystem("AliasTest")) {

  val aliasRef = system.actorOf(Props(classOf[Alias]), "alias")
  val brainRef = system.actorOf(Props(classOf[InMemoryBrain]), "brain")
  aliasRef ! InitializePlugin(null, brainRef, null)

  "alias" should {
    "allow aliasing messages to the bot" in {
      send(instantMessage("alias 'foo' to 'bar'"))
      val otherPlugin = TestProbe()
      system.eventStream.subscribe(otherPlugin.ref, classOf[IncomingMessage])
      send(instantMessage("foo"))
      eventually(Timeout(5.seconds)) {
        val messages = otherPlugin.expectMsgAllClassOf(classOf[IncomingMessage])
        messages.foreach(msg => println(msg.canonicalText))
        messages.exists(_.canonicalText == "bar") should be (true)
      }
    }
  }
} 
Example 144
Source File: HelpTest.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.plugins.help

import akka.actor.{ActorSystem, Props}
import com.sumologic.sumobot.core.model.{IncomingMessage, InstantMessageChannel, UserSender}
import com.sumologic.sumobot.core.PluginRegistry
import com.sumologic.sumobot.plugins.BotPlugin.{InitializePlugin, PluginAdded}
import com.sumologic.sumobot.plugins.conversations.Conversations
import com.sumologic.sumobot.test.annotated.BotPluginTestKit
import org.scalatest.{Matchers, WordSpecLike}

class HelpTest extends BotPluginTestKit(ActorSystem("HelpTest")) {

  val helpRef = system.actorOf(Props[Help], "help")

  val reg = system.actorOf(Props(classOf[PluginRegistry]))
  val mock = system.actorOf(Props(classOf[Conversations]), "mock")

  reg ! PluginAdded(mock, "mock help")
  reg ! PluginAdded(helpRef, "help help")

  helpRef ! InitializePlugin(null, null, reg)

  val user = mockUser("123", "jshmoe")

  "help" should {
    "return list of plugins" in {
      helpRef ! IncomingMessage("help", true, InstantMessageChannel("125", user), "1527239216000090", attachments = Seq(), sentBy = UserSender(user))
      confirmOutgoingMessage {
        msg =>
          msg.text should be("help\nmock")
      }
    }

    "return help for known plugins" in {
      helpRef ! IncomingMessage("help mock", true, InstantMessageChannel("125", user), "1527239216000090", attachments = Seq(), sentBy = UserSender(user))
      confirmOutgoingMessage {
        msg =>
          msg.text should include("mock help")
      }
    }

    "return an error for unknown commands" in {
      helpRef ! IncomingMessage("help test", true, InstantMessageChannel("125", user), "1527239216000090", attachments = Seq(), sentBy = UserSender(user))
      confirmOutgoingMessage {
        msg =>
          msg.text should include("Sorry, I don't know")
      }
    }

    "work with ? variants" in {
      "?" should fullyMatch regex Help.ListPlugins
      "?" should fullyMatch regex Help.ListPlugins
      "help  " should fullyMatch regex Help.ListPlugins
      "? me " should fullyMatch regex Help.HelpForPlugin
      "help me " should fullyMatch regex Help.HelpForPlugin
    }
  }
} 
Example 145
Source File: FutureRetryUtilitySpec.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.util

import akka.actor.{ActorSystem, Scheduler, Status}
import akka.event.{Logging, LoggingAdapter}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{Matchers, WordSpecLike}

import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global

class FutureRetryUtilitySpec
    extends TestKit(ActorSystem("MySpec"))
    with WordSpecLike
    with Matchers
    with FutureRetryUtility {

  implicit val schedule: Scheduler    = system.scheduler
  implicit val logger: LoggingAdapter = Logging.getLogger(system, this)

  private final val delay: FiniteDuration = 2.seconds
  private final val retries: Int          = 3

  private def future(flag: Boolean) =
    if (flag) Future.successful(3) else Future.failed(new RuntimeException("Failure"))

  "retry function in FutureRetryUtility" must {

    "successfully returns whether, after retries, the future is eventually successful" in {
      Await.result(future(true).retry(delay, retries)(_ > 2), Duration.Inf) shouldBe 3
    }

    "thrown an Exception whether, after retries, the future eventually returns an Exception" in {
      an[RuntimeException] shouldBe thrownBy(Await.result(future(false).retry(delay, retries)(_ => true), Duration.Inf))
    }

    "consider the number of retries" in {
      val q = mutable.Queue(0)
      def future = {
        val nRetries = q.dequeue()
        if (nRetries < 2) { q.enqueue(nRetries + 1); Future.failed(new RuntimeException) } else {
          q.enqueue(nRetries + 1); Future.successful(nRetries)
        }
      }
      Await.result(future.retry(delay, retries)(_ > 2), Duration.Inf) shouldBe 3
    }
  }

  "pipeTo function in FutureRetryUtility" must {

    "returns a successful future and send the content of it through pipe" in {
      val testProbe = TestProbe("actor-test")
      future(true).pipeTo(delay, retries, testProbe.testActor)()
      testProbe.expectMsg(3)
    }

    "return a failed future and send a status failure through pipe" in {
      val testProbe = TestProbe("actor-test")
      future(false).pipeTo(delay, retries, testProbe.testActor)()
      testProbe.expectMsgAllClassOf(classOf[Status.Failure])
    }
  }
} 
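For orientation, the pattern these tests exercise is a future that is re-evaluated after a delay until either a predicate on its value holds or the retry budget is exhausted. Below is a minimal standalone sketch of that pattern; it is an illustration only, not the NSDb FutureRetryUtility implementation, and the object and method names are made up.

import akka.actor.Scheduler
import akka.pattern.after

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}

object RetrySketch {
  // Retry a by-name future until `predicate` holds on its value or `retries` is used up.
  // A failed future and a value rejected by the predicate are treated the same way:
  // wait `delay`, then evaluate `f` again with one retry fewer.
  def retry[T](f: => Future[T], delay: FiniteDuration, retries: Int)(predicate: T => Boolean)(
      implicit ec: ExecutionContext, scheduler: Scheduler): Future[T] =
    f.filter(predicate).recoverWith {
      case _ if retries > 0 => after(delay, scheduler)(retry(f, delay, retries - 1)(predicate))
    }
}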
Example 146
Source File: ProjectionNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class ProjectionNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Projection" must {
    "select the values" in {
      val changes = ChangeSet(
        positive = tupleBag(tuple(15, 16, 17, 18), tuple(4, 5, 6, 7)),
        negative = tupleBag(tuple(-0, -1, -2, -3), tuple(-10, -11, -12, -13))
      )
      val selectionMask = functionMask(0, 2)
      val expectedChanges = ChangeSet(
        positive = tupleBag(tuple(15, 17), tuple(4, 6)),
        negative = tupleBag(tuple(-0, -2), tuple(-10, -12))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val selector = system.actorOf(Props(new ProjectionNode(echoActor ! _, selectionMask)), name = "testSelector")

      selector ! changes
      expectMsg(expectedChanges)
      selector ! changes
      expectMsg(expectedChanges)
    }

    val changeSet = ChangeSet(
      positive = tupleBag(tuple(0, "something")),
      negative = tupleBag(tuple(0, "something else"))
    )

    "do projection with equal length" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val checker = system.actorOf(Props(new ProjectionNode(echoActor ! _, functionMask(1, 0)))) // swap attributes

      checker ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("something", 0)),
        negative = tupleBag(tuple("something else", 0))
      ))
    }

    "do projection with lesser length" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val checker = system.actorOf(Props(new ProjectionNode(echoActor ! _, functionMask(1))))

      checker ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(tuple("something")),
        negative = tupleBag(tuple("something else"))
      ))
    }
  }
} 
Example 147
Source File: UnwindNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.datatypes.Tuple
import ingraph.ire.messages.ChangeSet
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class UnwindNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  import ingraph.ire.util.TestUtil._
  private def indexer(index: Int) = {
    (t: Tuple) => t(index).asInstanceOf[Seq[Any]]
  }
  "Unwind" must {
    "do simple unwind 0" in {
      val changeSet = ChangeSet(
        positive = tupleBag(
          tuple("x", cypherList(1, 2, 3), "y"),
          tuple("w", cypherList(), "z")
        ),
        negative = tupleBag(
          tuple("a", cypherList(1, 2), "b"),
          tuple("c", cypherList(), "d")
        )
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val unwind = system.actorOf(Props(new UnwindNode(echoActor ! _, indexer(1))))

      unwind ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(
          tuple("x", cypherList(1, 2, 3), "y", 1),
          tuple("x", cypherList(1, 2, 3), "y", 2),
          tuple("x", cypherList(1, 2, 3), "y", 3)
        ),
        negative = tupleBag(
          tuple("a", cypherList(1, 2), "b", 1),
          tuple("a", cypherList(1, 2), "b", 2)
        )
      ))
    }

    "do simple unwind 1" in {
      val changeSet = ChangeSet(
        positive = tupleBag(
          tuple("x", List(1, 2, 3), "y"),
          tuple("w", List(4, 5), "z")
        )
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val unwind = system.actorOf(Props(new UnwindNode(echoActor ! _, indexer(1))))

      unwind ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(
          tuple("x", cypherList(1, 2, 3), "y", 1),
          tuple("x", cypherList(1, 2, 3), "y", 2),
          tuple("x", cypherList(1, 2, 3), "y", 3),
          tuple("w", cypherList(4, 5),    "z", 4),
          tuple("w", cypherList(4, 5),    "z", 5)
        )
      ))
    }
  }
} 
Example 148
Source File: ListSelectorNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class ListSelectorNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "ListSelector" must {
    "do select items in lists 0" in {
      val changeSet = ChangeSet(
        positive = tupleBag(tuple(List("a","b","c")))
      )

      val function = (n: Any) => n match {
        case s: List[Any] => s(1)
      }
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val listSelector = system.actorOf(Props(new MapperNode(echoActor ! _, function, 0)))

      listSelector ! changeSet
      expectMsg(ChangeSet(positive = tupleBag(tuple("b"))))
    }
  }
} 
Example 149
Source File: SelectionNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.datatypes.Tuple
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class SelectionNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Selection" must {
    "check the condition properly" in {
      val changeSet = ChangeSet(
        positive = tupleBag(tuple(0, "something"), tuple(0, "something else"))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val condition = (n: Tuple) => {
        n(1) == "something"
      }
      val checker = system.actorOf(Props(new SelectionNode(echoActor ! _, condition)))

      checker ! changeSet
      expectMsg(ChangeSet(positive = tupleBag(tuple(0, "something"))))
    }
  }
} 
Example 150
Source File: SortAndTopNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.datatypes.Tuple
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class SortAndTopNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }
  val selectionMask = Vector((v: Tuple) => v(0), (v: Tuple) => v(1))
  "Sort" should {
    "count with complex keys" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val counter = system.actorOf(Props(
          new SortAndTopNode(echoActor ! _,
              tupleLength = 3,
              selectionMask = selectionMask,
              skip = Some(0),
              limit = Some(2),
              ascendingOrder = Vector(true, false))
      ))

      counter ! ChangeSet(positive = tupleBag(tuple(2, 3, 4), tuple(0, 2, 3), tuple(0, 3, 3), tuple(5, 6, 7)))
      expectMsg(ChangeSet(positive = tupleBag(tuple(0, 3, 3), tuple(0, 2, 3))))

      counter ! ChangeSet(negative = tupleBag(tuple(0, 2, 3)))
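      // retracting (0, 2, 3) changes the top-2 window, so the node re-emits it:
      // the previous top-2 arrives as negative tuples and the new top-2 as positive ones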
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(0, 3, 3), tuple(2, 3, 4)),
        negative = tupleBag(tuple(0, 3, 3), tuple(0, 2, 3))))
    }
    "have bag semantics" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val counter = system.actorOf(Props(new SortAndTopNode(echoActor ! _,
          tupleLength = 3,
          selectionMask = selectionMask,
          skip = Some(0),
          limit = Some(2),
          ascendingOrder = Vector(true, true))))

      counter ! ChangeSet(positive = tupleBag(tuple(0, 1, 2), tuple(0, 1, 2), tuple(0, 1, 3)))
      expectMsg(ChangeSet(positive = tupleBag(tuple(0, 1, 2), tuple(0, 1, 2))))

      counter ! ChangeSet(negative = tupleBag(tuple(0, 1, 2)))
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(0, 1, 2), tuple(0, 1, 3)),
        negative = tupleBag(tuple(0, 1, 2), tuple(0, 1, 2))))

      counter ! ChangeSet(negative = tupleBag(tuple(0, 1, 2)))
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(0, 1, 3)),
        negative = tupleBag(tuple(0, 1, 2), tuple(0, 1, 3))))
    }
  }
} 
Example 151
Source File: MapperTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class MapperTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "MapperNode" must {
    "map values" in {
      val changeSet = ChangeSet(
        positive = tupleBag(tuple(0, "something")),
        negative = tupleBag(tuple(0, "something else"))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val function = (n: Any) => n match {
        case s: String => s.length
      }
      val checker = system.actorOf(Props(new MapperNode(echoActor ! _, function, 1)))

      checker ! changeSet
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(0, "something".length)),
        negative = tupleBag(tuple(0, "something else".length))
      ))
    }
  }
} 
Example 152
Source File: TerminatorTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.{ChangeSet, Primary, Secondary}
import ingraph.ire.nodes.binary.JoinNode
import ingraph.ire.nodes.unary.{ProductionNode, SelectionNode}
import ingraph.ire.util.TestUtil._
import ingraph.ire.util.Utils.conversions._
import ingraph.ire.messages.Terminator
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Await
import scala.concurrent.duration.{Duration, _}

class TerminatorTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {
  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Unary nodes" must {
    "propagate terminator messages" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val production = system.actorOf(Props(new ProductionNode("alpha test", 2)))
      val intermediary = system.actorOf(Props(new SelectionNode(production ! _, c => true, expectedTerminatorCount = 2))) // TODO wtf
      //      val intermediary = system.actorOf(Props(new SelectionNode(production ! _, c => true)))
      val input1 = system.actorOf(Props(new SelectionNode(production ! _, c => true)))
      input1 ! ChangeSet(positive = tupleBag(tuple(15)))
      input1 ! ChangeSet(positive = tupleBag(tuple(19)))
      val input2 = system.actorOf(Props(new SelectionNode(intermediary ! _, c => true)))
      input2 ! ChangeSet(positive = tupleBag(tuple(25)))
      input2 ! ChangeSet(positive = tupleBag(tuple(29)))
      val input3 = system.actorOf(Props(new SelectionNode(intermediary ! _, c => true)))

      val terminator = Terminator(List(
        input1 ! _, input2 ! _, input3 ! _
      ), production)
      val future = terminator.send()
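      // ChangeSets sent after the terminator has been dispatched must not be part of its result,
      // which is why only the tuples sent before send() appear in `expected` below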
      input1 ! ChangeSet(positive = tupleBag(tuple(16)))
      input1 ! ChangeSet(positive = tupleBag(tuple(17)))
      input2 ! ChangeSet(positive = tupleBag(tuple(26)))
      input2 ! ChangeSet(positive = tupleBag(tuple(27)))
      val expected = Set(tuple(15), tuple(19), tuple(25), tuple(29))
      assert(Await.result(future, Duration(1, HOURS)).toSet == expected)
    }
  }
  "Binary nodes" must {
    "propagate terminator messages" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val production = system.actorOf(Props(new ProductionNode("")), "Production")
      val checker = system.actorOf(Props(new SelectionNode(production ! _, c => true)), "checker")
      val intermediary = system.actorOf(Props(new JoinNode(checker ! _, 1, 1, mask(0), mask(0))), "intermediary")
      val input1 = system.actorOf(Props(new JoinNode(intermediary ! Primary(_), 1, 1, mask(0), mask(0))), "inputBeta")
      val msg15 = ChangeSet(positive = tupleBag(tuple(15)))
      input1 ! Primary(msg15)
      input1 ! Secondary(msg15)
      intermediary ! Secondary(msg15)
      val msg25 = ChangeSet(positive = tupleBag(tuple(25)))
      input1 ! Primary(msg25)
      input1 ! Secondary(msg25)
      intermediary ! Secondary(msg25)

      val terminator = Terminator(List(input1.primary, input1.secondary, intermediary.secondary), production)
      val future = terminator.send()
      input1 ! Primary(ChangeSet(positive = tupleBag(tuple(16))))
      input1 ! Secondary(ChangeSet(positive = tupleBag(tuple(16))))
      intermediary ! Secondary(ChangeSet(positive = tupleBag(tuple(16))))

      assert(Await.result(future, Duration(1, HOURS)).toSet == Set(tuple(15), tuple(25)))
      assert(Await.result(terminator.send(), Duration(1, HOURS)).toSet == Set(tuple(15), tuple(25), tuple(16)))
      (1 to 500).foreach(i => {
        input1 ! Secondary(ChangeSet(negative = tupleBag(tuple(16))))
        assert(Await.result(terminator.send(), Duration(1, HOURS)).toSet == Set(tuple(15), tuple(25)))
        input1 ! Secondary(ChangeSet(positive = tupleBag(tuple(16))))
        intermediary ! Secondary(ChangeSet(negative = tupleBag(tuple(15))))
        assert(Await.result(terminator.send(), Duration(1, HOURS)).toSet == Set(tuple(25), tuple(16)))
        intermediary ! Secondary(ChangeSet(positive = tupleBag(tuple(15))))
        assert(Await.result(terminator.send(), Duration(1, HOURS)).toSet == Set(tuple(15), tuple(25), tuple(16)))
      })
    }
  }
  "Node splitting" should {
    "work" in {

    }
  }
} 
Example 153
Source File: CounterMultimapTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.collections

import ingraph.ire.collections.CounterMultimap
import org.scalatest.{Matchers, WordSpecLike}

class CounterMultimapTest extends WordSpecLike with Matchers {

  "A CounterMultimap" must {
    "add and remove" in {
      val map = new CounterMultimap[Int, Int]

      map.addBinding(1, 1)
      map.addBinding(1, 2)

      map.getCount(1, 1) shouldBe 1
      map.getCount(1, 2) shouldBe 1

      map.addBinding(1, 1)
      map.addBinding(1, 2)

      map.getCount(1, 1) shouldBe 2
      map.getCount(1, 2) shouldBe 2

      map.addBinding(1, 1)
      map.getCount(1, 1) shouldBe 3

      map.values(1).toSet should equal(Set(1, 2))

      map.removeBinding(1, 2)
      map.getCount(1, 2) shouldBe 1
      map.removeBinding(1, 2)
      map.getCount(1, 2) shouldBe 0

      map.values(1).toSet should equal(Set(1))

      map.removeBinding(1, 1)
      map.getCount(1, 1) shouldBe 2
      map.removeBinding(1, 1)
      map.getCount(1, 1) shouldBe 1
      map.removeBinding(1, 1)
      map.getCount(1, 1) shouldBe 0

      map.values(1).toSet should equal(Set())
      map.values(2).toSet should equal(Set())
    }
  }

} 
Example 154
Source File: UnionNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.binary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.{ChangeSet, Primary, Secondary}
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class UnionNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Union" must {
    "do simple set unions 0" in {
      val prim = ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(1, 3))
      )
      val sec = ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(1, 4))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val union = system.actorOf(Props(new UnionNode(echoActor ! _, all = false)))

      union ! Primary(prim)
      expectMsg(ChangeSet(positive = tupleBag(tuple(1, 2), tuple(1, 3))))
      union ! Secondary(sec)
      expectMsg(ChangeSet(positive = tupleBag(tuple(1, 4))))
    }

    "do simple bag unions 0" in {
      val prim = ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(1, 3))
      )
      val sec = ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(1, 4))
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val union = system.actorOf(Props(new UnionNode(echoActor ! _, all = true)))

      union ! Primary(prim)
      expectMsg(ChangeSet(positive = tupleBag(tuple(1, 2), tuple(1, 3))))
      union ! Secondary(sec)
      expectMsg(ChangeSet(positive = tupleBag(tuple(1, 2), tuple(1, 4))))
    }
  }
} 
Example 155
Source File: InputTransactionFactoryTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes

import akka.actor.{ActorSystem, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.inputs.InputTransactionFactory
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class InputTransactionFactoryTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {
  def this() = this(ActorSystem())

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "InputTransactionFactory" must {

    "send incoming data after subscription" in {
      val input = new InputTransactionFactory
      val echoActor = system.actorOf(TestActors.echoActorProps)
      input.subscribe(Map("test" -> (echoActor ! _)))
      val inputTransaction = input.newInputTransaction
      inputTransaction.add("test", tuple(6, 1L))
      inputTransaction.add("test", tuple(6, 2L))
      inputTransaction.sendAll
      expectMsg(ChangeSet(positive = tupleBag(tuple(6, 2), tuple(6, 1))))
    }

    "do no splitting in batch" in {
      val input = new InputTransactionFactory
      val echoActor = system.actorOf(TestActors.echoActorProps)
      input.subscribe(Map("test" -> (echoActor ! _)))
      val inputTransaction = input.newInputTransaction
      for (i <- 1 to 3) {
        inputTransaction.add("test", tuple(6, i))
      }
      inputTransaction.sendAll
      expectMsg(ChangeSet(positive = tupleBag(tuple(6, 3), tuple(6, 2), tuple(6, 1))))
    }

  }

} 
Example 156
Source File: DuplicateEliminationNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.stateless.unary

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil.{tuple, tupleBag}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class DuplicateEliminationNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "DuplicateElimination node" must {
    "do simple duplicate elimination 0" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val duplicateElimination = system.actorOf(Props(new DuplicateEliminationNode(echoActor ! _)))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(1, 2))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(1, 2))
      ))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(3, 4))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(3, 4))
      ))
    }

    "do simple duplicate elimination 1" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val duplicateElimination = system.actorOf(Props(new DuplicateEliminationNode(echoActor ! _)))

      duplicateElimination ! ChangeSet(positive = tupleBag(tuple(1)))
      expectMsg(ChangeSet(positive = tupleBag(tuple(1))))

      duplicateElimination ! ChangeSet(positive = tupleBag(tuple(1), tuple(1)))
      expectMsg(ChangeSet(positive = tupleBag(tuple(1))))
    }

  }
} 
Example 157
Source File: JournalServiceSpec.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.actors

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class JournalServiceSpec
    extends TestKit(ActorSystem("nsdb-test"))
    with ImplicitSender
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll {

  "A partition" when {
    "inserted" should {
      "be saved correctly" in {}
    }
  }

} 
Example 158
Source File: DuplicateEliminationNodeTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes vote down vote up
package ingraph.ire.nodes.unary

import akka.actor.{ActorSystem, Props, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import ingraph.ire.messages.ChangeSet
import ingraph.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

class DuplicateEliminationNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "DuplicateElimination node" must {
    "do simple duplicate elimination 0" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val duplicateElimination = system.actorOf(Props(new DuplicateEliminationNode(echoActor ! _)))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(1, 2))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(1, 2))
      ))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(1, 2), tuple(3, 4))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(3, 4))
      ))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(5, 6)),
        negative = tupleBag(tuple(1, 2))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(5, 6))
      ))

      duplicateElimination ! ChangeSet(
        positive = tupleBag(tuple(7, 8)),
        negative = tupleBag(tuple(1, 2))
      )
      expectMsg(ChangeSet(
        positive = tupleBag(tuple(7, 8)),
        negative = tupleBag(tuple(1, 2))
      ))
    }

    "do simple duplicate elimination 1" in {
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val duplicateElimination = system.actorOf(Props(new DuplicateEliminationNode(echoActor ! _)))

      duplicateElimination ! ChangeSet(positive = tupleBag(tuple(1)))
      expectMsg(ChangeSet(positive = tupleBag(tuple(1))))
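      // the node counts duplicates: further positives of tuple(1) below produce no output,
      // and the retraction is only forwarded once the matching number of negatives has arrived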

      duplicateElimination ! ChangeSet(positive = tupleBag(tuple(1)))
      duplicateElimination ! ChangeSet(positive = tupleBag(tuple(1)))

      duplicateElimination ! ChangeSet(negative = tupleBag(tuple(1)))
      duplicateElimination ! ChangeSet(negative = tupleBag(tuple(1)))
      duplicateElimination ! ChangeSet(negative = tupleBag(tuple(1)))
      expectMsg(ChangeSet(negative = tupleBag(tuple(1))))
    }

  }
} 
Example 159
Source File: ScanResourceSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.unicomplex

import java.util.concurrent.TimeUnit
import javax.management.ObjectName

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.Waiters
import org.scalatest.{BeforeAndAfterAll, Inspectors, Matchers, WordSpecLike}
import org.squbs.lifecycle.GracefulStop

import scala.util.Try

object ScanResourceSpec {

  val jmxPrefix = "ScanResourceSpec"

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = scanResourceSpec
       |  ${JMX.prefixConfig} = true
       |}
       |
       |default-listener.bind-port = 0
    """.stripMargin)

  implicit val akkaTimeout: Timeout =
    Try(System.getProperty("test.timeout").toLong) map { millis =>
      akka.util.Timeout(millis, TimeUnit.MILLISECONDS)
    } getOrElse Timeouts.askTimeout

  val boot = UnicomplexBoot(config)
    .createUsing {(name, config) => ActorSystem(name, config)}
    .scanResources()
    .initExtensions.start()
}

class ScanResourceSpec extends TestKit(ScanResourceSpec.boot.actorSystem) with ImplicitSender with WordSpecLike
    with Matchers with Inspectors with BeforeAndAfterAll with Waiters {

  import ScanResourceSpec._
  import system.dispatcher

  "The scanned resource" must {

    "have some actors started" in {
      val w = new Waiter

      system.actorSelection("/user/ScanResourceCube").resolveOne().onComplete { result =>
        w {
          assert(result.isSuccess)
        }
        w.dismiss()
      }
      w.await()
    }

    "expose proper cube state through MXBean" in {
      import org.squbs.unicomplex.JMX._
      val cubeName = "ScanResourceCube"
      val cubesName = new ObjectName(prefix(system) + cubeStateName + cubeName)
      get(cubesName, "Name") should be (cubeName)
      get(cubesName, "CubeState") should be ("Active")
      val wellKnownActors = get(cubesName, "WellKnownActors").asInstanceOf[String]
      println(wellKnownActors)
      wellKnownActors should include ("Actor[akka://scanResourceSpec/user/ScanResourceCube/Prepender#")
      wellKnownActors should include ("Actor[akka://scanResourceSpec/user/ScanResourceCube/Appender#")
    }
  }

  override protected def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }
} 
Example 160
Source File: StreamTestSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.unicomplex

import akka.actor._
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.pattern._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.FileIO
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.Waiters
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex.Timeouts._

import scala.concurrent.Await

object StreamTestSpec {
  val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath

  val classPaths = Array(
    "StreamCube",
    "StreamSvc"
  ) map (dummyJarsDir + "/" + _)

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = StreamTestSpec
       |  ${JMX.prefixConfig} = true
       |}
       |default-listener.bind-port = 0
    """.stripMargin
  )

  val boot = UnicomplexBoot(config)
    .createUsing {(name, config) => ActorSystem(name, config)}
    .scanComponents(classPaths)
    .initExtensions
    .start()
}

class StreamTestSpec extends TestKit(StreamTestSpec.boot.actorSystem) with ImplicitSender with WordSpecLike
    with Matchers with BeforeAndAfterAll with Waiters {

  implicit val am = ActorMaterializer()
  import system.dispatcher

  val portBindings = Await.result((Unicomplex(system).uniActor ? PortBindings).mapTo[Map[String, Int]], awaitMax)
  val port = portBindings("default-listener")

  override def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }

  "UniComplex" must {

    "upload file with correct parts" in {

      val filePath =
        StreamTestSpec.getClass.getResource("/classpaths/StreamSvc/dummy.txt").getPath
      val file = new java.io.File(filePath)
      require(file.exists() && file.canRead)

      val chunkSize = 8192
      val responseF = Http().singleRequest(HttpRequest(HttpMethods.POST,
                                           uri = s"http://127.0.0.1:$port/streamsvc/file-upload",
                                           entity = HttpEntity(MediaTypes.`application/octet-stream`,
                                                               FileIO.fromPath(file.toPath, chunkSize))))

      val actualResponseEntity = Await.result(responseF flatMap extractEntityAsString, awaitMax)
      val expectedNumberOfChunks = Math.ceil(file.length.toDouble / chunkSize).toInt
      val expectedResponseEntity = s"Chunk Count: $expectedNumberOfChunks ByteCount: ${file.length}"
      actualResponseEntity should be (expectedResponseEntity)
    }
  }
} 
Example 161
Source File: ArtifactS3SaverTest.scala    From marvin-engine-executor   with Apache License 2.0 5 votes vote down vote up
package org.marvin.artifact.manager

import java.io.File

import akka.Done
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import com.amazonaws.services.s3.AmazonS3
import com.amazonaws.services.s3.model.GetObjectRequest
import com.typesafe.config.ConfigFactory
import org.apache.hadoop.fs.Path
import org.marvin.artifact.manager.ArtifactSaver.{SaveToLocal, SaveToRemote}
import org.marvin.fixtures.MetadataMock
import org.marvin.model.EngineMetadata
import org.scalamock.scalatest.MockFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}


class ArtifactS3SaverTest extends TestKit(
  ActorSystem("ArtifactS3SaverTest", ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]""")))
  with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with MockFactory {

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "s3 saver" should {
    "receive SaveToLocal message" in {
      val metadata = MetadataMock.simpleMockedMetadata()
      val _s3Client = mock[AmazonS3]
      val actor = system.actorOf(Props(new ArtifactS3SaverMock(metadata, _s3Client, true)))

      val protocol = "protocol"
      val artifactName = "model"

      (_s3Client.getObject(_ : GetObjectRequest, _ : File)).expects(*, *).once()

      actor ! SaveToLocal(artifactName, protocol)

      expectMsg(Done)
    }

    "receive SaveToRemote message" in {
      val metadata = MetadataMock.simpleMockedMetadata()
      val _s3Client = mock[AmazonS3]
      val actor = system.actorOf(Props(new ArtifactS3SaverMock(metadata, _s3Client, true)))

      val protocol = "protocol"
      val artifactName = "model"

      (_s3Client.putObject(_ : String, _: String, _ : File)).expects(metadata.s3BucketName, *, *).once()

      actor ! SaveToRemote(artifactName, protocol)

      expectMsg(Done)
    }
  }

    "call preStart method wth success" in {
      val metadata = MetadataMock.simpleMockedMetadata()
      try{
        system.actorOf(Props(new ArtifactS3Saver(metadata)))
        assert(true)
      }catch {
        case _: Throwable =>
          assert(false)
      }
    }

  class ArtifactS3SaverMock(metadata: EngineMetadata, _s3Client: AmazonS3, _isRemote: Boolean) extends ArtifactS3Saver(metadata) {
    def _preStart(): Unit = super.preStart()
    override def preStart(): Unit = {
      s3Client = _s3Client
    }

    override def validatePath(path: Path, isRemote: Boolean): Boolean = {
      if (_isRemote) true
      else false
    }
  }
} 
Example 162
Source File: EnumSpec.scala    From pbdirect   with MIT License 5 votes vote down vote up
package pbdirect

import org.scalatest.{Matchers, WordSpecLike}

class EnumSpec extends WordSpecLike with Matchers {

  sealed trait WeekDay extends Pos
  case object Monday extends WeekDay with Pos._1
  case object Tuesday extends WeekDay with Pos._2
  case object Wednesday extends WeekDay with Pos._3
  case object Thursday extends WeekDay with Pos._4
  case object Friday extends WeekDay with Pos._5
  case object Saturday extends WeekDay with Pos._6
  case object Sunday extends WeekDay with Pos._7

  "Enum" should {
    "list values in declared order" in {
      Enum.values[WeekDay] shouldBe Monday :: Tuesday :: Wednesday :: Thursday :: Friday :: Saturday :: Sunday :: Nil
    }
    "get correct position for a value" in {
      Enum.toInt[WeekDay](Monday) shouldBe 0
      Enum.toInt[WeekDay](Tuesday) shouldBe 1
      Enum.toInt[WeekDay](Wednesday) shouldBe 2
      Enum.toInt[WeekDay](Thursday) shouldBe 3
      Enum.toInt[WeekDay](Friday) shouldBe 4
      Enum.toInt[WeekDay](Saturday) shouldBe 5
      Enum.toInt[WeekDay](Sunday) shouldBe 6
    }
    "get correct value for a position" in {
      Enum.fromInt[WeekDay](0) shouldBe Monday
      Enum.fromInt[WeekDay](1) shouldBe Tuesday
      Enum.fromInt[WeekDay](2) shouldBe Wednesday
      Enum.fromInt[WeekDay](3) shouldBe Thursday
      Enum.fromInt[WeekDay](4) shouldBe Friday
      Enum.fromInt[WeekDay](5) shouldBe Saturday
      Enum.fromInt[WeekDay](6) shouldBe Sunday
    }
  }
} 
Example 163
Source File: InMemoryPersistenceActorSpec.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.persistence

import java.util.concurrent.TimeUnit

import akka.actor.{ ActorSystem, Props }
import akka.testkit.{ ImplicitSender, TestKit, TestProbe }
import akka.util.Timeout
import com.typesafe.scalalogging.LazyLogging
import io.vamp.common.akka.IoC
import io.vamp.common.vitals.InfoRequest
import io.vamp.common.{ Artifact, Namespace, NamespaceProvider }
import io.vamp.persistence.notification.UnsupportedPersistenceRequest
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import scala.concurrent.Await
import scala.concurrent.duration._

object TestArtifact {
  val kind: String = "TestArtifact"
}

class TestArtifact extends Artifact {
  override def name = "TestArtifact"

  override def kind = "TestArtifact"

  override def metadata = Map("name" → "testArtifact")
}

class TestInMemoryPersistenceActor extends InMemoryPersistenceActor {
  override protected def type2string(`type`: Class[_]): String = `type` match {
    // test artifact
    case t if classOf[TestArtifact].isAssignableFrom(t) ⇒ TestArtifact.kind
    case _ ⇒ throwException(UnsupportedPersistenceRequest(`type`))
  }
}

class InMemoryPersistenceActorSpec extends TestKit(ActorSystem("InMemoryPersistenceActorSpec")) with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll with NamespaceProvider
    with LazyLogging {

  implicit val namespace: Namespace = Namespace("default")
  implicit val timeout: Timeout = Timeout(5L, TimeUnit.SECONDS)

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "InMemoryPersistenceActor" must {
    "reply to InfoRequest" in {
      val testProbe = TestProbe("test")

      val actors = Await.result(IoC.createActor(Props(classOf[InMemoryPersistenceActor])).map(_ :: Nil)(system.dispatcher), 5.seconds)
      val actor = actors.head
      val expectedResponse = Map("database" →
        Map(
          "status" → "valid",
          "artifacts" → Map(),
          "type" → "in-memory [no persistence]"
        ), "archiving" → true)
      testProbe.send(actor, InfoRequest)
      testProbe.expectMsgPF(30.seconds) {
        case response: Map[_, _] ⇒
          logger.info(response.toString)
          assert(response == expectedResponse)
        case _ ⇒
          fail("Unexpected message")
      }
    }

    "reply to Create" in {
      val testProbe = TestProbe("test")
      val actors = Await.result(IoC.createActor(Props(classOf[TestInMemoryPersistenceActor])).map(_ :: Nil)(system.dispatcher), 5.seconds)
      val actor = actors.head
      val artifact = new TestArtifact()
      val expectedResponse = List[TestArtifact](artifact)
      val source = "testSource"
      testProbe.send(actor, PersistenceActor.Create(artifact, Option(source)))
      testProbe.expectMsgPF(30.seconds) {
        case response: List[_] ⇒
          logger.info(response.toString)
          assert(response === expectedResponse)
        case _ ⇒
          fail("Unexpected message")
      }
    }
  }
} 
Example 164
Source File: IoCSpec.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.common.akka

import java.util.concurrent.TimeUnit

import akka.actor.{ ActorSystem, Props }
import akka.testkit.{ ImplicitSender, TestKit, TestProbe }
import akka.util.Timeout
import com.typesafe.scalalogging.LazyLogging
import io.vamp.common.notification.Notification
import io.vamp.common.{ ClassMapper, Namespace, NamespaceProvider }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

class IoCSpec extends TestKit(ActorSystem("IoCSpec")) with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll with NamespaceProvider
    with LazyLogging {

  implicit val namespace: Namespace = Namespace("default")
  implicit val timeout: Timeout = Timeout(5L, TimeUnit.SECONDS)

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Echo actor" must {

    "echo message" in {

      val testProbe = TestProbe("test")

      val actors = Await.result(IoC.createActor(Props(classOf[EchoActor])).map(_ :: Nil)(system.dispatcher), 5.seconds)
      val actor = actors.head
      val testMessage = "Example Message"
      testProbe.send(actor, testMessage)
      testProbe.expectMsgPF(30.seconds) {
        case response: String ⇒
          logger.info(response.toString)
          assert(response == testMessage)
        case _ ⇒
          fail("Unexpected message")
      }
    }
  }
}

class EchoActorMapper extends ClassMapper {
  val name = "echo"
  val clazz: Class[_] = classOf[EchoActor]
}

class EchoActor extends CommonSupportForActors {
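  // Echoes the received text back via reply; echo wraps the text in a Future.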
  override def receive: Receive = {
    case text: String ⇒ reply(echo(text))
  }

  private def echo(text: String): Future[String] = Future { text }

  override def message(notification: Notification): String = "echo actor message"

  override def info(notification: Notification): Unit = log.info("echo actor info")

  override def reportException(notification: Notification): Exception = new Exception("Echo actor notification report")
} 
Example 165
Source File: STMultiNodeSpec.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package sample.multinode

import akka.remote.testkit.{ MultiNodeSpec, MultiNodeSpecCallbacks }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import scala.language.implicitConversions

trait STMultiNodeSpec extends MultiNodeSpecCallbacks with WordSpecLike with Matchers with BeforeAndAfterAll {
  self: MultiNodeSpec =>

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    multiNodeSpecBeforeAll()
  }

  override protected def afterAll(): Unit = {
    multiNodeSpecAfterAll()
    super.afterAll()
  }

  // Might not be needed anymore if we find a nice way to tag all logging from a node
  implicit override def convertToWordSpecStringWrapper(s: String): WordSpecStringWrapper =
    new WordSpecStringWrapper(s"$s (on node '${self.myself.name}', $getClass)")
} 
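The trait above is meant to be mixed into a concrete MultiNodeSpec. Below is a minimal sketch of such a spec using the standard akka-multi-node-testkit API; the config object, class names, and barrier name are illustrative, and the per-JVM classes follow the sbt-multi-jvm naming convention.

package sample.multinode

import akka.remote.testkit.{ MultiNodeConfig, MultiNodeSpec }

// Illustrative two-node configuration.
object SampleConfig extends MultiNodeConfig {
  val node1 = role("node1")
  val node2 = role("node2")
}

// One concrete class per JVM, as sbt-multi-jvm expects.
class SampleSpecMultiJvmNode1 extends SampleSpec
class SampleSpecMultiJvmNode2 extends SampleSpec

class SampleSpec extends MultiNodeSpec(SampleConfig) with STMultiNodeSpec {
  // Every role defined in the config must join before the tests run.
  override def initialParticipants: Int = roles.size

  "A two-node sample" must {
    "wait for all nodes to enter the startup barrier" in {
      enterBarrier("startup")
    }
  }
}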
Example 166
Source File: DonutBakingActorFSMTests.scala    From learn-akka   with Apache License 2.0 5 votes vote down vote up
package com.allaboutscala.learn.akka.fsm

import akka.actor.ActorSystem
import akka.testkit.{TestKit, ImplicitSender, DefaultTimeout, TestFSMRef}
import com.allaboutscala.learn.akka.fsm.Tutorial_09_AkkaFSM_PartSix._
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, Matchers}


class DonutBakingActorFSMTests
  extends TestKit(ActorSystem("DonutActorFSM"))
    with ImplicitSender
    with DefaultTimeout
    with WordSpecLike
    with BeforeAndAfterAll
    with Matchers {

  private var donutBakingActorFSM: TestFSMRef[BakingStates, BakingData, DonutBakingActor] = _
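  // TestFSMRef gives synchronous access to the FSM's state name and state data used in the assertions below.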

  override protected def beforeAll(): Unit = {
    donutBakingActorFSM = TestFSMRef(new DonutBakingActor())
  }

  "DonutBakingActor" should {
    "have initial state of BakingStates.Stop" in {
      donutBakingActorFSM.stateName shouldEqual Stop
    }
  }

  import scala.concurrent.duration._
  "DonutBakingActor" should {
    "process BakeDonut event and switch to the BakingStates.Start state" in {
      donutBakingActorFSM ! BakeDonut
      awaitCond(donutBakingActorFSM.stateName == Start, 2.seconds, 1.second)
    }
  }

  "DonutBakingActor" should {
    "process StopBaking event and switch to BakingStates.Stop state" in {
      donutBakingActorFSM ! StopBaking
      awaitCond(donutBakingActorFSM.stateName == Stop, 2.seconds, 1.second)
    }
  }


  "DonutBakingActor current donut quantity" should {
    "equal to 1 after the StopBaking event" in {
      donutBakingActorFSM.stateData.qty shouldEqual 1
    }
  }

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
} 
Example 167
Source File: ByteStrTest.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.common.state

import com.wavesplatform.common.utils.{Base58, Base64}
import org.scalatest.{Matchers, WordSpecLike}

class ByteStrTest extends Matchers with WordSpecLike {

  private def getSeqBytesArr(size: Int, from: Int = 1): Array[Byte] = (from until (from + size) map (_.toByte)).toArray

  "ByteStr" should {

    "correctly serialize int/boolean values" in {
      ByteStr.fromBytes(1).arr shouldBe Array[Byte](1)               // ByteVector(1)
      ByteStr.fromBytes(-100).arr shouldBe Array[Byte](-100)         // ByteVector(-100)
      ByteStr.fromBytes(Byte.MaxValue).arr shouldBe Array[Byte](127) // ByteVector(Byte.MaxValue.toInt)
    }

    "correctly serialize long values" in {
      ByteStr.fromLong(0x0102030405060708L).arr shouldBe Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)        // ByteVector.fromLong(0x0102030405060708L)
      ByteStr.fromLong(33L).arr shouldBe Array[Byte](0, 0, 0, 0, 0, 0, 0, 33)                       // ByteVector.fromLong(33L)
      ByteStr.fromLong(Int.MaxValue.toLong).arr shouldBe Array[Byte](0, 0, 0, 0, 127, -1, -1, -1)   // ByteVector.fromLong(Int.MaxValue.toLong)
      ByteStr.fromLong(Int.MinValue.toLong).arr shouldBe Array[Byte](-1, -1, -1, -1, -128, 0, 0, 0) // ByteVector.fromLong(Int.MinValue.toLong)
    }

    "be correctly created via fill method" in {
      ByteStr.fill(5)(0).arr shouldBe Array[Byte](0, 0, 0, 0, 0) // ByteVector.fill(5)(0)
    }

    "be correctly concatenated with another one" in {
      ByteStr(Array[Byte](1, 2, 3)) ++ ByteStr(Array[Byte](4, 5, 6)) shouldBe ByteStr(getSeqBytesArr(6)) // ByteVector(Array[Byte](1, 2, 3)) ++ ByteVector(Array[Byte](4, 5, 6))
    }

    "correctly take several bytes" in {
      ByteStr(getSeqBytesArr(10)).take(6) shouldBe ByteStr(getSeqBytesArr(6))
      ByteStr(getSeqBytesArr(10)).take(Int.MaxValue) shouldBe ByteStr(getSeqBytesArr(10))
      ByteStr(getSeqBytesArr(10)).take(Int.MinValue) shouldBe ByteStr.empty
    }

    "correctly drop several bytes" in {
      ByteStr(getSeqBytesArr(10)).drop(6) shouldBe ByteStr(Array[Byte](7, 8, 9, 10))
      ByteStr(getSeqBytesArr(10)).drop(Int.MaxValue) shouldBe ByteStr.empty
      ByteStr(getSeqBytesArr(10)).drop(Int.MinValue) shouldBe ByteStr(getSeqBytesArr(10))
    }

    "correctly takeRight several bytes" in {
      ByteStr(getSeqBytesArr(10)).takeRight(6) shouldBe ByteStr(Array[Byte](5, 6, 7, 8, 9, 10))
      ByteStr(getSeqBytesArr(3)).takeRight(-100) shouldBe ByteStr.empty
      ByteStr(getSeqBytesArr(3)).takeRight(100) shouldBe ByteStr(getSeqBytesArr(3))
    }

    "correctly dropRight several bytes" in {
      ByteStr(getSeqBytesArr(10)).dropRight(6) shouldBe ByteStr(Array[Byte](1, 2, 3, 4))
      ByteStr(getSeqBytesArr(3)).dropRight(-100) shouldBe ByteStr(getSeqBytesArr(3))
      ByteStr(getSeqBytesArr(3)).dropRight(100) shouldBe ByteStr.empty
    }

    "serialize to base64 if huge" in {
      val arr      = new Array[Byte](1024)
      val expected = "base64:" + Base64.encode(arr)
      ByteStr(arr).toString shouldBe expected
    }

    "serialize to base58 if small" in {
      val arr      = new Array[Byte](1023)
      val expected = Base58.encode(arr)
      ByteStr(arr).toString shouldBe expected
    }

    "trim using base64 if huge" in {
      val arr      = new Array[Byte](1024)
      val expected = Base64.encode(arr) + "..."
      ByteStr(arr).trim shouldBe expected
    }

    "trim using base64 if small" in {
      val arr      = new Array[Byte](1023)
      val expected = Base58.encode(arr).take(7) + "..."
      ByteStr(arr).trim shouldBe expected
    }
  }
} 
Example 168
Source File: CoordinatorSpec.scala    From cave   with MIT License 5 votes vote down vote up
package actors

import akka.actor.{ActorSystem, PoisonPill, Props}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import com.cave.metrics.data._
import init.AwsWrapper
import init.AwsWrapper.WorkItem
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import org.specs2.matcher.ShouldMatchers

import scala.concurrent.{Future, ExecutionContext}
import scala.util.Success

class CoordinatorSpec extends TestKit(ActorSystem()) with WordSpecLike with ShouldMatchers with ImplicitSender with BeforeAndAfterAll with AlertJsonData with MockitoSugar {

  val mockAwsWrapper = mock[AwsWrapper]
  val mockDataManager = mock[CacheDataManager]

  override def afterAll() = {
    system.shutdown()
  }

  "A coordinator" must {

    "create schedulers for all enabled alerts" in {

      val SomeId = "1234"
      val AnotherId = "4321"
      val OtherId = "12345"

      val alerts = List(
        Schedule(OrgName, Some(TeamName), None, NotificationUrl, Alert(Some(SomeId), AlertDescription, AlertEnabled, AlertPeriod, AlertCondition, Some(AlertHandbookUrl), Some(AlertRouting))),
        Schedule(OrgName, Some(TeamName), None, NotificationUrl, Alert(Some(AnotherId), AlertDescription, AlertEnabled, AlertPeriod, AlertCondition, Some(AlertHandbookUrl), Some(AlertRouting))))

      val moreAlerts = List(
        Schedule(TeamName, None, None, NotificationUrl, Alert(Some(OtherId), AlertDescription, AlertEnabled, AlertPeriod, AlertCondition, Some(AlertHandbookUrl), Some(AlertRouting)))
      )
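      // Two organisations with three enabled alerts in total; the AWS queue is stubbed to return no work items.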

      when(mockDataManager.getEnabledAlerts()).thenReturn(Success(Map(OrgName -> alerts, TeamName -> moreAlerts)))
      when(mockAwsWrapper.receiveMessages()(any[ExecutionContext])).thenReturn(Future.successful(List.empty[WorkItem]))
      val coordinator = TestActorRef(Props(new Coordinator(mockAwsWrapper, mockDataManager) {
        override def createScheduler(schedule: Schedule) = {}
      }))

      coordinator ! Coordinator.StatusRequest

      expectMsgPF() {
        case Coordinator.StatusResponse(cache, schedules) =>
          cache.schedulesByOrganization should haveSize(2)
          val forOrgName = cache.schedulesByOrganization(OrgName)
          forOrgName should haveSize(2)
          val forTeamName = cache.schedulesByOrganization(TeamName)
          forTeamName should haveSize(1)

          schedules should haveSize(3)

        case _ => fail("Unexpected message received.")
      }


      coordinator ! PoisonPill
      watch(coordinator)
      expectTerminated(coordinator)
    }
  }
} 
Example 169
Source File: CoordinatorSpec.scala    From cave   with MIT License 5 votes vote down vote up
package worker

import akka.actor._
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import com.cave.metrics.data._
import init.AwsWrapper
import init.AwsWrapper.WorkItem
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Success

class CoordinatorSpec extends TestKit(ActorSystem()) with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll with AlertJsonData with MockitoSugar {

  def fakeCoordinator(awsWrapper: AwsWrapper, mockCheckers: mutable.Map[ActorRef, WorkItem]): Props = Props(new Coordinator(awsWrapper, shouldSendHistory = false) {

    override val checkers = mockCheckers
    override def createNotifier(item: WorkItem): Unit = { }
  })

  def fakeChecker(parentCoordinator: ActorRef): Props = Props(new Actor {
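    // Translates simple string commands into Checker protocol messages sent to the coordinator, then stops itself.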
    def receive = {
      case "abort" =>
        parentCoordinator ! Checker.Aborted("Boom!")
        context stop self
      case "true" =>
        parentCoordinator ! Checker.Done(Success(true))
        context stop self
      case "false" =>
        parentCoordinator ! Checker.Done(Success(false))
        context stop self
    }
  })

  val mockAwsWrapper = mock[AwsWrapper]
  val mockDataManager = mock[CacheDataManager]

  override def afterAll() = {
    system.shutdown()
  }

  "A coordinator" must {

    "return its status" in {
      when(mockAwsWrapper.receiveMessages()(any[ExecutionContext])).thenReturn(Future.successful(List.empty[WorkItem]))

      val checkers = mutable.Map.empty[ActorRef, WorkItem]
      val mockItem = mock[WorkItem]

      val coordinator = TestActorRef(fakeCoordinator(mockAwsWrapper, checkers))

      val checker1 = TestActorRef(fakeChecker(coordinator))
      val checker2 = TestActorRef(fakeChecker(coordinator))
      val checker3 = TestActorRef(fakeChecker(coordinator))
      val checker4 = TestActorRef(fakeChecker(coordinator))
      val checker5 = TestActorRef(fakeChecker(coordinator))
      val checker6 = TestActorRef(fakeChecker(coordinator))

      checkers ++= mutable.Map(checker1 -> mockItem, checker2 -> mockItem, checker3 -> mockItem,
        checker4 -> mockItem, checker5 -> mockItem, checker6 -> mockItem)
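      // Two checkers abort, three complete without triggering, and one reports an alarm triggered.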

      checker1 ! "abort"
      checker2 ! "abort"
      checker3 ! "false"
      checker4 ! "false"
      checker5 ! "false"
      checker6 ! "true"

      coordinator ! Coordinator.StatusRequest

      expectMsgPF() {
        case Coordinator.StatusResponse(currentlyActive, aborted, totalProcessed, noOfAlarmsTriggered) =>
          currentlyActive should be(0)
          aborted should be(2)
          noOfAlarmsTriggered should be(1)
          totalProcessed should be(4)
        case _ => fail("Unexpected message received.")
      }

      coordinator ! PoisonPill
      watch(coordinator)
      expectTerminated(coordinator)
    }
  }
} 
Example 170
Source File: NotifierSpec.scala    From cave   with MIT License 5 votes vote down vote up
package worker

import java.util.concurrent.Executor

import akka.actor._
import akka.testkit.TestKit
import com.cave.metrics.data.{AlertJsonData, Check}
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import worker.web.{BadStatus, NotificationSender}

import scala.concurrent.Future

object NotifierSpec extends AlertJsonData {

  object FakeNotificationSender extends NotificationSender {
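    // Chooses the outcome from the alert description: success, failure, or a failed future with BadStatus(401).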
    def send(check: Check)(implicit exec: Executor): Future[Boolean] =
      if (check.schedule.alert.description == AlertDescription) Future.successful(true)
      else if (check.schedule.alert.description == AlertFiveDescription) Future.successful(false)
      else Future.failed(BadStatus(401))

    def shutdown(): Unit = { }
  }

  def fakeNotifier(n: Check): Props = Props(new Notifier(n) {
    override def client = FakeNotificationSender
  })
}

class NotifierSpec extends TestKit(ActorSystem()) with WordSpecLike with BeforeAndAfterAll {

  import worker.NotifierSpec._

  override def afterAll() = {
    system.shutdown()
  }

  "A notifier" must {
    "send Done(true) when successful" in {
      val notifier = system.actorOf(Props(new StepParent(fakeNotifier(InsufficientOrders), testActor)), "successful")

      expectMsg(Notifier.Done(result = true))
      watch(notifier)
      expectTerminated(notifier)
    }

    "send Done(false) when unsuccessful" in {
      val notifier = system.actorOf(Props(new StepParent(fakeNotifier(InsufficientOrdersFive), testActor)), "unsuccessful")

      expectMsg(Notifier.Done(result = false))
      watch(notifier)
      expectTerminated(notifier)
    }

    "properly finish in case of error" in {
      val notifier = system.actorOf(Props(new StepParent(fakeNotifier(OrdersLessThanPredicted), testActor)), "error")

      watch(notifier)
      expectTerminated(notifier)
    }
  }
}