org.scalatest.Matchers Scala Examples

The following examples show how to use org.scalatest.Matchers. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
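Before diving into the project code, here is a minimal, self-contained sketch of the matcher syntax that recurs throughout the examples below. It assumes a ScalaTest version (e.g. 3.0.x) where the trait is still available as org.scalatest.Matchers (in ScalaTest 3.1+ it moved to org.scalatest.matchers.should.Matchers); the class name MinimalMatchersSpec is made up for illustration.

import org.scalatest.{FlatSpec, Matchers}

// Minimal sketch, not taken from any project below: just the core matcher idioms.
class MinimalMatchersSpec extends FlatSpec with Matchers {

  "A List" should "support the common matcher syntax" in {
    val xs = List(1, 2, 3)
    xs should have size 3                                     // size matcher
    xs.head shouldBe 1                                        // simple equality
    xs should contain (2)                                     // containment
    xs.sum should equal (6)                                   // equality via equal
    (xs.sum.toDouble / xs.size) should be (2.0 +- 0.1)        // numeric tolerance
    an [IndexOutOfBoundsException] should be thrownBy xs(10)  // exception matcher
  }
}

The same Matchers trait is mixed into WordSpec, FlatSpec, FunSuite and FeatureSpecLike styles in the examples that follow; the test-declaration style changes, but the matcher syntax does not.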
Example 1
Source File: TimeLimitedFutureSpec.scala    From gfc-concurrent   with Apache License 2.0    9 votes
package com.gilt.gfc.concurrent

import java.util.concurrent.TimeoutException
import scala.concurrent.{ Future, Await }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.scalatest.{WordSpec, Matchers}

class TimeLimitedFutureSpec extends WordSpec with Matchers {
  import TimeLimitedFutureSpec._

  "RichFuture" when {
    import ScalaFutures._

    "waiting for a result to happen" should {
      "return the completed original Future if it completes before the given timeout" in {
        val now = System.currentTimeMillis
        val future: Future[String] = (Future { Thread.sleep(1000); "Here I am" }).withTimeout(Duration(5, "seconds"))
        val msg: String = Await.result(future, Duration(10, "seconds"))
        val elapsed = (System.currentTimeMillis - now)
        msg should equal ("Here I am")
        elapsed should be (2000L +- 1000L)
      }

      "return the failure of the original Future if it fails before the given timeout" in {
        val now = System.currentTimeMillis
        val future = (Future { Thread.sleep(1000); throw new NullPointerException("That hurts!") }).withTimeout(Duration(5, "seconds"))
        a [NullPointerException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed = (System.currentTimeMillis - now)
        elapsed should be (2000L +- 1000L)
      }

      "return the timeout of the original Future if it had one and it went off and was shorter than the given one" in {
        val now = System.currentTimeMillis
        val timingOutEarlier = Timeouts.timeout(Duration(1, "seconds"))
        val future = timingOutEarlier.withTimeout(Duration(5, "seconds"))
        a [TimeoutException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 500l
        elapsed should be <= 4000l
      }

      "return the timeout if the original Future does not timeout of its own" in {
        val now = System.currentTimeMillis
        val timingOutLater = Timeouts.timeout(Duration(3, "seconds"))
        val future = timingOutLater.withTimeout(Duration(1, "seconds"))
        a [TimeoutException] should be thrownBy  { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 1000l
        elapsed should be <= 2500l
      }
    }

    // an example of how it could be used
    "used in our most common use case" should {
      "fit nicely" in {
        val call: Future[String] = svcCall(1000).withTimeout(Duration(5000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call, Duration(10, "seconds")) should be ("data-1000")

        val call2: Future[String] = svcCall(5000).withTimeout(Duration(1000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call2, Duration(10, "seconds")) should be ("recover.timeout")
      }
    }
  }
}

object TimeLimitedFutureSpec {
  def svcCall(latency: Long): Future[String] = Future { Thread.sleep(latency); s"data-${latency}" }
} 
Example 2
Source File: NestedCaseClassesTest.scala    From cleanframes   with Apache License 2.0    8 votes
package cleanframes

import com.holdenkarau.spark.testing.DataFrameSuiteBase
import org.apache.spark.sql.functions
import org.scalatest.{FlatSpec, Matchers}


class NestedCaseClassesTest
  extends FlatSpec
    with Matchers
    with DataFrameSuiteBase {

  "Cleaner" should "compile and use a custom transformer for a custom type" in {
    import cleanframes.syntax._ // to use `.clean`
    import spark.implicits._

    // define test data for a dataframe
    val input = Seq(
      // @formatter:off
      ("1",           "1",          "1",           "1",           null),
      (null,          "2",          null,          "2",           "corrupted"),
      ("corrupted",   null,         "corrupted",   null,          "true"),
      ("4",           "corrupted",  "4",           "4",           "false"),
      ("5",           "5",          "5",           "corrupted",   "false"),
      ("6",           "6",          "6",           "6",           "true")
      // @formatter:on
    )
      // give column names that are known to you
      .toDF("col1", "col2", "col3", "col4", "col5")

    // import standard functions for conversions shipped with the library
    import cleanframes.instances.all._

    // !important: you need to give a new structure to allow access to sub-elements
    val renamed = input.select(
      functions.struct(
        input.col("col1") as "a_col_1",
        input.col("col2") as "a_col_2"
      ) as "a",
      functions.struct(
        input.col("col3") as "b_col_1",
        input.col("col4") as "b_col_2"
      ) as "b",
      input.col("col5") as "c"
    )

    val result = renamed.clean[AB]
      .as[AB]
      .collect

    result should {
      contain theSameElementsAs Seq(
        // @formatter:off
        AB( A(Some(1), Some(1)),  B(Some(1),  Some(1.0)), Some(false)),
        AB( A(None,    Some(2)),  B(None,     Some(2.0)), Some(false)),
        AB( A(None,    None),     B(None,     None),      Some(true)),
        AB( A(Some(4), None),     B(Some(4),  Some(4.0)), Some(false)),
        AB( A(Some(5), Some(5)),  B(Some(5),  None),      Some(false)),
        AB( A(Some(6), Some(6)),  B(Some(6),  Some(6.0)), Some(true))
        // @formatter:on
      )
    }
  }

}

case class A(a_col_1: Option[Int], a_col_2: Option[Float])

case class B(b_col_1: Option[Float], b_col_2: Option[Double])

case class AB(a: A, b: B, c: Option[Boolean]) 
Example 3
Source File: TanhSpec.scala    From BigDL   with Apache License 2.0    7 votes
package com.intel.analytics.bigdl.nn

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn.tf.TanhGrad
import com.intel.analytics.bigdl.utils.T
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}

import scala.math.abs

@com.intel.analytics.bigdl.tags.Parallel
class TanhSpec extends FlatSpec with Matchers {
  "A Tanh Module " should "generate correct output and grad" in {
    val module = new Tanh[Double]()
    val input = Tensor[Double](2, 2, 2)
    input(Array(1, 1, 1)) = -0.17020166106522
    input(Array(1, 1, 2)) = 0.57785657607019
    input(Array(1, 2, 1)) = -1.3404131438583
    input(Array(1, 2, 2)) = 1.0938102817163
    input(Array(2, 1, 1)) = 1.120370157063
    input(Array(2, 1, 2)) = -1.5014141565189
    input(Array(2, 2, 1)) = 0.3380249235779
    input(Array(2, 2, 2)) = -0.625677742064
    val gradOutput = Tensor[Double](2, 2, 2)
    gradOutput(Array(1, 1, 1)) = 0.79903302760795
    gradOutput(Array(1, 1, 2)) = 0.019753993256018
    gradOutput(Array(1, 2, 1)) = 0.63136631483212
    gradOutput(Array(1, 2, 2)) = 0.29849314852618
    gradOutput(Array(2, 1, 1)) = 0.94380705454387
    gradOutput(Array(2, 1, 2)) = 0.030344664584845
    gradOutput(Array(2, 2, 1)) = 0.33804601291195
    gradOutput(Array(2, 2, 2)) = 0.8807330634445
    val expectedOutput = Tensor[Double](2, 2, 2)
    expectedOutput(Array(1, 1, 1)) = -0.16857698275003
    expectedOutput(Array(1, 1, 2)) = 0.52110579963112
    expectedOutput(Array(1, 2, 1)) = -0.87177144344863
    expectedOutput(Array(1, 2, 2)) = 0.79826462420686
    expectedOutput(Array(2, 1, 1)) = 0.80769763073281
    expectedOutput(Array(2, 1, 2)) = -0.90540347425835
    expectedOutput(Array(2, 2, 1)) = 0.32571298952384
    expectedOutput(Array(2, 2, 2)) = -0.55506882753488
    val expectedGrad = Tensor[Double](2, 2, 2)
    expectedGrad(Array(1, 1, 1)) = 0.77632594793144
    expectedGrad(Array(1, 1, 2)) = 0.014389771607755
    expectedGrad(Array(1, 2, 1)) = 0.15153710218424
    expectedGrad(Array(1, 2, 2)) = 0.1082854310036
    expectedGrad(Array(2, 1, 1)) = 0.32809049064441
    expectedGrad(Array(2, 1, 2)) = 0.0054694603766104
    expectedGrad(Array(2, 2, 1)) = 0.3021830658283
    expectedGrad(Array(2, 2, 2)) = 0.6093779706637
    val inputOrg = input.clone()
    val gradOutputOrg = gradOutput.clone()
    val output = module.forward(input)
    val gradInput = module.backward(input, gradOutput)
    expectedOutput.map(output, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6);
      v1
    })
    expectedGrad.map(gradInput, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6);
      v1
    })
    assert(input == inputOrg)
    assert(gradOutput == gradOutputOrg)
  }

  "A Tanh Module " should "be good in gradient check" in {
    val module = new Tanh[Double]()
    val input = Tensor[Double](2, 2, 2).rand()

    val checker = new GradientChecker(1e-4, 1e-2)
    checker.checkLayer[Double](module, input) should be(true)
  }
}

class TanhSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val module = TanhGrad[Float, Float]()

    val input = T(Tensor[Float](1, 5, 3, 4).rand(), Tensor[Float](1, 5, 3, 4).rand())

    runSerializationTest(module, input)
  }
} 
Example 4
Source File: CogroupTest.scala    From spark-tools   with Apache License 2.0    6 votes
package io.univalence.plumbus
import io.univalence.plumbus.test.SparkTestLike
import org.apache.spark.sql.Dataset
import org.scalatest.{ FunSuiteLike, Matchers }
import com.github.mrpowers.spark.fast.tests.DatasetComparer

class CogroupTest extends FunSuiteLike with SparkTestLike with Matchers with DatasetComparer {
  import spark.implicits._
  import io.univalence.plumbus.cogroup._

  val person1 = PersonWithId("1", "John", 32)
  val person2 = PersonWithId("2", "Mary", 32)

  val address1 = Address("1", "address1")
  val address2 = Address("2", "address2")
  val address3 = Address("1", "address3")

  val persons: Dataset[PersonWithId] = Seq(person1, person2).toDS()
  val addresses: Dataset[Address]    = Seq(address1, address2, address3).toDS()

  test("apply test") {
    val applyDS = apply(persons, addresses)(_.id, _.idPerson)
    val expectedDS = Seq(
      ("1", Seq(person1), Seq(address1, address3)),
      ("2", Seq(person2), Seq(address2))
    ).toDS()
    assertSmallDatasetEquality(applyDS, expectedDS, orderedComparison = false)
  }
}

case class Address(idPerson: String, name: String) 
Example 5
Source File: IntegrationTest.scala    From kmq   with Apache License 2.0    6 votes
package com.softwaremill.kmq.redelivery

import java.time.Duration
import java.util.Random

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import com.softwaremill.kmq._
import com.softwaremill.kmq.redelivery.infrastructure.KafkaSpec
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Seconds, Span}
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}

import scala.collection.mutable.ArrayBuffer

class IntegrationTest extends TestKit(ActorSystem("test-system")) with FlatSpecLike with KafkaSpec with BeforeAndAfterAll with Eventually with Matchers {

  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  "KMQ" should "resend message if not committed" in {
    val bootstrapServer = s"localhost:${testKafkaConfig.kafkaPort}"
    val kmqConfig = new KmqConfig("queue", "markers", "kmq_client", "kmq_redelivery", Duration.ofSeconds(1).toMillis,
    1000)

    val consumerSettings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers(bootstrapServer)
      .withGroupId(kmqConfig.getMsgConsumerGroupId)
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

    val markerProducerSettings = ProducerSettings(system,
      new MarkerKey.MarkerKeySerializer(), new MarkerValue.MarkerValueSerializer())
      .withBootstrapServers(bootstrapServer)
      .withProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, classOf[ParititionFromMarkerKey].getName)
    val markerProducer = markerProducerSettings.createKafkaProducer()

    val random = new Random()

    lazy val processedMessages = ArrayBuffer[String]()
    lazy val receivedMessages = ArrayBuffer[String]()

    val control = Consumer.committableSource(consumerSettings, Subscriptions.topics(kmqConfig.getMsgTopic)) // 1. get messages from topic
      .map { msg =>
      ProducerMessage.Message(
        new ProducerRecord[MarkerKey, MarkerValue](kmqConfig.getMarkerTopic, MarkerKey.fromRecord(msg.record), new StartMarker(kmqConfig.getMsgTimeoutMs)), msg)
    }
      .via(Producer.flow(markerProducerSettings, markerProducer)) // 2. write the "start" marker
      .map(_.message.passThrough)
      .mapAsync(1) { msg =>
        msg.committableOffset.commitScaladsl().map(_ => msg.record) // this should be batched
      }
      .map { msg =>
        receivedMessages += msg.value
        msg
      }
      .filter(_ => random.nextInt(5) != 0)
      .map { processedMessage =>
        processedMessages += processedMessage.value
        new ProducerRecord[MarkerKey, MarkerValue](kmqConfig.getMarkerTopic, MarkerKey.fromRecord(processedMessage), EndMarker.INSTANCE)
      }
      .to(Producer.plainSink(markerProducerSettings, markerProducer)) // 5. write "end" markers
      .run()

    val redeliveryHook = RedeliveryTracker.start(new KafkaClients(bootstrapServer), kmqConfig)

    val messages = (0 to 20).map(_.toString)
    messages.foreach(msg => sendToKafka(kmqConfig.getMsgTopic,msg))

    eventually {
      receivedMessages.size should be > processedMessages.size
      processedMessages.sortBy(_.toInt).distinct shouldBe messages
    }(PatienceConfig(timeout = Span(15, Seconds)), implicitly)

    redeliveryHook.close()
    control.shutdown()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    TestKit.shutdownActorSystem(system)
  }
} 
Example 6
Source File: TransformerTest.scala    From incubator-s2graph   with Apache License 2.0    6 votes
package org.apache.s2graph.s2jobs.wal

import org.apache.s2graph.s2jobs.task.TaskConf
import org.apache.s2graph.s2jobs.wal.transformer._
import org.scalatest.{FunSuite, Matchers}
import play.api.libs.json.Json

class TransformerTest extends FunSuite with Matchers {
  val walLog = WalLog(1L, "insert", "edge", "a", "b", "s2graph", "friends", """{"name": 1, "url": "www.google.com"}""")

  test("test default transformer") {
    val taskConf = TaskConf.Empty
    val transformer = new DefaultTransformer(taskConf)
    val dimVals = transformer.toDimValLs(walLog, "name", "1")

    dimVals shouldBe Seq(DimVal("friends:name", "1"))
  }

  test("test ExtractDomain from URL") {
    val taskConf = TaskConf.Empty.copy(options =
      Map("urlDimensions" -> Json.toJson(Seq("url")).toString())
    )
    val transformer = new ExtractDomain(taskConf)
    val dimVals = transformer.toDimValLs(walLog, "url", "http://www.google.com/abc")

    dimVals shouldBe Seq(
      DimVal("host", "www.google.com"),
      DimVal("domain", "www.google.com"),
      DimVal("domain", "www.google.com/abc")
    )
  }
} 
Example 7
Source File: AvroParquetSourceTest.scala    From eel-sdk   with Apache License 2.0    6 votes
package io.eels.component.parquet

import java.nio.file.Paths

import io.eels.component.parquet.avro.AvroParquetSource
import io.eels.component.parquet.util.ParquetLogMute
import io.eels.schema._
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.AvroParquetWriter
import org.scalatest.{Matchers, WordSpec}

class AvroParquetSourceTest extends WordSpec with Matchers {
  ParquetLogMute()

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(conf)

  private val personFile = Paths.get(getClass.getResource("/io/eels/component/parquet/person.avro.pq").toURI)
  private val resourcesDir = personFile.getParent

  "AvroParquetSource" should {
    "read schema" in {
      val people = AvroParquetSource(personFile)
      people.schema shouldBe StructType(
        Field("name", StringType, nullable = false),
        Field("job", StringType, nullable = false),
        Field("location", StringType, nullable = false)
      )
    }
    "read parquet files" in {
      val people = AvroParquetSource(personFile.toAbsolutePath()).toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    "read multiple parquet files using file expansion" in {
      import io.eels.FilePattern._
      val people = AvroParquetSource(s"${resourcesDir.toUri.toString}/*.pq").toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner"),
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    // todo add merge to parquet source
    "merge schemas" ignore {

      try {
        fs.delete(new Path("merge1.pq"), false)
      } catch {
        case t: Throwable =>
      }
      try {
        fs.delete(new Path("merge2.pq"), false)
      } catch {
        case t: Throwable =>
      }

      val schema1 = SchemaBuilder.builder().record("schema1").fields().requiredString("a").requiredDouble("b").endRecord()
      val schema2 = SchemaBuilder.builder().record("schema2").fields().requiredInt("a").requiredBoolean("c").endRecord()

      val writer1 = AvroParquetWriter.builder[GenericRecord](new Path("merge1.pq")).withSchema(schema1).build()
      val record1 = new GenericData.Record(schema1)
      record1.put("a", "aaaaa")
      record1.put("b", 124.3)
      writer1.write(record1)
      writer1.close()

      val writer2 = AvroParquetWriter.builder[GenericRecord](new Path("merge2.pq")).withSchema(schema2).build()
      val record2 = new GenericData.Record(schema2)
      record2.put("a", 111)
      record2.put("c", true)
      writer2.write(record2)
      writer2.close()

      ParquetSource(new Path("merge*")).schema shouldBe
        StructType(
          Field("a", StringType, nullable = false),
          Field("b", DoubleType, nullable = false),
          Field("c", BooleanType, nullable = false)
        )

      fs.delete(new Path(".merge1.pq.crc"), false)
      fs.delete(new Path(".merge2.pq.crc"), false)
      fs.delete(new Path("merge1.pq"), false)
      fs.delete(new Path("merge2.pq"), false)
    }
  }
} 
Example 8
Source File: RegressITCase.scala    From flink-tensorflow   with Apache License 2.0    6 votes
package org.apache.flink.contrib.tensorflow.ml

import com.twitter.bijection.Conversion._
import org.apache.flink.api.common.functions.RichFlatMapFunction
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration
import org.apache.flink.contrib.tensorflow.ml.signatures.RegressionMethod._
import org.apache.flink.contrib.tensorflow.types.TensorInjections.{message2Tensor, messages2Tensor}
import org.apache.flink.contrib.tensorflow.util.TestData._
import org.apache.flink.contrib.tensorflow.util.{FlinkTestBase, RegistrationUtils}
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.util.Collector
import org.apache.flink.util.Preconditions.checkState
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.Tensor
import org.tensorflow.contrib.scala.Arrays._
import org.tensorflow.contrib.scala.Rank._
import org.tensorflow.contrib.scala._
import org.tensorflow.example.Example
import resource._

@RunWith(classOf[JUnitRunner])
class RegressITCase extends WordSpecLike
  with Matchers
  with FlinkTestBase {

  override val parallelism = 1

  type LabeledExample = (Example, Float)

  def examples(): Seq[LabeledExample] = {
    for (v <- Seq(0.0f -> 2.0f, 1.0f -> 2.5f, 2.0f -> 3.0f, 3.0f -> 3.5f))
      yield (example("x" -> feature(v._1)), v._2)
  }

  "A RegressFunction" should {
    "process elements" in {
      val env = StreamExecutionEnvironment.getExecutionEnvironment
      RegistrationUtils.registerTypes(env.getConfig)

      val model = new HalfPlusTwo(new Path("../models/half_plus_two"))

      val outputs = env
        .fromCollection(examples())
        .flatMap(new RichFlatMapFunction[LabeledExample, Float] {
          override def open(parameters: Configuration): Unit = model.open()
          override def close(): Unit = model.close()

          override def flatMap(value: (Example, Float), out: Collector[Float]): Unit = {
            for {
              x <- managed(Seq(value._1).toList.as[Tensor].taggedAs[ExampleTensor])
              y <- model.regress_x_to_y(x)
            } {
              // cast as a 1D tensor to use the available conversion
              val o = y.taggedAs[TypedTensor[`1D`,Float]].as[Array[Float]]
              val actual = o(0)
              checkState(actual == value._2)
              out.collect(actual)
            }
          }
        })
        .print()

      env.execute()
    }
  }
} 
Example 9
Source File: LocalDBSCANArcherySuite.scala    From dbscan-on-spark   with Apache License 2.0    5 votes
package org.apache.spark.mllib.clustering.dbscan

import java.net.URI

import scala.io.Source

import org.scalatest.FunSuite
import org.scalatest.Matchers
import org.apache.spark.mllib.linalg.Vectors

class LocalDBSCANArcherySuite extends FunSuite with Matchers {

  private val dataFile = "labeled_data.csv"

  test("should cluster") {

    val labeled: Map[DBSCANPoint, Double] =
      new LocalDBSCANArchery(eps = 0.3F, minPoints = 10)
        .fit(getRawData(dataFile))
        .map(l => (l, l.cluster.toDouble))
        .toMap

    val expected: Map[DBSCANPoint, Double] = getExpectedData(dataFile).toMap

    labeled.foreach {
      case (key, value) => {
        val t = expected(key)
        if (t != value) {
          println(s"expected: $t but got $value for $key")
        }

      }
    }

    labeled should equal(expected)

  }

  def getExpectedData(file: String): Iterator[(DBSCANPoint, Double)] = {
    Source
      .fromFile(getFile(file))
      .getLines()
      .map(s => {
        val vector = Vectors.dense(s.split(',').map(_.toDouble))
        val point = DBSCANPoint(vector)
        (point, vector(2))
      })
  }

  def getRawData(file: String): Iterable[DBSCANPoint] = {

    Source
      .fromFile(getFile(file))
      .getLines()
      .map(s => DBSCANPoint(Vectors.dense(s.split(',').map(_.toDouble))))
      .toIterable
  }

  def getFile(filename: String): URI = {
    getClass.getClassLoader.getResource(filename).toURI
  }

} 
Example 10
Source File: ExpiresDirectiveSpec.scala    From akka-http-extensions   with Apache License 2.0    5 votes
package com.lonelyplanet.akka.http.extensions.directives

import akka.http.scaladsl.model.DateTime
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{FlatSpec, Matchers}
import akka.http.scaladsl.server.Directives._
import ExpiresDirective._
import akka.http.scaladsl.model.StatusCodes.OK

class ExpiresDirectiveSpec extends FlatSpec with Matchers with ScalatestRouteTest {
  private val expirationDate = DateTime.now

  private val route = path("test") {
    expires(expirationDate) {
      complete {
        "OK"
      }
    }
  }

  "ExpiresDirective" should "set `Expires` header correctly" in {
    Get("/test") ~> route ~> check {
      status shouldBe OK
      responseAs[String] shouldBe "OK"
      header("Expires").get.value shouldBe expirationDate.toRfc1123DateTimeString
    }
  }
} 
Example 11
Source File: TraceTokenMDCLoggingTest.scala    From akka-http-extensions   with Apache License 2.0    5 votes
package com.lonelyplanet.akka.http.extensions.logging

import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.classic.{Level, Logger, LoggerContext}
import ch.qos.logback.core.AppenderBase
import com.lonelyplanet.akka.http.extensions.tracing.{MaybeTraceTokenHolder, TraceToken}
import org.scalatest.{FlatSpec, Matchers}
import org.slf4j.LoggerFactory

import scala.util.Random

class TraceTokenMDCLoggingSpec extends FlatSpec with Matchers {
  it should "log trace token if one is present" in {
    withInMemoryAppender { appender =>
      val traceToken = TraceToken.random
      val loggingTester = new LoggingTester(Some(traceToken))
      val message = randomMessage

      loggingTester.doLog(message)

      appender.output should not be empty
      appender.output.lines.foreach({ line =>
        line.contains(message) shouldBe true
        line.contains(traceToken.toString) shouldBe true
      })
    }
  }

  private def withInMemoryAppender(f: (InMemoryLoggingAppender) => Unit) = {
    val loggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    val appender = new InMemoryLoggingAppender
    appender.setContext(loggerContext)

    val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
    logger.setLevel(Level.ALL)
    logger.detachAndStopAllAppenders()

    logger.addAppender(appender)
    appender.start()
    f(appender)
    logger.detachAppender(appender)
    appender.stop()
  }

  private def randomMessage = Random.alphanumeric.take(20).mkString("")
}

private class InMemoryLoggingAppender extends AppenderBase[ILoggingEvent] {
  private val builder = new StringBuilder

  override def append(event: ILoggingEvent): Unit = {
    builder.append(event.getMessage)
    builder.append(" ")
    if (event.getMDCPropertyMap.containsKey(TraceToken.MDCKey)) {
      builder.append(event.getMDCPropertyMap.get(TraceToken.MDCKey))
    }
    builder.append("\n")
  }

  def output: String = builder.toString()
  def clear(): Unit = builder.clear()
}

private class LoggingTester(maybeTraceTokenFunc: => Option[TraceToken]) extends TraceTokenMDCLogging with MaybeTraceTokenHolder {
  override def maybeTraceToken: Option[TraceToken] = maybeTraceTokenFunc
  def doLog(message: String): Unit = {
    logger.trace(message)
    logger.debug(message)
    logger.info(message)
    logger.warn(message)
    logger.error(message)
  }
} 
Example 12
Source File: ExceptionHandlingSpec.scala    From akka-http-extensions   with Apache License 2.0    5 votes
package com.lonelyplanet.akka.http.extensions

import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{FlatSpec, Matchers}
import akka.http.scaladsl.model.StatusCodes._
import com.lonelyplanet.akka.http.extensions.fixtures.ErrorResponses
import spray.json._

class ExceptionHandlingSpec extends FlatSpec with Matchers with ScalatestRouteTest {

  it should "return correct status codes" in {
    val routing = new Routing

    Get("doesnt-exist") ~> routing.route ~> check {
      val json = responseAs[String].parseJson
      json shouldBe ErrorResponses.defaultErrorResponse
      response.status shouldBe NotFound
    }

    Get("doesnt-exist") ~> addHeader("X-Trace-Token", "test") ~> routing.route ~> check {
      val json = responseAs[String].parseJson
      json shouldBe ErrorResponses.errorResponseWithToken("test")
      response.status shouldBe NotFound
    }

    Get("/exception") ~> routing.route ~> check {
      response.status shouldBe InternalServerError
    }

    Post("/") ~> routing.route ~> check {
      response.status shouldBe MethodNotAllowed
    }

    Get("/resource") ~> routing.route ~> check {
      response.status shouldBe NotFound
    }
  }
} 
Example 13
Source File: PaginationDirectivesWithDefaults.scala    From akka-http-extensions   with Apache License 2.0    5 votes
package com.lonelyplanet.akka.http.extensions

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration.FiniteDuration

class PaginationDirectivesWithDefaults extends PaginationSpec {

  override def testConfigSource =
    """akka.http.extensions.pagination.defaults.enabled = true
      | akka.http.extensions.pagination.defaults.offset = 0
      | akka.http.extensions.pagination.defaults.limit = 50
    """.stripMargin

  "Pagination with defaults" should "not have page if no page is requested" in {

    Get("/filter-test") ~> paginationRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] === "NoPage"
    }

    Get("/filter-test") ~> paginationOrDefaultsRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] === "NoPage"
    }
  }

  it should "have a page with defaults if one of the parameters is set" in {
    Get("/filter-test?offset=1") ~> paginationRoute ~> check {
      responseAs[String] shouldEqual PageRequest(1, 50, Map.empty).toString
    }

    Get("/filter-test?limit=100") ~> paginationRoute ~> check {
      responseAs[String] shouldEqual PageRequest(0, 100, Map.empty).toString
    }

    Get("/filter-test?offset=1") ~> paginationOrDefaultsRoute ~> check {
      responseAs[String] shouldEqual PageRequest(1, 50, Map.empty).toString
    }

    Get("/filter-test?limit=100") ~> paginationOrDefaultsRoute ~> check {
      responseAs[String] shouldEqual PageRequest(0, 100, Map.empty).toString
    }
  }

  it should "return the page object that was requested" in {
    Get("/filter-test?offset=1&limit=10") ~> paginationRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] shouldEqual PageRequest(1, 10, Map.empty).toString
    }

    Get("/filter-test?offset=1&limit=10") ~> paginationOrDefaultsRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] shouldEqual PageRequest(1, 10, Map.empty).toString
    }
  }

  it should "return the page object with sorting that was requested" in {
    Get("/filter-test?offset=1&limit=10&sort=name,asc;age,desc") ~> paginationRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] shouldEqual PageRequest(1, 10, Map("name" -> Order.Asc, "age" -> Order.Desc)).toString
    }

    Get("/filter-test?offset=1&limit=10&sort=name,asc;age,desc") ~> paginationOrDefaultsRoute ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] shouldEqual PageRequest(1, 10, Map("name" -> Order.Asc, "age" -> Order.Desc)).toString
    }
  }
} 
Example 14
Source File: PaginationSpec.scala    From akka-http-extensions   with Apache License 2.0    5 votes
package com.lonelyplanet.akka.http.extensions

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration.FiniteDuration

class PaginationSpec extends FlatSpec with PaginationDirectives with Matchers with ScalatestRouteTest {
  implicit val routeTestTimeout: RouteTestTimeout = RouteTestTimeout(FiniteDuration(10, "s"))
  val config = testConfig

  def paginationRoute =
    path("filter-test") {
      withOptionalPagination { page =>
        complete {
          page match {
            case Some(p) => p.toString
            case None    => "NoPage"
          }
        }
      }
    }

  def paginationOrDefaultsRoute =
    path("filter-test") {
      withPagination { page =>
        complete {
          page.toString
        }
      }
    }
} 
Example 15
Source File: GraphQLVisitorSpec.scala    From macro-visit   with Apache License 2.0    5 votes
package sangria.visitor

import org.parboiled2.Position
import org.scalatest.{Matchers, WordSpec}
import sangria.ast._
import sangria.macros._
import sangria.visitor.util.StringMatchers

import scala.collection.immutable.Vector

class GraphQLVisitorSpec extends WordSpec with Matchers with StringMatchers {
  "Visitor when used with GraphQL AST" should {
    "traverse and transform AST" in {
      val doc =
        graphql"""
          query Foo {
            # first field
            person(id: String, filters: [{firstName1: "Bob"}, {lastName1: "Doe"}]) {
              ...Names

              interests(first: 10) {
                name
                ranks(filter: {f1: "test"})
              }
            }
          }

          fragment Names on Person {
            firstName
            lastName
          }
        """

      var fields = Vector.empty[Field]
      var inputFields = Vector.empty[ObjectField]

      val enterField = (field: Field) => {
        fields = fields :+ field

        field.name match {
          case "firstName" =>
            VisitorCommand.Transform(field.copy(comments = List(Comment("Test comment"))))
          case "interests" =>
            VisitorCommand.Skip
          case _ =>
            VisitorCommand.Continue
        }
      }

      val leaveInputField = (field: ObjectField) => {
        inputFields = inputFields :+ field

        VisitorCommand.Continue
      }

      val res = visit[AstNode](doc,
        Visit[Field](enterField),
        Visit[ObjectField](
          enter = _ => VisitorCommand.Continue,
          leave = leaveInputField),
        VisitAnyFieldByName[Document, Option[Position]]("position", (_, _) => VisitorCommand.Transform(None)))

      res.renderPretty should equal (
        """query Foo {
          |  # first field
          |  person(id: String, filters: [{firstName1: "Bob"}, {lastName1: "Doe"}]) {
          |    ...Names
          |    interests(first: 10) {
          |      name
          |      ranks(filter: {f1: "test"})
          |    }
          |  }
          |}
          |
          |fragment Names on Person {
          |  # Test comment
          |  firstName
          |  lastName
          |}""".stripMargin) (after being strippedOfCarriageReturns)


      fields.map(_.name) should be (Vector("person", "interests", "firstName", "lastName"))
      inputFields.map(_.name) should be (Vector("firstName1", "lastName1"))
    }
  }
} 
Example 16
Source File: IteratorProcessorTest.scala    From vm   with GNU Affero General Public License v3.0    5 votes
package org.mmadt.processor.obj.value

import org.mmadt.language.LanguageException
import org.mmadt.processor.Processor
import org.mmadt.storage.StorageFactory._
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}


// The enclosing class and the opening of this test are elided in this excerpt; the
// wrapper below is a reconstruction (class name from the file name, test name assumed)
// so that the remaining assertions read as a complete FunSuite spec.
class IteratorProcessorTest extends FunSuite with Matchers with TableDrivenPropertyChecks {

  test("mismatched types raise LanguageException") {
    assertThrows[LanguageException] {
      int(10) ===> bool.and(bfalse)
    }

    assertThrows[LanguageException] {
      int(10) ===> str
    }

    assertThrows[LanguageException] {
      int(10) ===> str.q(2)
    }

    assertThrows[LanguageException] {
      str("hello") ===> bool
    }
  }
} 
Example 17
Source File: SpatialJoinBehaviors.scala    From SpatialSpark   with Apache License 2.0    5 votes
package spatialspark.join

import com.vividsolutions.jts.geom.Geometry
import org.apache.spark.rdd.RDD
import org.scalatest.{FlatSpec, Matchers}
import spatialspark.operator.SpatialOperator._

trait SpatialJoinBehaviors {
  this: FlatSpec with Matchers with GeometryFixtures =>

  
  private def testAlgorithmWithSwap(algorithm: => SpatialJoinAlgorithm,
                                    firstGeomWithId: RDD[(Long, Geometry)],
                                    secondGeomWithId: RDD[(Long, Geometry)],
                                    predicate: SpatialOperator,
                                    expectedResult: List[(Long, Long)]) = {
    val matchedPairs = algorithm.run(firstGeomWithId, secondGeomWithId, predicate)
    val matchedPairsReversed = algorithm.run(secondGeomWithId, firstGeomWithId, predicate)

    val expectedResultReversed = expectedResult.map {
      case (x, y) => (y, x)
    }

    matchedPairs should be(expectedResult)
    matchedPairsReversed should be(expectedResultReversed)
  }

  def spatialJoinAlgorithm(algorithm: SpatialJoinAlgorithm): Unit = {

    it should "work for rectangles intersection" in {
      // only A intersects with C
      testAlgorithmWithSwap(algorithm, geomABWithId, geomCWithId, Intersects, List((0L, 0L)))
    }

    it should "work for point and rectangle intersection" in {
      // only B intersects with D
      testAlgorithmWithSwap(algorithm, geomABCWithId, geomDWithId, Intersects, List((1L, 0L)))
    }

    it should "work for empty intersection" in {
      testAlgorithmWithSwap(algorithm, geomCWithId, geomDWithId, Intersects, List())
    }

    it should "work with empty lists" in {
      testAlgorithmWithSwap(algorithm, emptyGeomWithId, geomABWithId, Intersects, List())
    }

  }

} 
Example 18
Source File: MBRSpec.scala    From SpatialSpark   with Apache License 2.0    5 votes
package spatialspark.util

import com.vividsolutions.jts.geom.{Envelope, Geometry}
import com.vividsolutions.jts.io.WKTReader
import org.scalatest.{Matchers, FlatSpec}

class MBRSpec extends FlatSpec with Matchers {

  val Mbr = MBR(-1, 0, 5, 7)

  behavior of "MBR case class"

  it should "calculate center correctly" in {
    Mbr.center should be ((2, 3.5))
  }

  it should "check intersection correctly" in {
    Mbr.intersects(Mbr) should be (true)
    Mbr.intersects(MBR(0, 0, 1, 1)) should be (true)
    Mbr.intersects(MBR(4, 4, 6, 6)) should be (true)
    Mbr.intersects(MBR(5, 7, 10, 10)) should be (true)
    Mbr.intersects(MBR(6, 6, 10, 10)) should be (false)
  }

  it should "union with other mbr correctly" in {
    val bigMbr = MBR(-20, -20, 20, 20)

    Mbr.union(Mbr) should be (Mbr)
    Mbr.union(MBR(0, 0, 1, 1)) should be (Mbr)
    Mbr.union(MBR(5, 7, 10, 10)) should be (MBR(-1, 0, 10, 10))
    Mbr.union(bigMbr) should be (bigMbr)
  }

  it should "have correct WKT representation" in {
    val geometry: Geometry = new WKTReader().read(Mbr.toText)
    val geometryEnvelope: Envelope = geometry.getEnvelopeInternal

    geometry should be a 'rectangle
    Mbr.toEnvelope should be (geometryEnvelope)
  }

  it should "generate correct string with separator" in {
    Mbr.toString("\t") should be ("-1.0\t0.0\t5.0\t7.0")
  }
} 
Example 19
Source File: FeatureSpec.scala    From haystack-trends   with Apache License 2.0    5 votes
package com.expedia.www.haystack.trends.feature

import java._
import java.util.Properties

import com.expedia.metrics.MetricData
import com.expedia.open.tracing.Span
import com.expedia.www.haystack.commons.entities.encoders.Base64Encoder
import com.expedia.www.haystack.trends.config.AppConfiguration
import com.expedia.www.haystack.trends.config.entities.{KafkaConfiguration, TransformerConfiguration}
import org.apache.kafka.streams.StreamsConfig
import org.easymock.EasyMock
import org.scalatest.easymock.EasyMockSugar
import org.scalatest.{FeatureSpecLike, GivenWhenThen, Matchers}


trait FeatureSpec extends FeatureSpecLike with GivenWhenThen with Matchers with EasyMockSugar {

  protected val METRIC_TYPE = "gauge"

  def generateTestSpan(duration: Long): Span = {
    val operationName = "testSpan"
    val serviceName = "testService"
    Span.newBuilder()
      .setDuration(duration)
      .setOperationName(operationName)
      .setServiceName(serviceName)
      .build()
  }

  protected def mockAppConfig: AppConfiguration = {
    val kafkaConsumeTopic = "test-consume"
    val kafkaProduceTopic = "test-produce"
    val streamsConfig = new Properties()
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app")
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "test-kafka-broker")
    val kafkaConfig = KafkaConfiguration(new StreamsConfig(streamsConfig), kafkaProduceTopic, kafkaConsumeTopic, null, null, 0l)
    val transformerConfig = TransformerConfiguration(new Base64Encoder, enableMetricPointServiceLevelGeneration = true, List())
    val appConfiguration = mock[AppConfiguration]

    expecting {
      appConfiguration.kafkaConfig.andReturn(kafkaConfig).anyTimes()
      appConfiguration.transformerConfiguration.andReturn(transformerConfig).anyTimes()
    }
    EasyMock.replay(appConfiguration)
    appConfiguration
  }

  protected def getMetricDataTags(metricData : MetricData): util.Map[String, String] = {
    metricData.getMetricDefinition.getTags.getKv
  }

} 
Example 20
Source File: DefaultSaverITCase.scala    From flink-tensorflow   with Apache License 2.0    5 votes
package org.apache.flink.contrib.tensorflow.io

import org.apache.flink.contrib.tensorflow.models.savedmodel.DefaultSavedModelLoader
import org.apache.flink.contrib.tensorflow.util.{FlinkTestBase, RegistrationUtils}
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.{Session, Tensor}

import scala.collection.JavaConverters._

@RunWith(classOf[JUnitRunner])
class DefaultSaverITCase extends WordSpecLike
  with Matchers
  with FlinkTestBase {

  override val parallelism = 1

  "A DefaultSaver" should {
    "run the save op" in {
      val env = StreamExecutionEnvironment.getExecutionEnvironment
      RegistrationUtils.registerTypes(env.getConfig)

      val loader = new DefaultSavedModelLoader(new Path("../models/half_plus_two"), "serve")
      val bundle = loader.load()
      val saverDef = loader.metagraph.getSaverDef
      val saver = new DefaultSaver(saverDef)

      def getA = getVariable(bundle.session(), "a").floatValue()
      def setA(value: Float) = setVariable(bundle.session(), "a", Tensor.create(value))

      val initialA = getA
      println("Initial value: " + initialA)

      setA(1.0f)
      val savePath = tempFolder.newFolder("model-0").getAbsolutePath
      val path = saver.save(bundle.session(), savePath)
      val savedA = getA
      savedA shouldBe (1.0f)
      println("Saved value: " + getA)

      setA(2.0f)
      val updatedA = getA
      updatedA shouldBe (2.0f)
      println("Updated value: " + updatedA)

      saver.restore(bundle.session(), path)
      val restoredA = getA
      restoredA shouldBe (savedA)
      println("Restored value: " + restoredA)
    }

    def getVariable(sess: Session, name: String): Tensor = {
      val result = sess.runner().fetch(name).run().asScala
      result.head
    }

    def setVariable(sess: Session, name: String, value: Tensor): Unit = {
      sess.runner()
        .addTarget(s"$name/Assign")
        .feed(s"$name/initial_value", value)
        .run()
    }
  }
} 
Example 21
Source File: ArraysTest.scala    From flink-tensorflow   with Apache License 2.0    5 votes
package org.tensorflow.contrib.scala

import com.twitter.bijection.Conversion._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}
import org.tensorflow.contrib.scala.Arrays._
import org.tensorflow.contrib.scala.Rank._
import resource._

@RunWith(classOf[JUnitRunner])
class ArraysTest extends WordSpecLike
  with Matchers {

  "Arrays" when {
    "Array[Float]" should {
      "convert to Tensor[`1D`,Float]" in {
        val expected = Array(1f,2f,3f)
        managed(expected.as[TypedTensor[`1D`,Float]]).foreach { t =>
          t.shape shouldEqual Array(expected.length)
          val actual = t.as[Array[Float]]
          actual shouldEqual expected
        }
      }
    }
  }
} 
Example 22
Source File: LabeledElementOpticsLawsSpec.scala    From xml-lens   with MIT License    5 votes
package pl.msitko.xml.optics.laws

import monocle.law.discipline.{OptionalTests, TraversalTests}
import org.scalacheck.Arbitrary
import org.scalatest.Matchers
import pl.msitko.xml.test.utils.{ArbitraryElementConfig, ArbitraryInstances, CogenInstances}

class LabeledElementOpticsLawsSpec extends LawsSpec with Matchers with ArbitraryInstances with CogenInstances {
  import pl.msitko.xml.entities.Instances._
  import pl.msitko.xml.optics.LabeledElementOptics._

  implicit val arbLabeledElem =
    Arbitrary(labeledElementGen(ArbitraryElementConfig(4, 4, Some("abc"), None)))
  implicit val arbElem =
    Arbitrary(labeledElementGen(ArbitraryElementConfig(1, 2, None, Some("someAttr"))).map(_.element))

  val deepTest = TraversalTests(deep("abc"))
  val isLabeledTest = OptionalTests(isLabeled("abc"))

  checkLaws("deep Traversal", deepTest)
  checkLaws("isLabeled Optional", isLabeledTest)
} 
Example 23
Source File: ElementOpticsLawsSpec.scala    From xml-lens   with MIT License    5 votes
package pl.msitko.xml.optics.laws

import monocle.law.discipline.{LensTests, OptionalTests, TraversalTests}
import org.scalacheck.Arbitrary
import org.scalatest.Matchers
import pl.msitko.xml.entities.Node
import pl.msitko.xml.optics.ElementOptics
import pl.msitko.xml.test.utils.{ArbitraryElementConfig, ArbitraryInstances, CogenInstances}

class ElementOpticsLawsSpec extends LawsSpec with Matchers with ArbitraryInstances with CogenInstances {
  import pl.msitko.xml.entities.Instances._
  import pl.msitko.xml.optics.ElementOptics._

  import scalaz.std.string._

  implicit val arbLabeledElem = Arbitrary(labeledElementGen(ArbitraryElementConfig(4, 4, Some("abc"), None)))
  implicit val arbNode = Arbitrary(arbLabeledElem.arbitrary.map(_.asInstanceOf[Node]))
  implicit val arbElem =
    Arbitrary(labeledElementGen(ArbitraryElementConfig(1, 2, None, Some("someAttr"))).map(_.element))
  implicit val arbAttr = Arbitrary(attributeGen(Some("someAttr")))

  val deeperTest              = TraversalTests(deeper("abc"))
  val allLabeledElementsTest  = TraversalTests(allLabeledElements)
  val hasTextOnlyTest         = OptionalTests(hasTextOnly)
  val attributeTest           = OptionalTests(attribute("someAttr"))
  val hasOneChildTest         = OptionalTests(hasOneChild)
  val attributesTest          = LensTests(attributes)
  val childrenTest            = LensTests(ElementOptics.children)

  checkLaws("deeper Traversal", deeperTest)
  checkLaws("allLabeledElements Traversal", allLabeledElementsTest)
  checkLaws("hasTextOnly Optional", hasTextOnlyTest)
  checkLaws("attribute Optional", attributeTest)
  checkLaws("hasOneChild Optional", hasOneChildTest)
  checkLaws("attributes Lens", attributesTest)
  // TODO: investigate why tests are slow with default value for maxSize
  checkLaws("children Lens", childrenTest, 8)
} 
Example 24
Source File: SimpleTransformationSpec.scala    From xml-lens   with MIT License    5 votes
package pl.msitko.xml.bench

import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.{FlatSpec, Matchers}

class SimpleTransformationSpec extends FlatSpec with Matchers with TypeCheckedTripleEquals {
  import SimpleTransformation._

  "SimpleTransformation" should "work" in {
    val withLens = transformWith(SimpleTransformationLens)
    val withStd  = transformWith(SimpleTransformationStd).replace('\'', '"')

    withLens should === (example.output)
    withLens should === (withStd)
  }

  def transformWith(transformer: => SimpleTransformation): String = {
    transformer.transform(example.input)
  }
} 
Example 25
Source File: UnitSpec.scala    From fgbio   with MIT License    5 votes
package com.fulcrumgenomics.testing

import java.io.PrintStream
import java.nio.file.{Files, Path}

import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.{SamRecord, SamSource}
import com.fulcrumgenomics.cmdline.FgBioTool
import com.fulcrumgenomics.commons.reflect.ReflectionUtil
import com.fulcrumgenomics.commons.util.{LazyLogging, LogLevel, Logger}
import com.fulcrumgenomics.sopt.cmdline.CommandLineProgramParser
import com.fulcrumgenomics.sopt.util.ParsingUtil
import com.fulcrumgenomics.util.Io
import com.fulcrumgenomics.vcf.api.{Variant, VcfSource}
import htsjdk.variant.variantcontext.VariantContext
import htsjdk.variant.vcf.VCFFileReader
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._


trait ErrorLogLevel extends UnitSpec with BeforeAndAfterAll {
  private var logLevel = Logger.level

  override protected def beforeAll(): Unit = {
    this.logLevel = Logger.level
    Logger.level  = LogLevel.Error
  }

  override protected def afterAll(): Unit = {
    Logger.level = LogLevel.Info
    Logger.level = this.logLevel
  }
} 
Example 26
Source File: ProblemWithPlayFrameworkMappings.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class ProblemWithPlayFrameworkMappings extends FlatSpec with Matchers {

  behavior of "vanilla play conditional mapping"

  it should "not contain an error for the conditional validation when there is a field-level error" in {
    val data = Map("nonUkResident" -> "true")
    val res = form.bind(data)

    assert(res.errors.length == 1)
    assert(res.errors.head.key === "email")
  }

  it should "not allow an field-level error message for a conditional validation" in {
    val data = Map("nonUkResident" -> "true", "email" -> "[email protected]")
    val res = form.bind(data)

    assert(res.errors.length == 1)
    assert(res.errors.head.key === "")
  }

  lazy val form = Form(mapping(
    "nonUkResident" -> boolean,
    "country" -> optional(nonEmptyText),
    "email" -> nonEmptyText
  )(Model.apply)(Model.unapply).verifying("Error.countryRequired", x => x.nonUkResident && x.country.isDefined))

}

case class Model(nonUkResident: Boolean, country: Option[String], email: String)

class SolutionUsingConditionalMappings extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "conditional mappings"

  it should "contain a field level errors for the field and conditional mappings" in {
    val data = Map("nonUkResident" -> "true")
    val res = form.bind(data)

    assert(res.errors.length == 2)
    assert(res.errors.head.key === "country")
    assert(res.errors.tail.head.key === "email")
  }

  lazy val form = Form(mapping(
    "nonUkResident" -> boolean,
    "country" -> mandatoryIfTrue("nonUkResident", nonEmptyText),
    "email" -> nonEmptyText
  )(Model.apply)(Model.unapply))
} 
Example 27
Source File: onlyIfAny.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.OptionValues._
import play.api.data.Form
import play.api.data.Forms._

class onlyIfAny extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "only if any"

  it should "apply the mapping to the target field if any of the source fields have their required value" in {
    val data = Map("s1" -> "abc", "s2" -> "abc", "s3" -> "abc", "target" -> "magic")
    Seq("s1", "s2", "s3") foreach { f =>
      val d = data.updated(f, "magicValue")
      val res = form.bind(d)

      assert(res.value.value.target.value === "magic")
    }
  }

  it should "not apply the mapping to the target field neither of the source fields have the required value" in {
    val data = Map("s1" -> "abc", "s2" -> "abc", "s3" -> "abc", "target" -> "magic")
    val res = form.bind(data)

    assert(res.value.value.target === None)
  }

  lazy val form = Form(mapping(
    "s1" -> nonEmptyText,
    "s2" -> nonEmptyText,
    "s3" -> nonEmptyText,
    "target" -> onlyIfAny(Seq("s1" -> "magicValue", "s2" -> "magicValue", "s3" -> "magicValue"), optional(nonEmptyText))
  )(Model.apply)(Model.unapply))

  case class Model(s1: String, s2: String, s3: String, target: Option[String])
} 
Example 28
Source File: MandatoryIfNot.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfNot extends FlatSpec with Matchers {
  import ConditionalMappings._

  it should "mandate the target field if the source field DOES not match the specified value" in {
    val data = Map("source" -> "NotTheMagicValue")
    val res = form.bind(data)

    assert(res.errors.head.key === "target")
  }

  it should "not mandate the target field if the source field DOES NOT match the specified value" in {
    val data = Map("source" -> "magicValue")
    val res = form.bind(data)

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "source" -> nonEmptyText,
    "target" -> mandatoryIfNot("source", "magicValue", nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(source: String, target: Option[String])
} 
Example 29
Source File: MandatoryIfAnyAreTrue.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfAnyAreTrue extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "mandatory if any are true"

  it should "mandate the target field if any of the source fields are true" in {
    Seq("f1", "f2", "f3") foreach { f =>
      val data = Map(f -> "true")
      val res = form.bind(data)

      assert(res.errors.head.key === "target")
    }
  }

  it should "not mandate the target field if neither of the source fields are true" in {
    val res = form.bind(Map.empty[String, String])
    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "f1" -> boolean,
    "f2" -> boolean,
    "f3" -> boolean,
    "target" -> mandatoryIfAnyAreTrue(Seq("f1", "f2", "f3"), nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(f1: Boolean, f2: Boolean, f3: Boolean, target: Option[String])
} 
Example 30
Source File: ChainedConditions.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class ChainedConditions extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "chained mappings"

  it should "apply mappings if all of the chained criteria are satisfied" in {
    val data = Map("name" -> "Francoise", "age" -> "21")
    val res = form.bind(data)

    assert(res.errors.head.key === "favouriteColour")
  }

  it should "not apply mappings if any part of the chained critieria is not satisfied" in {
    val data = Map("name" -> "Francoise", "age" -> "20")
    val res = form.bind(data)

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "name" -> nonEmptyText,
    "age" -> number,
    "favouriteColour" -> mandatoryIf(
      isEqual("name", "Francoise") and isEqual("age", "21"),
      nonEmptyText
    )
  )(Model.apply)(Model.unapply))

  case class Model(name: String, age: Int, favouriteColour: Option[String])
} 
Example 31
Source File: MandatoryIfFalse.scala    From play-conditional-form-mapping   with Apache License 2.0    5 votes
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfFalse extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "mandatory if false"

  it should "mandate the target field if the source field is false, with field-level errors" in {
    val data = Map("source" -> "false")
    val res = form.bind(data)

    assert(res.errors.head.key === "target")
  }

  it should "not mandate the target field if the source field is not false" in {
    val res = form.bind(Map.empty[String, String])

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "source" -> boolean,
    "target" -> mandatoryIfFalse("source", nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(source: Boolean, target: Option[String])
} 
Example 32
Source File: OnlyIfTrue.scala    From play-conditional-form-mapping   with Apache License 2.0 5 votes vote down vote up
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import org.scalatest.OptionValues._
import play.api.data.Form
import play.api.data.Forms._

class OnlyIfTrue extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "isTrue and isFalse"

  it should "apply a mapping with value TRUE is string in a case-variation of TRUE" in {
    isTrue("source")(Map("source" -> "true")) should be(true)
    isTrue("source")(Map("source" -> "TRUE")) should be(true)
    isTrue("source")(Map("source" -> "TRuE")) should be(true)
  }

  it should "apply a mapping with value FALSE is string in a case-variation of FALSE" in {
    isTrue("source")(Map("source" -> "false")) should be(false)
    isTrue("source")(Map("source" -> "FALSE")) should be(false)
    isTrue("source")(Map("source" -> "fAlSe")) should be(false)
  }

  it should "apply a mapping with value TRUE is string in a case-variation of FALSE" in {
    isFalse("source")(Map("source" -> "false")) should be(true)
    isFalse("source")(Map("source" -> "FALSE")) should be(true)
    isFalse("source")(Map("source" -> "fAlSe")) should be(true)
  }

  it should "apply a mapping with value FALSE is string in a case-variation of TRUE" in {
    isFalse("source")(Map("source" -> "true")) should be(false)
    isFalse("source")(Map("source" -> "TRUE")) should be(false)
    isFalse("source")(Map("source" -> "TRuE")) should be(false)
  }

  it should "apply a mapping with value FALSE is string in not a case-variation of TRUE or FALSE" in {
    isTrue("source")(Map("source" -> "non-sensical")) should be(false)
    isFalse("source")(Map("source" -> "non-sensical")) should be(false)
  }

  behavior of "only if true"

  it should "apply the mapping to the target field if the source field is true" in {
    val data = Map("source" -> "true", "target" -> "Bonjour")
    val res = form.bind(data)

    assert(res.value.value === Model(true, Some("Bonjour")))
  }

  it should "ignore the mapping and set the default value if the source field is not true" in {
    val data = Map("source" -> "false", "target" -> "Bonjour")
    val res = form.bind(data)

    assert(res.value.value === Model(false, None))
  }

  it should "not mandate the target field even if the source field is true" in {
    val data = Map("source" -> "true")
    val res = form.bind(data)

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "source" -> boolean,
    "target" -> onlyIfTrue("source", optional(nonEmptyText))
  )(Model.apply)(Model.unapply))

  case class Model(source: Boolean, target: Option[String])
} 
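A note on the isTrue/isFalse checks exercised above: they look a field up in the raw form data and compare it to "true"/"false" case-insensitively. A minimal sketch of such conditions, assuming (as the calls suggest) that a condition is simply a Map[String, String] => Boolean; this is an illustration, not the library's source:

object ConditionSketch {
  type Condition = Map[String, String] => Boolean

  // true when the named field is present and equals "true", ignoring case
  def isTrue(field: String): Condition =
    data => data.get(field).exists(_.equalsIgnoreCase("true"))

  // true when the named field is present and equals "false", ignoring case
  def isFalse(field: String): Condition =
    data => data.get(field).exists(_.equalsIgnoreCase("false"))
}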
Example 33
Source File: MandatoryIfAllEqual.scala    From play-conditional-form-mapping   with Apache License 2.0 5 votes vote down vote up
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfAllEqual extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "mandatory if all equal"

  it should "mandate the target field if all of the source fields match their required value" in {
    val data = Map("s1" -> "s1val", "s2" -> "s2val", "s3" -> "s3val")
    val res = form.bind(data)

    assert(res.errors.head.key === "target")
  }

  it should "not mandate the target fields if any of the source fields do not match their required value" in {
    val data = Map("s1" -> "s1val", "s2" -> "s2val", "s3" -> "s3val")
    Seq("s1", "s2", "s3") foreach { f =>
      val data2 = data.updated(f, "notrequiredvalue")
      val res = form.bind(data2)

      assert(res.errors.isEmpty)
    }

  }

  lazy val form = Form(mapping(
    "s1" -> nonEmptyText,
    "s2" -> nonEmptyText,
    "s3" -> nonEmptyText,
    "target" -> mandatoryIfAllEqual(Seq("s1" -> "s1val", "s2" -> "s2val", "s3" -> "s3val"), nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(s1: String, s2: String, s3: String, target: Option[String])
} 
Example 34
Source File: MandatoryIfEqual.scala    From play-conditional-form-mapping   with Apache License 2.0 5 votes vote down vote up
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfEqual extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "mandatory if equal"

  it should "mandate the target field if the source has the required value" in {
    val data = Map("country" -> "England")
    val res = form.bind(data)

    assert(res.errors.head.key === "town")
  }

  it should "not mandate the target field if the source field does not have the required value" in {
    val data = Map("country" -> "Scotland")
    val res = form.bind(data)

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "country" -> nonEmptyText,
    "town" -> mandatoryIfEqual("country", "England", nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(country: String, town: Option[String])
} 
Example 35
Source File: MandatoryIfExists.scala    From play-conditional-form-mapping   with Apache License 2.0 5 votes vote down vote up
package uk.gov.voa.play.form

import org.scalatest.{Matchers, FlatSpec}
import play.api.data.Form
import play.api.data.Forms._

class MandatoryIfExists extends FlatSpec with Matchers {
  import ConditionalMappings._

  behavior of "mandatory if exists"

  it should "mandate the target field if any value for the source field is supplied" in {
    val data = Map("source" -> "anyrandomvalue")
    val res = form.bind(data)

    assert(res.errors.head.key === "target")
  }

  it should "not mandate the target field is  no value for the source field is supplied" in {
    val res = form.bind(Map.empty[String, String])

    assert(res.errors.isEmpty)
  }

  lazy val form = Form(mapping(
    "source" ->  optional(nonEmptyText),
    "target" -> mandatoryIfExists("source", nonEmptyText)
  )(Model.apply)(Model.unapply))

  case class Model(source: Option[String], target: Option[String])
} 
Example 36
Source File: ApiDataOperationsTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api.routes.platform.data

import org.scalatest.{Matchers, WordSpec}
import tech.cryptonomic.conseil.common.generic.chain.DataTypes.{OperationType, Predicate}

class ApiDataOperationsTest extends WordSpec with Matchers {

  "ApiDataOperations" should {
      "sanitizeFields" in {
        // given
        val input = List.empty

        // when
        val result = ApiDataOperations.sanitizeFields(input)

        // then
        result shouldBe List.empty
      }

      "sanitizePredicates" in {
        // given
        val examplePredicates = List(
          Predicate(
            field = "some_field",
            operation = OperationType.in,
            set = List(
              "valid",
              "valid_value",
              "invalid*value",
              "another;invalid,value",
              "yet.another.value"
            )
          )
        )

        // when
        val results = ApiDataOperations.sanitizePredicates(examplePredicates).head.set

        // then
        results should contain allElementsOf List(
          "valid",
          "valid_value",
          "invalidvalue",
          "anotherinvalidvalue",
          "yet.another.value"
        )
        results.size shouldBe 5

      }
    }

} 
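The sanitizers above strip characters that could leak into generated SQL while keeping word characters and dots. A minimal sketch of a value sanitizer that matches the expectations in this test (hypothetical helper, not Conseil's actual code):

// keeps letters, digits, underscores and dots; drops everything else
def sanitizeValueSketch(value: String): String =
  value.filter(c => c.isLetterOrDigit || c == '_' || c == '.')

// sanitizeValueSketch("another;invalid,value") == "anotherinvalidvalue"
// sanitizeValueSketch("yet.another.value")     == "yet.another.value"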
Example 37
Source File: SecurityTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api.security

import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{Matchers, WordSpec}
import tech.cryptonomic.conseil.api.security.Security.SecurityApi

class SecurityTest extends WordSpec with Matchers with ScalatestRouteTest with ScalaFutures {

  implicit override val patienceConfig = PatienceConfig(timeout = Span(2, Seconds), interval = Span(20, Millis))

  "The SecurityApi" should {

      "valid itself" in {
        SecurityApi(Set.empty, None).isValid shouldBe false
        SecurityApi(Set.empty, Some(false)).isValid shouldBe false

        SecurityApi(Set("some-key"), Some(false)).isValid shouldBe true
        SecurityApi(Set("some-key"), None).isValid shouldBe true
        SecurityApi(Set.empty, Some(true)).isValid shouldBe true
        SecurityApi(Set("some-key"), Some(true)).isValid shouldBe true
      }

      "validate a given key" in {
        SecurityApi(Set("some-key"), None).validateApiKey(Some("some-key")).futureValue shouldBe true
        SecurityApi(Set("some-key"), Some(true)).validateApiKey(Some("some-key")).futureValue shouldBe true

        SecurityApi(Set.empty, None).validateApiKey(Some("some-key")).futureValue shouldBe false
        SecurityApi(Set.empty, Some(true)).validateApiKey(Some("some-key")).futureValue shouldBe false

        SecurityApi(Set.empty, None).validateApiKey(None).futureValue shouldBe false
        SecurityApi(Set.empty, Some(true)).validateApiKey(None).futureValue shouldBe true
      }

    }
} 
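Reading the assertions back: a SecurityApi is valid when it holds at least one key or explicitly allows blank access, and a key validates only when it is known (or when no key is given and blank access is enabled). A small sketch capturing those rules; the field names here are assumptions made for illustration, not Conseil's actual definition:

import scala.concurrent.Future

case class SecurityApiSketch(keys: Set[String], allowBlank: Option[Boolean]) {
  def isValid: Boolean = keys.nonEmpty || allowBlank.contains(true)

  def validateApiKey(candidate: Option[String]): Future[Boolean] =
    Future.successful(candidate match {
      case Some(key) => keys.contains(key)        // only known keys pass
      case None      => allowBlank.contains(true) // blank access must be enabled explicitly
    })
}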
Example 38
Source File: DefaultDatabaseOperationsTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api.sql

import java.sql.Timestamp
import java.time.LocalDateTime

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}
import slick.jdbc.PostgresProfile.api._
import tech.cryptonomic.conseil.api.TezosInMemoryDatabaseSetup
import tech.cryptonomic.conseil.api.sql.DefaultDatabaseOperations._
import tech.cryptonomic.conseil.common.testkit.InMemoryDatabase
import tech.cryptonomic.conseil.common.tezos.Tables
import tech.cryptonomic.conseil.common.tezos.Tables.FeesRow

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps

class DefaultDatabaseOperationsTest
    extends WordSpec
    with Matchers
    with InMemoryDatabase
    with TezosInMemoryDatabaseSetup
    with ScalaFutures {

  "The default database operations" should {
      val fees: List[FeesRow] = List.tabulate(5) { i =>
        FeesRow(
          1 + i,
          3 + i,
          5 + i,
          Timestamp.valueOf(LocalDateTime.of(2018, 11, 22, 12, 30)),
          s"$i-example",
          None,
          None
        )
      }

      "count distinct elements in column properly" in {
        dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true
        dbHandler.run(countDistinct("tezos", "fees", "timestamp")).futureValue shouldBe 1
        dbHandler.run(countDistinct("tezos", "fees", "low")).futureValue shouldBe 5
      }

      "select distinct elements from column properly" in {
        dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true
        dbHandler.run(selectDistinct("tezos", "fees", "timestamp")).futureValue shouldBe List(
          "2018-11-22 12:30:00"
        )
        dbHandler.run(selectDistinct("tezos", "fees", "low")).futureValue should contain theSameElementsAs List(
          "1",
          "2",
          "3",
          "4",
          "5"
        )
      }

      "select distinct elements from column with 'like' properly" in {
        dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true
        dbHandler.run(selectDistinctLike("tezos", "fees", "kind", "1-")).futureValue shouldBe List(
          "1-example"
        )
      }
    }
} 
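countDistinct, selectDistinct and selectDistinctLike wrap small pieces of plain SQL. A sketch of how countDistinct could be expressed with Slick's plain-SQL interpolator, assuming the identifiers come from trusted callers (illustrative only, not the project's code):

import slick.jdbc.PostgresProfile.api._

// #$ splices identifiers verbatim, so schema/table/column must be trusted
def countDistinctSketch(schema: String, table: String, column: String): DBIO[Int] =
  sql"SELECT COUNT(DISTINCT #$column) FROM #$schema.#$table".as[Int].head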
Example 39
Source File: StakerDaoTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.tezos.michelson.contracts

import org.scalatest.{Matchers, OptionValues, WordSpec}
import tech.cryptonomic.conseil.common.tezos.TezosTypes._

class StakerDaoTest extends WordSpec with Matchers with OptionValues {

  "The Token Contracts operations for the StakerDao contract" should {

      "read a balance update from big map diff" in {
        //given

        //values taken from mainnet operations
        val ledgerId = ContractId("KT1EctCuorV2NfVb1XTQgvzJ88MQtWP8cMMv")
        val mapId = 20
        val params = """
             |{
             |  "prim": "Right",
             |  "args": [
             |    {
             |      "prim": "Left",
             |      "args": [
             |        {
             |          "prim": "Left",
             |          "args": [
             |            {
             |              "prim": "Right",
             |              "args": [
             |                {
             |                  "prim": "Pair",
             |                  "args": [
             |                    {
             |                      "bytes": "00007374616b65722d64616f2f7265736572766f6972"
             |                    },
             |                    {
             |                      "prim": "Pair",
             |                      "args": [
             |                        {
             |                          "bytes": "0000c528aa23546060e4459c5b37df752eea5bf5edc3"
             |                        },
             |                        {
             |                          "int": "1"
             |                        }
             |                      ]
             |                    }
             |                  ]
             |                }
             |              ]
             |            }
             |          ]
             |        }
             |      ]
             |    }
             |  ]
             |}
          """.stripMargin

        val senderMapUpdate = Contract.BigMapUpdate(
          action = "update",
          key = Micheline("""{"bytes": "00007374616b65722d64616f2f7265736572766f6972"}"""),
          key_hash = ScriptId("exprucaLf6G5Robew77nwXRNR7gAbUJ3da2yAwqJdhyZCXZdEkLz8t"),
          big_map = Decimal(mapId),
          value = Some(Micheline("""{"int": "1499998"}"""))
        )

        val receiverMapUpdate = Contract.BigMapUpdate(
          action = "update",
          key = Micheline("""{"bytes": "0000c528aa23546060e4459c5b37df752eea5bf5edc3"}"""),
          key_hash = ScriptId("exprtv5jtq14XnqMvskagVojNgSd8bXjxTZtDYY58MtAek5gbLKA4C"),
          big_map = Decimal(mapId),
          value = Some(Micheline("""{"int": "1"}"""))
        )

        //register the token info
        val sut = TokenContracts.fromConfig(List(ledgerId -> "FA1.2-StakerDao"))
        //set the map id for the contract
        sut.setMapId(ledgerId, mapId)

        //when
        val balanceUpdates = List(senderMapUpdate, receiverMapUpdate).map(
          mapUpdate =>
            sut
              .readBalance(ledgerId)(
                diff = mapUpdate,
                params = Some(Left(Parameters(Micheline(params))))
              )
              .value
        )

        //then
        balanceUpdates should contain theSameElementsAs List(
          AccountId("tz1dcWXLS1UBeGc7EazGvoNE6D8YSzVkAsSa") -> BigInt(1),
          AccountId("tz1WAVpSaCFtLQKSJkrdVApCQC1TNK8iNxq9") -> BigInt(1499998)
        )

      }
    }

} 
Example 40
Source File: DefaultDatabaseOperationsTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.sql

import java.sql.Timestamp
import java.time.LocalDateTime

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}
import slick.jdbc.PostgresProfile.api._
import tech.cryptonomic.conseil.common.testkit.InMemoryDatabase
import tech.cryptonomic.conseil.common.tezos.Tables
import tech.cryptonomic.conseil.common.tezos.Tables.{Fees, FeesRow}
import tech.cryptonomic.conseil.indexer.sql.DefaultDatabaseOperations._
import tech.cryptonomic.conseil.indexer.tezos.TezosInMemoryDatabaseSetup

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps

class DefaultDatabaseOperationsTest
    extends WordSpec
    with Matchers
    with InMemoryDatabase
    with TezosInMemoryDatabaseSetup
    with ScalaFutures {

  "The default database operations" should {
      val fees: List[FeesRow] = List.tabulate(5) { i =>
        FeesRow(
          1 + i,
          3 + i,
          5 + i,
          Timestamp.valueOf(LocalDateTime.of(2018, 11, 22, 12, 30)),
          s"$i-example",
          None,
          None
        )
      }

      "insert data when table is empty" in {
        dbHandler.run(insertWhenEmpty[Fees](Tables.Fees, fees)).futureValue shouldBe Some(5)
      }

      "do not insert data when table is not empty" in {
        dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true
        dbHandler.run(insertWhenEmpty[Fees](Tables.Fees, fees)).futureValue shouldBe Some(0)
      }
    }
} 
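insertWhenEmpty only writes the rows when the target table has no rows yet; otherwise it reports zero inserts, which is what the two tests above assert. A non-generic sketch of that behaviour for the Fees table (illustrative; the real helper is generic over tables):

import slick.jdbc.PostgresProfile.api._
import scala.concurrent.ExecutionContext.Implicits.global

// insert the rows only when the table is empty; otherwise report Some(0)
def insertFeesWhenEmptySketch(rows: Seq[Tables.FeesRow]): DBIO[Option[Int]] =
  Tables.Fees.exists.result.flatMap {
    case true  => DBIO.successful(Some(0))
    case false => Tables.Fees ++= rows
  }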
Example 41
Source File: LorreAppConfigTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.config

import org.scalatest.{Matchers, WordSpec}
import tech.cryptonomic.conseil.indexer.config.LorreAppConfig.Natural
import tech.cryptonomic.conseil.indexer.config.LorreAppConfig.Loaders._

class LorreAppConfigTest extends WordSpec with Matchers {

  "LorreAppConfig.Natural" should {
      "match a valid positive integer string" in {
        "10" match {
          case Natural(value) => value shouldBe 10
          case _ => fail("the matcher didn't correctly identify an integer")
        }
      }

      "refuse a zero integer string" in {
        "0" match {
          case Natural(value) => fail(s"a zero string shouldn't match as $value")
          case _ =>
        }
      }

      "refuse a negative integer string" in {
        "-10" match {
          case Natural(value) => fail(s"a negative integer string shouldn't match as $value")
          case _ =>
        }
      }

      "refuse a non-numeric string" in {
        "abc10" match {
          case Natural(value) => fail(s"a generic string shouldn't match as $value")
          case _ =>
        }
      }
    }

  "LorreAppConfig.Loaders" should {
      "extract the client host pool configuration for streaming http" in {
        import scala.collection.JavaConverters._

        val typedConfig = loadAkkaStreamingClientConfig(namespace = "akka.streaming-client")
        typedConfig shouldBe 'right

        val Right(HttpStreamingConfiguration(pool)) = typedConfig

        //verify expected entries in the pool config
        val configKeys = pool.getConfig("akka.http.host-connection-pool").entrySet.asScala.map(_.getKey)

        configKeys should contain allOf (
          "min-connections",
          "max-connections",
          "max-retries",
          "max-open-requests",
          "pipelining-limit",
          "idle-timeout",
          "pool-implementation",
          "response-entity-subscription-timeout"
        )

      }

      "fail to extract the client host pool configuration with the wrong namespace" in {
        import pureconfig.error.ThrowableFailure

        val typedConfig = loadAkkaStreamingClientConfig(namespace = "streaming-client")
        typedConfig shouldBe 'left

        val Left(failures) = typedConfig

        failures.toList should have size 1

        failures.head shouldBe a[ThrowableFailure]

        failures.head.asInstanceOf[ThrowableFailure].throwable shouldBe a[com.typesafe.config.ConfigException.Missing]

      }
    }

} 
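Natural is used here as a string extractor that matches only strictly positive integers. A minimal sketch of such an extractor (illustrative, not Lorre's implementation):

object NaturalSketch {
  // matches strings that parse to an Int greater than zero
  def unapply(s: String): Option[Int] =
    scala.util.Try(s.trim.toInt).toOption.filter(_ > 0)
}

// "10"  match { case NaturalSketch(n) => n }              // 10
// "-10" match { case NaturalSketch(n) => n; case _ => 0 } // 0, no match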
Example 42
Source File: TezosTypesTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.tezos

import java.time.Instant

import org.scalatest.{EitherValues, Matchers, OptionValues, WordSpec}
import tech.cryptonomic.conseil.common.tezos.TezosTypes._

class TezosTypesTest extends WordSpec with Matchers with OptionValues with EitherValues {

  val sut = TezosTypes

  "The Base58Check verifier" should {
      "accept an empty string" in {
        sut.isBase58Check("") shouldBe true
      }

      "accept a correctly encoded string" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRsWDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe true
      }

      "reject a string with forbidden chars" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzulJNGzRRsWDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "$signiRfcqmbGc6UtW1WzulJNGzRRsWDpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzulJNGzRRsWDpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf*"
        ) shouldBe false
      }

      "reject a string with spaces" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRs DLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          " signiRfcqmbGc6UtW1WzuJNGzRRsDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRsDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf "
        ) shouldBe false
      }

    }

  "The Syntax import" should {
      "allow building Block-tagged generic data" in {
        import TezosTypes.Syntax._
        val someTime = Some(Instant.ofEpochMilli(0))
        val content = "A content string"
        val (hash, level) = (BlockHash("hash"), 1)

        content.taggedWithBlock(hash, level, someTime, None, None) shouldEqual BlockTagged(
          hash,
          level,
          someTime,
          None,
          None,
          content
        )
      }
    }

  "The BlockTagged wrapper" should {
      "convert to a tuple" in {
        val someTime = Some(Instant.ofEpochMilli(0))
        val content = "A content string"
        val (hash, level) = (BlockHash("hash"), 1)

        BlockTagged(hash, level, someTime, None, None, content).asTuple shouldEqual (hash, level, someTime, None, None, content)
      }
    }

} 
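The Base58Check verifier accepts only characters from the Base58 alphabet, which deliberately omits 0, O, I and l and contains no whitespace, and it also accepts the empty string. A minimal sketch of such a check (illustrative, not the project's implementation):

object Base58Sketch {
  // Bitcoin-style Base58 alphabet: no 0, O, I or l, and no whitespace
  private val alphabet =
    "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz".toSet

  // every character must belong to the alphabet; the empty string passes trivially
  def isBase58Check(s: String): Boolean = s.forall(alphabet)
}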
Example 43
Source File: MathUtilTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.util

import org.scalatest.{FlatSpec, Matchers}

class MathUtilTest extends FlatSpec with Matchers {

  "MathUtilTest" should "correctly calculate the mean of a sequence" in {
      val dataset = List(1d, 2d, 3d, 4d, 5d, 6d, 7d, 8d, 9d, 10d)
      MathUtil.mean(dataset) should be(5.5)
    }

  "MathUtilTest" should "correctly calculate the population standard deviation of a sequence" in {
      val dataset = List(1d, 2d, 3d, 4d, 5d, 6d, 7d, 8d, 9d, 10d)
      MathUtil.stdev(dataset) should be(2.8722813232690143)
    }

  "MathUtilTest" should "correctly calculate the population standard deviation of a sequence corrected for using samples" in {
      val dataset = List(1d, 2d, 3d, 4d, 5d, 6d, 7d, 8d, 9d, 10d)
      MathUtil.sampledStdev(dataset) should be(3.0276503540974917)
    }
} 
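The expected numbers follow from the usual definitions: the population standard deviation divides the squared deviations by n, while the sample-corrected version divides by n - 1. A sketch that reproduces the asserted values (illustrative, not the project's source):

object MathUtilSketch {
  def mean(xs: Seq[Double]): Double = xs.sum / xs.size

  // population standard deviation: divide by n
  def stdev(xs: Seq[Double]): Double = {
    val m = mean(xs)
    math.sqrt(xs.map(x => (x - m) * (x - m)).sum / xs.size)
  }

  // sample standard deviation: Bessel's correction divides by n - 1
  def sampledStdev(xs: Seq[Double]): Double = {
    val m = mean(xs)
    math.sqrt(xs.map(x => (x - m) * (x - m)).sum / (xs.size - 1))
  }
}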
Example 44
Source File: CollectionOpsTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.util

import org.scalatest.{Matchers, WordSpec}
import CollectionOps._

class CollectionOpsTest extends WordSpec with Matchers {

  "Collection operations" should {

      "allow grouping over sequences of tuples" in {

        val seq = Seq(
          1 -> "a",
          2 -> "aa",
          3 -> "aaa",
          1 -> "b",
          2 -> "bb"
        )

        groupByKey(seq) shouldEqual Map(
          1 -> Seq("a", "b"),
          2 -> Seq("aa", "bb"),
          3 -> Seq("aaa")
        )

      }

      "provide extension syntax to group over sequences of tuples" in {

        val seq = Seq(
          1 -> "a",
          2 -> "aa",
          3 -> "aaa",
          1 -> "b",
          2 -> "bb"
        )

        seq.byKey shouldEqual Map(
          1 -> Seq("a", "b"),
          2 -> Seq("aa", "bb"),
          3 -> Seq("aaa")
        )

      }

      "allow to define operations on bounds for a non-empty collection" in {
        val seq = Seq(
          "a",
          "aa",
          "aaa",
          "b",
          "bb"
        )

        applyOnBounds(seq)((first, last) => (first, last)) shouldEqual Some(("a", "bb"))

      }

      "allow to define operations on bounds for an empty collection, returning an empty value" in {
        val seq = Seq.empty[Any]

        applyOnBounds(seq)((first, last) => (first, last)) shouldBe 'empty

      }

      "provide extension syntax to call operations on bounds for a non-empty collection" in {
        val seq = Seq(
          "a",
          "aa",
          "aaa",
          "b",
          "bb"
        )

        seq.onBounds((first, last) => (first, last)) shouldEqual Some(("a", "bb"))

      }

      "provide extension syntax to call operations on bounds for an empty collection, returning an empty value" in {
        val seq = Seq.empty[Any]

        seq.onBounds((first, last) => (first, last)) shouldBe 'empty

      }

    }
} 
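groupByKey (and the byKey extension) buckets tuple values under their first component while preserving encounter order, and applyOnBounds/onBounds apply a function to the first and last element of a non-empty sequence. Minimal sketches consistent with the assertions above (illustrative):

def groupByKeySketch[K, V](pairs: Seq[(K, V)]): Map[K, Seq[V]] =
  pairs.groupBy { case (k, _) => k }.map { case (k, kvs) => k -> kvs.map(_._2) }

def applyOnBoundsSketch[A, R](xs: Seq[A])(f: (A, A) => R): Option[R] =
  for (first <- xs.headOption; last <- xs.lastOption) yield f(first, last)

// groupByKeySketch(Seq(1 -> "a", 2 -> "aa", 1 -> "b")) == Map(1 -> Seq("a", "b"), 2 -> Seq("aa"))
// applyOnBoundsSketch(Seq.empty[Int])(_ + _)           == None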
Example 45
Source File: CryptoUtilTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.util

import org.scalatest.{FlatSpec, Matchers}
import cats.implicits._

class CryptoUtilTest extends FlatSpec with Matchers {

  "CryptoUtil" should "correctly decode and encode a Tezos account ID as bytes" in {
      val accountID = "tz1Z5pFi5Sy99Kcz36XA5WtKW7Z6NVG9LdA4"
      val decoded = CryptoUtil.base58CheckDecode(accountID, "tz1").get
      val encoded = CryptoUtil.base58CheckEncode(decoded.toList, "tz1").get
      encoded should be(accountID)
    }

  it should "correctly decode and encode a Tezos operation ID" in {
      val operationID = "op26bhfiE1tVKiZHfkujRcasnghyRnvDx9gnKGiNwAW98M71EWF"
      val decoded = CryptoUtil.base58CheckDecode(operationID, "op").get
      val encoded = CryptoUtil.base58CheckEncode(decoded.toList, "op").get
      encoded should be(operationID)
    }

  it should "correctly pack and unpack a Tezos account ID as hex-string" in {
      val accountID = "tz1Z5pFi5Sy99Kcz36XA5WtKW7Z6NVG9LdA4"
      val packed = CryptoUtil.packAddress(accountID).get
      //packing adds the packed length at the beginning, read doesn't care
      val unpacked = CryptoUtil.readAddress(packed.drop(12)).get
      unpacked should be(accountID)
    }

  it should "read a binary address to its b58check tezos id" in {
      val address = CryptoUtil.readAddress("0000a8d45bdc966ddaaac83188a1e1c1fde2a3e05e5c").get
      address shouldBe "tz1b2icJC4E7Y2ED1xsZXuqYpF7cxHDtduuP"
    }

  it should "decode a zarith signed number" in {
      val hex = List(
        "86bb230200000000",
        "b8c6ce95020200000000",
        "ac9a010200000000",
        "840e0200000000",
        "a88c010200000000",
        "090200000000",
        "490200000000"
      )

      val nums = hex.traverse(CryptoUtil.decodeZarithNumber).get

      nums should contain theSameElementsInOrderAs List(
        BigInt(290502),
        BigInt(291099064),
        BigInt(9900),
        BigInt(900),
        BigInt(9000),
        BigInt(9),
        BigInt(-9)
      )
    }
} 
Example 46
Source File: DatabaseUtilTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.util

import org.scalatest.{Matchers, WordSpec}
import tech.cryptonomic.conseil.common.generic.chain.DataTypes.Predicate
import tech.cryptonomic.conseil.common.generic.chain.DataTypes.OperationType

class DatabaseUtilTest extends WordSpec with Matchers {

  "Database utils query builder" should {
      val sut = DatabaseUtil.QueryBuilder

      "concatenate multiple values in a sql action" in {
        sut.insertValuesIntoSqlAction(Seq("a", "b", "c")).queryParts.mkString("") shouldBe "('a','b','c')"
        sut.insertValuesIntoSqlAction(Seq(1, 2, 3)).queryParts.mkString("") shouldBe "('1','2','3')"
      }

      "concatenate an empty sequence of values in a sql action" in {
        sut.insertValuesIntoSqlAction(Seq.empty[Any]).queryParts.mkString("") shouldBe "()"
      }

      "concatenate a singleton sequence in a sql action" in {
        sut.insertValuesIntoSqlAction(Seq("single")).queryParts.mkString("") shouldBe "('single')"
      }

      "concatenate groups of predicates with OR" in {
        val predicates =
          Predicate("fa", OperationType.eq, List("a"), false, None, group = Some("1")) ::
              Predicate("fb", OperationType.eq, List("b"), false, None, group = Some("1")) ::
              Predicate("fc", OperationType.eq, List("c"), false, None, group = Some("2")) ::
              Predicate("fd", OperationType.eq, List("d"), false, None, group = Some("2")) ::
              Nil

        val first :: rest = sut.makePredicates(predicates)
        val fragment = sut.concatenateSqlActions(first, rest: _*).queryParts.mkString
        fragment shouldBe "AND (True AND fa = 'a' AND fb = 'b') OR (True AND fc = 'c' AND fd = 'd') "
      }
    }

} 
Example 47
Source File: PathTemplateParserSpec.scala    From cloudstate   with Apache License 2.0 5 votes vote down vote up
package io.cloudstate.proxy

import io.cloudstate.proxy.PathTemplateParser.{PathTemplateParseException, TemplateVariable}
import org.scalatest.{Matchers, WordSpec}

class PathTemplateParserSpec extends WordSpec with Matchers {

  private def matches(template: PathTemplateParser.ParsedTemplate, path: String): Option[List[String]] = {
    val m = template.regex.pattern.matcher(path)
    if (m.matches()) {
      Some((1 to m.groupCount()).map(m.group).toList)
    } else None
  }

  private def failParse(path: String) = {
    val e = the[PathTemplateParseException] thrownBy PathTemplateParser.parse(path)
    // Uncomment to sanity check the user friendliness of error messages
    // println(e.prettyPrint + "\n")
    e
  }

  "The path template parse" should {
    "parse a simple path template" in {
      val template = PathTemplateParser.parse("/foo")
      template.fields shouldBe empty
      matches(template, "/foo") shouldBe Some(Nil)
      matches(template, "/bar") shouldBe None
    }

    "parse a path template with variables" in {
      val template = PathTemplateParser.parse("/foo/{bar}/baz")
      template.fields shouldBe List(TemplateVariable(List("bar"), false))
      matches(template, "/foo") shouldBe None
      matches(template, "/foo/bl/ah/baz") shouldBe None
      matches(template, "/foo/blah/baz") shouldBe Some(List("blah"))
    }

    "parse a path template with multiple variables" in {
      val template = PathTemplateParser.parse("/foo/{bar}/baz/{other}")
      template.fields shouldBe List(TemplateVariable(List("bar"), false), TemplateVariable(List("other"), false))
      matches(template, "/foo/blah/baz") shouldBe None
      matches(template, "/foo/blah/baz/blah2") shouldBe Some(List("blah", "blah2"))
    }

    "support variable templates" in {
      val template = PathTemplateParser.parse("/foo/{bar=*/a/*}/baz")
      template.fields shouldBe List(TemplateVariable(List("bar"), true))
      matches(template, "/foo/blah/baz") shouldBe None
      matches(template, "/foo/bl/a/h/baz") shouldBe Some(List("bl/a/h"))
    }

    "support rest of path glob matching" in {
      val template = PathTemplateParser.parse("/foo/{bar=**}")
      template.fields shouldBe List(TemplateVariable(List("bar"), true))
      matches(template, "/foo/blah/baz") shouldBe Some(List("blah/baz"))
    }

    "support verbs" in {
      val template = PathTemplateParser.parse("/foo/{bar}:watch")
      template.fields shouldBe List(TemplateVariable(List("bar"), false))
      matches(template, "/foo/blah") shouldBe None
      matches(template, "/foo/blah:watch") shouldBe Some(List("blah"))
    }

    "fail to parse nested variables" in {
      val e = failParse("/foo/{bar={baz}}")
      e.column shouldBe 11
    }

    "fail to parse templates that don't start with a slash" in {
      val e = failParse("foo")
      e.column shouldBe 1
    }

    "fail to parse templates with missing equals" in {
      val e = failParse("/foo/{bar*}")
      e.column shouldBe 10
    }

    "fail to parse templates with no closing bracket" in {
      val e = failParse("/foo/{bar")
      e.column shouldBe 10
    }

    "fail to parse templates with badly placed stars" in {
      val e = failParse("/f*o")
      e.column shouldBe 3
    }

    "fail to parse templates with bad variable patterns" in {
      val e = failParse("/foo/{bar=ba*}")
      e.column shouldBe 13
    }
  }

} 
Example 48
Source File: AnySupportSpec.scala    From cloudstate   with Apache License 2.0 5 votes vote down vote up
package io.cloudstate.javasupport.impl

import com.example.shoppingcart.Shoppingcart
import com.google.protobuf.{ByteString, Empty}
import io.cloudstate.javasupport.Jsonable
import io.cloudstate.protocol.entity.UserFunctionError
import io.cloudstate.protocol.event_sourced.EventSourcedProto
import org.scalatest.{Matchers, OptionValues, WordSpec}

import scala.beans.BeanProperty

class AnySupportSpec extends WordSpec with Matchers with OptionValues {

  private val anySupport = new AnySupport(Array(Shoppingcart.getDescriptor, EventSourcedProto.javaDescriptor),
                                          getClass.getClassLoader,
                                          "com.example")
  private val addLineItem = Shoppingcart.AddLineItem
    .newBuilder()
    .setName("item")
    .setProductId("id")
    .setQuantity(10)
    .build()

  "Any support" should {

    "support se/deserializing java protobufs" in {
      val any = anySupport.encodeScala(addLineItem)
      any.typeUrl should ===("com.example/" + Shoppingcart.AddLineItem.getDescriptor.getFullName)
      anySupport.decode(any) should ===(addLineItem)
    }

    "support se/deserializing scala protobufs" in {
      val error = UserFunctionError("error")
      val any = anySupport.encodeScala(UserFunctionError("error"))
      any.typeUrl should ===("com.example/cloudstate.UserFunctionError")
      anySupport.decode(any) should ===(error)
    }

    "support resolving a service descriptor" in {
      val methods = anySupport.resolveServiceDescriptor(Shoppingcart.getDescriptor.findServiceByName("ShoppingCart"))
      methods should have size 3
      val method = methods("AddItem")

      // Input type
      method.inputType.typeUrl should ===("com.example/" + Shoppingcart.AddLineItem.getDescriptor.getFullName)
      method.inputType.typeClass should ===(classOf[Shoppingcart.AddLineItem])
      val iBytes = method.inputType.asInstanceOf[ResolvedType[Any]].toByteString(addLineItem)
      method.inputType.parseFrom(iBytes) should ===(addLineItem)

      // Output type - this also checks that when java_multiple_files is true, it works
      method.outputType.typeUrl should ===("com.example/" + Empty.getDescriptor.getFullName)
      method.outputType.typeClass should ===(classOf[Empty])
      val oBytes = method.outputType.asInstanceOf[ResolvedType[Any]].toByteString(Empty.getDefaultInstance)
      method.outputType.parseFrom(oBytes) should ===(Empty.getDefaultInstance)
    }

    def testPrimitive[T](name: String, value: T, defaultValue: T) = {
      val any = anySupport.encodeScala(value)
      any.typeUrl should ===(AnySupport.CloudStatePrimitive + name)
      anySupport.decode(any) should ===(value)

      val defaultAny = anySupport.encodeScala(defaultValue)
      defaultAny.typeUrl should ===(AnySupport.CloudStatePrimitive + name)
      defaultAny.value.size() shouldBe 0
      anySupport.decode(defaultAny) should ===(defaultValue)
    }

    "support se/deserializing strings" in testPrimitive("string", "foo", "")
    "support se/deserializing ints" in testPrimitive("int32", 10, 0)
    "support se/deserializing longs" in testPrimitive("int64", 10L, 0L)
    "support se/deserializing floats" in testPrimitive("float", 0.5f, 0f)
    "support se/deserializing doubles" in testPrimitive("double", 0.5d, 0d)
    "support se/deserializing bytes" in testPrimitive("bytes", ByteString.copyFromUtf8("foo"), ByteString.EMPTY)
    "support se/deserializing booleans" in testPrimitive("bool", true, false)

    "support se/deserializing json" in {
      val myJsonable = new MyJsonable
      myJsonable.field = "foo"
      val any = anySupport.encodeScala(myJsonable)
      any.typeUrl should ===(AnySupport.CloudStateJson + classOf[MyJsonable].getName)
      anySupport.decode(any).asInstanceOf[MyJsonable].field should ===("foo")
    }

  }

}

@Jsonable
class MyJsonable {
  @BeanProperty var field: String = _
} 
Example 49
Source File: TopologicalSortSpec.scala    From algorithmaday   with GNU General Public License v3.0 5 votes vote down vote up
package org.pfcoperez.dailyalgorithm.applications

import org.scalatest.{ FlatSpec, Matchers, Inside }

class TopologicalSortSpec extends FlatSpec with Matchers with Inside {
  import TopologicalSort._

  "TopologicalSort algorithm implementation" should "order elements by their dependency tree" in {

    val dm: DependencyMatrix[Char] = Map(
      'A' -> Set('E'),
      'B' -> Set('F'),
      'C' -> Set('G'),
      'D' -> Set('G'),
      'E' -> Set('H'),
      'F' -> Set('H', 'I'),
      'G' -> Set('I'),
      'H' -> Set('J'),
      'I' -> Set('H'),
      'J' -> Set())

    inside(topologicalSort(dm)) {
      case Some(order) =>
        val node2index = order.zipWithIndex.toMap

        node2index('J') shouldBe 0
        node2index('H') shouldBe 1
        node2index('E') should be < node2index('A')
        node2index('F') should be < node2index('B')
        node2index('I') should be < node2index('G')
        node2index('G') should be < node2index('C')
        node2index('G') should be < node2index('D')

    }

  }

  it should "fail to create an order when there are cyclic dependencies" in {

    val dm: DependencyMatrix[Char] = Map(
      'A' -> Set('B'),
      'B' -> Set('C'),
      'C' -> Set('A'))

    topologicalSort(dm) shouldBe empty

  }

  it should "serve to make a plan to develop interstellar travel" in {

    val technologicalDependencies: DependencyMatrix[String] = Map(
      "Interstellar Travel" -> Set("Safe cabins", "Warp drive"),
      "Quantum computing" -> Set(),
      "Magnetic shields" -> Set("Quantum computing"),
      "Dark matter confinement" -> Set("Quantum computing"),
      "Safe cabins" -> Set("Magnetic shields"),
      "Warp drive" -> Set("Dark matter confinement"))

    inside(topologicalSort(technologicalDependencies)) {
      case Some(plan) =>

        plan.head shouldBe "Quantum computing"
        plan.last shouldBe "Interstellar Travel"

        val planIndexes: Map[String, Int] = plan.zipWithIndex.toMap

        planIndexes("Dark matter confinement") should be < planIndexes("Warp drive")
        planIndexes("Magnetic shields") should be < planIndexes("Safe cabins")

    }

  }

} 
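One way to implement a topological sort over such a dependency matrix is Kahn-style peeling: repeatedly emit every node whose dependencies have all been emitted already, and give up when a pass makes no progress (a cycle). A sketch under the assumption that DependencyMatrix[T] is a Map[T, Set[T]] from a node to its dependencies (illustrative, not necessarily the repository's implementation):

def topologicalSortSketch[T](dm: Map[T, Set[T]]): Option[List[T]] = {
  @annotation.tailrec
  def loop(remaining: Map[T, Set[T]], done: List[T]): Option[List[T]] =
    if (remaining.isEmpty) Some(done.reverse)
    else {
      // nodes whose dependencies have all been emitted already
      val (free, rest) = remaining.partition { case (_, deps) => deps.forall(done.contains) }
      if (free.isEmpty) None // no progress means a cyclic dependency
      else loop(rest, free.keys.foldLeft(done)((acc, n) => n :: acc))
    }
  loop(dm, Nil)
}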
Example 50
Source File: ConvexHullSpec.scala    From algorithmaday   with GNU General Public License v3.0 5 votes vote down vote up
package org.pfcoperez.dailyalgorithm.geometry

import org.scalatest.{ Matchers, WordSpec }

import org.pfcoperez.dailyalgorithm.Geometry._

class ConvexHullSpec extends WordSpec with Matchers {

  implicit def tuple2point(p: (Double, Double)): Point = Vect(p._1, p._2)

  "Gift Wrapping 2D Convex Hull algorithm" when {

    val valuesAndExpectations = Seq(
      Set[Vect]((-1.0, -1.0), (1.0, -1.0), (1.0, 1.0), (-1.0, 1.0), (0.0, 0.0)) -> Some(Set[Vect]((1.0, -1.0), (1.0, 1.0), (-1.0, 1.0), (-1.0, -1.0))),
      Set[Vect]((-1.0, 1.0), (0.0, 0.0)) -> None,
      Set[Vect]((-1.0, 0.0), (0.0, 0.0), (1.0, 1.0), (0.5, 0.5)) -> Some(Set[Vect]((0.0, 0.0), (1.0, 1.0), (-1.0, 0.0))),
      Set[Vect]((10.0, 10.0), (20.0, 25.0), (30.0, 30.0), (10.0, 30.0)) -> Some(Set[Vect]((10.0, 30.0), (30.0, 30.0), (10.0, 10.0))),
      Set[Vect](
        (4.4, 14.0),
        (6.7, 15.25),
        (6.9, 12.8),
        (2.1, 11.1),
        (9.5, 14.9),
        (13.2, 11.9),
        (10.3, 12.3),
        (6.8, 9.5),
        (3.3, 7.7),
        (0.6, 5.1),
        (5.3, 2.4),
        (8.45, 4.7),
        (11.5, 9.6),
        (13.8, 7.3),
        (12.9, 3.1),
        (11.0, 1.1)) -> Some(Set[Vect](
          (5.3, 2.4),
          (11.0, 1.1),
          (12.9, 3.1),
          (13.8, 7.3),
          (13.2, 11.9),
          (9.5, 14.9),
          (6.7, 15.25),
          (4.4, 14.0),
          (2.1, 11.1),
          (0.6, 5.1))))

    valuesAndExpectations foreach {
      case (input, expected) => s"CH($input)" should {
        s"result in $expected" in {
          giftWrappingConvexHull(input).map(_.toSet) shouldBe expected
          fasterGiftWrappingConvexHull(input).map(_.toSet) shouldBe expected
        }
      }
    }

  }

} 
Example 51
Source File: AncestrySpec.scala    From algorithmaday   with GNU General Public License v3.0 5 votes vote down vote up
package org.pfcoperez.dailyalgorithm.datastructures.graphs.directed

import org.scalatest.{ FlatSpec, Matchers }

class AncestrySpec extends FlatSpec with Matchers {

  val F = Node("F")
  val E = Node("E")
  val D = Node("D")
  val C = Node("C")
  val B = Node("B", Seq(E, F))
  val A = Node("A", Seq(B, C, D))
  val H = Node("H", Seq(F))
  val I = Node("I", Seq(H))
  val G = Node("G", Seq(I))

  val nodes = Seq(A, B, C, D, E, F, G, H, I)

  "The findParents function" should "generate the whole ancestry of the provided nodes" in {

    val ancestry = findParents(nodes)

    ancestry.get(A) shouldBe None
    ancestry.get(B) shouldBe Some(Set(A))
    ancestry.get(C) shouldBe Some(Set(A))
    ancestry.get(D) shouldBe Some(Set(A))
    ancestry.get(E) shouldBe Some(Set(B))
    ancestry.get(F) shouldBe Some(Set(B, H))
    ancestry.get(G) shouldBe None
    ancestry.get(H) shouldBe Some(Set(I))
    ancestry.get(I) shouldBe Some(Set(G))

  }

} 
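findParents inverts the child links: every node that appears in some other node's child sequence is mapped to the set of nodes pointing at it, and roots such as A and G never appear as keys. A sketch of that inversion, assuming Node exposes its child sequence as a children field (illustrative):

def findParentsSketch(nodes: Seq[Node]): Map[Node, Set[Node]] =
  nodes
    .flatMap(parent => parent.children.map(child => child -> parent))
    .groupBy { case (child, _) => child }
    .map { case (child, links) => child -> links.map(_._2).toSet }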
Example 52
Source File: BidirectionalSearchSpec.scala    From algorithmaday   with GNU General Public License v3.0 5 votes vote down vote up
package org.pfcoperez.dailyalgorithm.datastructures.graphs.directed

import org.scalatest.{ FlatSpec, Matchers }

class BidirectionalSearchSpec extends FlatSpec with Matchers {

  "Bidirectional search" should "be able to find whther any two nodes are connected" in {

    val F = Node("F")
    val E = Node("E")
    val D = Node("D")
    val C = Node("C")
    val B = Node("B", Seq(E, F))
    val A = Node("A", Seq(B, C, D))
    val H = Node("H", Seq(F))
    val I = Node("I", Seq(H))
    val G = Node("G", Seq(I))

    val pairsInTouch = Seq(
      A -> I,
      B -> H,
      E -> E)

    val unreachablePairs = Seq(
      E -> F,
      D -> C,
      D -> I)

    pairsInTouch foreach {
      case (from, to) =>
        atSameConnectedComponent(from, to) shouldBe true
        atSameConnectedComponent(to, from) shouldBe true
    }

    unreachablePairs foreach {
      case (from, to) =>
        println(from.value, to.value)
        atSameConnectedComponent(from, to) shouldBe false
        atSameConnectedComponent(to, from) shouldBe false
    }

  }

} 
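With this data the check succeeds exactly when frontiers grown forward from both nodes eventually share a vertex (E and F never meet because neither has outgoing edges). A sketch of that frontier-intersection idea, again assuming a children field on Node; it is not necessarily the repository's implementation:

def atSameConnectedComponentSketch(a: Node, b: Node): Boolean = {
  @annotation.tailrec
  def expand(frontA: Set[Node], frontB: Set[Node]): Boolean =
    if ((frontA intersect frontB).nonEmpty) true
    else {
      val nextA = frontA ++ frontA.flatMap(_.children)
      val nextB = frontB ++ frontB.flatMap(_.children)
      // both frontiers stabilised without ever meeting
      if (nextA == frontA && nextB == frontB) false
      else expand(nextA, nextB)
    }
  expand(Set(a), Set(b))
}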
Example 53
Source File: MinimaxSpec.scala    From algorithmaday   with GNU General Public License v3.0 5 votes vote down vote up
package org.pfcoperez.dailyalgorithm.gametheory

import org.scalatest.{ WordSpec, Matchers }

import scala.language.implicitConversions
import scala.language.postfixOps

class MinimaxSpec extends WordSpec with Matchers {

  "Minimax#bestMovement template" can {

    "model tic-tac-toe game" should {

      trait BoardValue
      trait Player extends BoardValue
      object X extends Player
      object O extends Player
      object E extends BoardValue

      type Board = Vector[Vector[BoardValue]]
      type Score = Int
      type Position = (Int, Int)
      case class Movement(position: Position, byMaxPlayer: Boolean)

      val boardLength = 3

      val emptyBoard = Vector.fill(boardLength)(Vector.fill(boardLength)(E))

      def isMaxPlayer(player: Player): Boolean = player == X
      def whichPlayer(isMaxPlayer: Boolean): Player = if (isMaxPlayer) X else O

      implicit def validMovements(board: Board, isMaxPlayer: Boolean): Seq[Movement] =
        for {
          i <- 0 until boardLength
          j <- 0 until boardLength
          if board(i)(j) == E
        } yield Movement((i, j), isMaxPlayer)

      implicit def applyMovement(board: Board, movement: Movement): Board = {
        val Movement((i, j), isMaxPlayer) = movement
        val row = board(i)
        board.updated(i, row.updated(j, whichPlayer(isMaxPlayer)))
      }

      implicit def score(board: Board): Score = {
        for {
          player <- Seq(X, O)
          orientedBoard <- Seq(board, board.transpose)
          crossWin = orientedBoard.exists(_ == Vector.fill(boardLength)(player))
          xWin = Seq((i: Int) => boardLength - 1 - i, (i: Int) => i) exists { trans =>
            (0 until boardLength) forall { k =>
              board(k)(trans(k)) == player
            }
          }
          if (crossWin || xWin)
        } yield {
          if (isMaxPlayer(player)) 10 else -10
        }
      }.headOption getOrElse {
        val h = boardLength / 2
        if (board(h)(h) == whichPlayer(true)) 1
        else if (board(h)(h) == whichPlayer(false)) -1
        else 0
      }

      def computeBestPossibleMovement(board: Board, player: Player, maxDepth: Int = 100): Option[(Movement, Score)] =
        Minimax.bestMovement[Movement, Board, Score](board, isMaxPlayer(player), maxDepth)

      "able to detect that it can win in a lose-lose situation" in {

        val lostHand = Vector(
          Vector(O, X, E),
          Vector(O, O, E),
          Vector(X, X, O))

        computeBestPossibleMovement(lostHand, X).get._2 shouldBe -10

      }

      "able to find the best movement" in {
        computeBestPossibleMovement(emptyBoard, X).get._2 shouldBe 10
      }

      "able to tie a game a player can't win" in {

        val losingHand = Vector(
          Vector(X, O, E),
          Vector(O, O, E),
          Vector(X, X, O))

        val res = computeBestPossibleMovement(losingHand, X)
        println(res)
        res.get._2 shouldBe -1

      }

    }

  }

} 
Example 54
Source File: BinaryVersionTests.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index.model.release

import ch.epfl.scala.index.model.{Milestone, ReleaseCandidate, release}
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FunSpec, Matchers}

class BinaryVersionTests
    extends FunSpec
    with Matchers
    with TableDrivenPropertyChecks {
  it("should parse any binary version") {
    val inputs = Table(
      ("input", "output"),
      ("1", MajorBinary(1)),
      ("1.x", MajorBinary(1)),
      ("2.12", MinorBinary(2, 12)),
      ("0.6", MinorBinary(0, 6)),
      ("2.13.0", PatchBinary(2, 13, 0)),
      ("0.4.0", PatchBinary(0, 4, 0)),
      ("0.4.0-M2", PreReleaseBinary(0, 4, Some(0), Milestone(2))),
      ("0.23.0-RC1",
       release.PreReleaseBinary(0, 23, Some(0), ReleaseCandidate(1))),
      ("1.1-M1", release.PreReleaseBinary(1, 1, None, Milestone(1)))
    )

    forAll(inputs) { (input, output) =>
      BinaryVersion.parse(input) should contain(output)
    }
  }

  it("should be ordered") {
    val inputs = Table[BinaryVersion, BinaryVersion](
      ("lower", "higher"),
      (MajorBinary(1), MajorBinary(2)),
      (MajorBinary(1), MinorBinary(1, 1)), // 1.x < 1.1
      (MajorBinary(1), MinorBinary(2, 1)),
      (release.PreReleaseBinary(1, 2, None, Milestone(1)), MinorBinary(1, 2)),
      (MajorBinary(1), release.PreReleaseBinary(2, 0, None, Milestone(1)))
    )

    forAll(inputs) { (lower, higher) =>
      lower shouldBe <(higher)
    }
  }
} 
Example 55
Source File: TestPerfectScores.scala    From spark-ranking-metrics   with The Unlicense 5 votes vote down vote up
package com.github.jongwook

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.scalactic.{Equality, TolerantNumerics}
import org.scalatest.{FlatSpec, Matchers}


class TestPerfectScores extends FlatSpec with Matchers {
  import TestFixture._
  implicit val doubleEquality: Equality[Double] = TolerantNumerics.tolerantDoubleEquality(eps)

  val perfectScores: Seq[(String, Map[Metric, Seq[Double]])] = {
    val spark = SparkSession.builder().master(new SparkConf().get("spark.master", "local[8]")).getOrCreate()

    import spark.implicits._

    val predictionDF = spark.createDataset(prediction)
    val groundTruthDF = spark.createDataset(groundTruth)

    for ((name, df) <- Seq("prediction" -> predictionDF, "ground truth" -> groundTruthDF)) yield {
      val metrics = new SparkRankingMetrics(df, df, itemCol = "product", predictionCol = "rating")

      name -> Map[Metric, Seq[Double]](
        NDCG -> metrics.ndcgAt(ats),
        MAP -> metrics.mapAt(ats),
        Precision -> metrics.precisionAt(Seq(Integer.MAX_VALUE)),
        Recall -> metrics.recallAt(Seq(Integer.MAX_VALUE))
      )
    }
  }

  for ((name, scores) <- perfectScores) {
    for (metric <- Seq(NDCG, MAP, Precision, Recall)) {
      s"In $name dataset, our $metric implementation" should s"return 1.0 for the perfect prediction" in {
        for (score <- scores(metric)) {
          score should equal(1.0)
        }
      }
    }
  }
} 
Example 56
Source File: DSLSpec.scala    From nd4s   with Apache License 2.0 5 votes vote down vote up
package org.nd4s

import org.junit.runner.RunWith
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.factory.Nd4j
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import org.nd4s.Implicits._

@RunWith(classOf[JUnitRunner])
class DSLSpec extends FlatSpec with Matchers {

  "DSL" should "wrap and extend an INDArray" in {

    // This test just verifies that an INDArray gets wrapped with an implicit conversion

    val nd = Nd4j.create(Array[Float](1, 2), Array(2, 1))
    val nd1 = nd + 10L // + creates new array, += modifies in place

    nd.get(0) should equal(1)
    nd1.get(0) should equal(11)

    val nd2 = nd += 100
    nd2 should equal(nd)
    nd2.get(0) should equal(101)

    // Verify that we are working with regular old INDArray objects
    nd2 match {
      case i: INDArray => // do nothing
      case _ => fail("Expect our object to be an INDArray")
    }

  }

  "DSL" should "not prevent Map[Int,T] creation" in {
    Map(0->"hello") shouldBe a [Map[_,_]]
  }
} 
Example 57
Source File: NDArrayCollectionAPITest.scala    From nd4s   with Apache License 2.0 5 votes vote down vote up
package org.nd4s

import org.nd4s.Implicits._
import org.scalatest.{Matchers, FlatSpec}

class NDArrayCollectionAPITest extends FlatSpec with Matchers {
  "CollectionLikeNDArray" should "provide a filter API" ignore {
    val ndArray =
      Array(
        Array(1, 2, 3),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray

    val filtered = ndArray.filter(_ > 3)

    assert(filtered ==
      Array(
        Array(0, 0, 0),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray)
  }
  it should "provides filter bitmask API" ignore {
    val ndArray =
      Array(
        Array(1, 2, 3),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray

    val filterMasked = ndArray.filterBit(_ % 2 == 0)

    assert(filterMasked ==
      Array(
        Array(0, 1, 0),
        Array(1, 0, 1),
        Array(0, 1, 0)
      ).toNDArray)
  }
  it should "provides map API" ignore {
    val ndArray =
      Array(
        Array(1, 2, 3),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray

    val mapped = ndArray.map(_ * 2 + 1)

    assert(mapped ==
      Array(
        Array(3, 5, 7),
        Array(9, 11, 13),
        Array(15, 17, 19)
      ).toNDArray)
  }

  it should "provides forall checker" in {
    val ndArray =
      Array(
        Array(1, 2, 3),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray

    //check if all elements in nd meet the criteria.
    assert(ndArray > 0)
    assert(ndArray.forall(_ > 0))
    "ndArray.forallC(_.absoluteValue().doubleValue() > 0)" shouldNot typeCheck
    assert(ndArray < 10)
    assert(!(ndArray >= 5))
  }

  it should "provides exist API" in {
    val ndArray =
      Array(
        Array(1, 2, 3),
        Array(4, 5, 6),
        Array(7, 8, 9)
      ).toNDArray

    //check if any element in nd meet the criteria.
    assert(ndArray.exists(_ > 8))
  }
} 
Example 58
Source File: MainecoonTestSuite.scala    From mainecoon   with Apache License 2.0 5 votes vote down vote up
package mainecoon.tests

import cats.arrow.FunctionK
import cats.instances.AllInstances
import cats.kernel.Eq
import mainecoon.syntax.AllSyntax
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.{FunSuite, Matchers}
import org.typelevel.discipline.scalatest.Discipline

import scala.util.Try

class MainecoonTestSuite extends FunSuite with Matchers with Discipline with TestInstances with AllInstances with AllSyntax with cats.syntax.AllSyntax


trait TestInstances {
  implicit val catsDataArbitraryOptionList: Arbitrary[FunctionK[Option, List]] = Arbitrary(Gen.const(λ[FunctionK[Option, List]](_.toList)))
  implicit val catsDataArbitraryListOption: Arbitrary[FunctionK[List, Option]] = Arbitrary(Gen.const(λ[FunctionK[List, Option]](_.headOption)))
  implicit val catsDataArbitraryTryOption: Arbitrary[FunctionK[Try, Option]] = Arbitrary(Gen.const(λ[FunctionK[Try, Option]](_.toOption)))
  implicit val catsDataArbitraryOptionTry: Arbitrary[FunctionK[Option, Try]] = Arbitrary(Gen.const(λ[FunctionK[Option, Try]](o => Try(o.get))))
  implicit val catsDataArbitraryListVector: Arbitrary[FunctionK[List, Vector]] = Arbitrary(Gen.const(λ[FunctionK[List, Vector]](_.toVector)))
  implicit val catsDataArbitraryVectorList: Arbitrary[FunctionK[Vector, List]] = Arbitrary(Gen.const(λ[FunctionK[Vector, List]](_.toList)))

  implicit val eqThrow: Eq[Throwable] = Eq.allEqual
} 
Example 59
Source File: WriteOutDemoPlots.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot

import java.io.File
import java.nio.file.{Files, Paths}

import com.cibo.evilplot.demo.DemoPlots
import javax.imageio.ImageIO
import org.scalatest.{FunSpec, Matchers}
import com.cibo.evilplot.geometry.Drawable
import scala.util.Try

object WriteOutDemoPlots {
  def apply(args:Array[String]):Unit = 
    for(plotName <- args; plot <- DemoPlots.get(Symbol(plotName)))
       plot write new java.io.File(s"$plotName.png")
}
class WriteOutDemoPlots extends FunSpec with Matchers {

  //-- DemoPlot name and ratio of colored pixels (to serve as a simple hash)
  val plots = Seq(
    'linePlot -> 0.01498,
    'heatmap -> 0.81790,
    'pieChart -> 0.44209,
    'clusteredBarChart -> 0.31712,
    'clusteredStackedBarChart -> 0.30259,
    'stackedBarChart -> 0.35687,
    'barChart -> 0.18869,
    'functionPlot -> 0.01728,
    'markerPlot -> 0.01008,
    'crazyPlot -> 0.10755,
    'facetedPlot -> 0.04951,
    'marginalHistogram -> 0.04002,
    'scatterPlot -> 0.02314,
    'boxPlot -> 0.29182,
    'facetedPlot -> 0.04951,
    'histogramOverlay -> 0.32281
  )

  val tmpPathOpt = {
    val tmpPath = Paths.get("/tmp/evilplot")
    if (Files.notExists(tmpPath)) Try{Files.createDirectories(tmpPath)}
    if(Files.notExists(tmpPath)) None else {
      println(s"Saving rendered png's to $tmpPath")
      Some(tmpPath)
    }
  }

  describe("Demo Plots") {
    it("render to consistent murmur hash") {
      for { (name, ratioTruth) <- plots; plot <- DemoPlots.get(name)} {

        val bi = plot.asBufferedImage

        def isColored(c:Int):Boolean = {
          val r = (c >> 16) & 0xFF;
          val g = (c >> 8) & 0xFF;
          val b = (c >> 8) & 0xFF;
          r + g + b > 10
        }

        val ratio:Double = {
          val pixels = (for(x <- 0 until bi.getWidth; y <- 0 until bi.getHeight) yield bi.getRGB(x,y)).toArray
          pixels.count(isColored).toDouble/pixels.size
        }

        val delta = math.abs(ratioTruth - ratio)
        println(f"""$name -> $ratio%5.5f, //delta = $delta%8.8f""")
        assert(delta < 0.0015, s"$name out of range $ratio != $ratioTruth")

        //--write img to file if the tmp path is available
        for(_ <- None; tmpPath <- tmpPathOpt){
          val file = new File(s"${tmpPath.toAbsolutePath.toString}/${name.name}.png")
          ImageIO.write(bi, "png", file)
          file.exists() shouldBe true
        }
      }
    }
  }
} 
Example 60
Source File: Graphics2DRenderContextSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.geometry

import java.awt.image.BufferedImage
import java.awt.{BasicStroke, Color, Graphics2D}

import org.scalatest.{FunSpec, Matchers}

class Graphics2DRenderContextSpec extends FunSpec with Matchers with Graphics2DSupport {
  describe("state stack operations") {
    it("The state should be the same before and after a stack op.") {
      val graphics = Graphics2DTestUtils.graphics2D
      val ctx = Graphics2DRenderContext(graphics)
      val GraphicsState(initialTransform, initialFill, initialColor, initialStroke) =
        ctx.initialState
      Graphics2DRenderContext.applyOp(ctx) {
        ctx.graphics.translate(34, 20)
        ctx.graphics.setPaint(Color.BLUE)
        ctx.graphics.setStroke(new BasicStroke(3))
      }
      ctx.graphics.getTransform shouldBe initialTransform
      ctx.fillColor shouldBe initialFill
      ctx.strokeColor shouldBe initialColor
      ctx.graphics.getStroke shouldBe initialStroke
    }
  }
}

object Graphics2DTestUtils {
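  // Creates a Graphics2D backed by an off-screen ARGB image so tests run without a display.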
  def graphics2D: Graphics2D = {
    val bi = new BufferedImage(800, 600, BufferedImage.TYPE_INT_ARGB)
    bi.createGraphics()
  }
} 
Example 61
Source File: MatrixOperationsSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.numeric

import org.scalatest.{FunSpec, Matchers}

class KernelDensityEstimationSpec extends FunSpec with Matchers {
  describe("KernelDensityEstimation") {

    it("should properly calculate the matrix product A * B^T") {
      val a = Array(
        Array(0.9477583, 0.7026756, 0.0075461, 0.8175592),
        Array(0.5654393, 0.7140698, 0.5457264, 0.1904566),
        Array(0.8049051, 0.5844244, 0.5987555, 0.1988892),
        Array(0.6323643, 0.2691138, 0.7707659, 0.4891442),
        Array(0.2572372, 0.6319369, 0.2961405, 0.8173221)
      )

      val b = Array(
        Array(0.95817, 0.72988, 0.39111, 0.51126),
        Array(0.42121, 0.98949, 0.41734, 0.76212),
        Array(0.55427, 0.47121, 0.73324, 0.42507),
        Array(0.98662, 0.85474, 0.22522, 0.52602),
        Array(0.94610, 0.54784, 0.21054, 0.92127)
      )

      val answer = Array(
        Array(1.8419, 1.7207, 1.2095, 1.9674, 2.0364),
        Array(1.3738, 1.3176, 1.1310, 1.3913, 1.2165),
        Array(1.5337, 1.3188, 1.2451, 1.5331, 1.3910),
        Array(1.3539, 1.2271, 1.2504, 1.2848, 1.3586),
        Array(1.2414, 1.4801, 1.0049, 1.2906, 1.4049)
      )
      val calculatedResult = MatrixOperations.matrixMatrixTransposeMult(a, b).flatten.toSeq
      (calculatedResult zip answer.flatten.toSeq).foreach {
        case (calculated, actual) => calculated shouldEqual actual +- 0.003
      }
    }
  }
} 
Example 63
Source File: AffineTransformSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.geometry

import org.scalatest.{FunSpec, Matchers}

class AffineTransformSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("The AffineTransform") {
    it("should translate a point") {
      AffineTransform.identity.translate(1.0, 0.0)(1.0, 1.0) should be((2.0, 1.0))
      AffineTransform.identity.translate(0.0, 1.0)(1.0, 1.0) should be((1.0, 2.0))
    }

    it("should scale a point") {
      AffineTransform.identity.scale(2.0, 1.0)(1.0, 1.0) should be((2.0, 1.0))
      AffineTransform.identity.scale(1.0, 2.0)(1.0, 1.0) should be((1.0, 2.0))
    }

    it("should flip a point across the axes") {
      AffineTransform.identity.flipOverX(0.0, 1.0) should be((0.0, -1.0))
      AffineTransform.identity.flipOverY(1.0, 0.0) should be((-1.0, 0.0))
    }

    it("should rotate by 90 degrees") {
      val (x, y) = AffineTransform.identity.rotateDegrees(90)(1.0, 0.0)
      x should be(0.0 +- 1e-9)
      y should be(1.0 +- 1e-9)
    }

    it("should compose two affine transforms") {
      val translate = AffineTransform.identity.translate(1.0, 0.0)
      val scale = AffineTransform.identity.scale(1.0, 2.0)

      translate.compose(scale)(2.0, 3.0) should be((3.0, 6.0))
      scale.compose(translate)(2.0, 3.0) should be((3.0, 6.0))
    }
  }
} 
Example 64
Source File: TextMetricsSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.geometry

import org.scalatest.{FunSpec, Matchers}
import com.cibo.evilplot.DOMInitializer

class TextMetricsSpec extends FunSpec with Matchers {
  DOMInitializer.init()

  // The size depends on the font which can vary from system to system.
  describe("measure") {
    it("returns the right size for a small font") {
      val extent = TextMetrics.measure(Text("test", 5))
      extent.height shouldBe 5.0 +- 0.1
    }

    it("returns the right size for a large font") {
      val extent = TextMetrics.measure(Text("test", 64))
      extent.height shouldBe 64.0 +- 0.1
    }
  }
} 
Example 65
Source File: DrawableSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.geometry

import com.cibo.evilplot.colors.{ColorGradients, FillGradients}
import org.scalatest.{FunSpec, Matchers}
import io.circe.syntax._
class DrawableSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

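  // Round-trips a Drawable through its JSON encoding and back for serialization tests.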
  def encodeDecode(before: Drawable): Drawable = {
    val str = Drawable.drawableEncoder(before).noSpaces
    io.circe.parser.parse(str).right.get.as[Drawable].right.get
  }

  describe("EmptyDrawable") {
    it("has zero size") {
      EmptyDrawable().extent shouldBe Extent(0, 0)
    }

    it("does nothing") {
      val context = new MockRenderContext
      EmptyDrawable().draw(context) // This should not throw an exception
    }

    it("can be serialized") {
      val before = EmptyDrawable()
      val str = Drawable.drawableEncoder(before).noSpaces
      val after = io.circe.parser.parse(str).right.get.as[Drawable].right.get
      after shouldBe before
    }
  }

  describe("Line") {
    val line = Line(5, 10)

    it("has the right extent") {
      line.extent shouldBe Extent(5, 10)
    }

    it("renders itself") {
      var drawn = 0
      val context = new MockRenderContext {
        override def draw(line: Line): Unit = drawn += 1
      }

      line.draw(context)
      drawn shouldBe 1
    }
  }

  describe("Disc") {
    it("has the right extent") {
      Disc(5).extent shouldBe Extent(10, 10)
    }
  }

  describe("Wedge") {
    it("has the right extent") {
      Wedge(180, 5).extent shouldBe Extent(10, 10)
    }
  }

  describe("extent"){
    it("can be serialized and deserialized"){

    }
  }

  describe("Interaction"){
    it("can be serialized and deserialized"){
      encodeDecode(Interaction(Disc(10), EmptyEvent())) shouldEqual Interaction(Disc(10), EmptyEvent())
      encodeDecode(Interaction(Disc(10), OnClick(_ => ()))) shouldEqual Interaction(Disc(10), EmptyEvent())

    }
  }

  describe("Gradient2d"){
    it("can be serialized and deserialized"){

      val gradient = LinearGradient.bottomToTop(Extent(100, 100),  FillGradients.distributeEvenly(ColorGradients.viridis))

      encodeDecode(GradientFill(Rect(10), gradient)) shouldEqual GradientFill(Rect(10), gradient)
    }
  }
} 
Example 66
Source File: GeometrySpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.geometry

import org.scalatest.{FunSpec, Matchers}
import com.cibo.evilplot.DOMInitializer

class GeometrySpec extends FunSpec with Matchers {

  DOMInitializer.init()

  describe("Geometry") {

    // pick different values so that we can tell if they get swapped
    val width = 1.0
    val height = 2.0
    val length = 3.0
    val strokeWidth = 4.0

    it("Line extent") {
      val extent = Line(length, strokeWidth).extent
      extent shouldEqual Extent(length, strokeWidth)
    }

    it("Rect extent") {
      val extent = Rect(width, height).extent
      extent shouldEqual Extent(width, height)
    }

  }

  describe("labels") {
    class TestContext extends MockRenderContext {
      var discDrawn: Boolean = false
      var textDrawn: Boolean = false
      override def draw(disc: Disc): Unit = discDrawn = true
      override def draw(text: Text): Unit = textDrawn = true
      override def draw(translate: Translate): Unit = translate.r.draw(this)
    }

    it("titled should draw the drawable and text") {
      val context = new TestContext
      val d = Disc(5).titled("message")
      d.draw(context)
      context.discDrawn shouldBe true
      context.textDrawn shouldBe true
    }

    it("labeled should draw the drawable and text") {
      val context = new TestContext
      val d = Disc(5).labeled("message")
      d.draw(context)
      context.discDrawn shouldBe true
      context.textDrawn shouldBe true
    }
  }
} 
Example 67
Source File: OverlaySpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.{Drawable, EmptyDrawable, Extent}
import com.cibo.evilplot.numeric.{Bounds, Point}
import com.cibo.evilplot.plot.aesthetics.Theme
import com.cibo.evilplot.plot.renderers.PlotRenderer
import org.scalatest.{FunSpec, Matchers}

class OverlaySpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("Overlay") {
    it("it gets the bounds right for a single plot") {
      val inner = ScatterPlot(Seq(Point(1.0, 10.0), Point(2.0, 20.0)))
      val overlay = Overlay(inner)
      overlay.xbounds shouldBe inner.xbounds
      overlay.ybounds shouldBe inner.ybounds
    }

    it("combines bounds from multiple plots") {
      val inner1 = ScatterPlot(Seq(Point(10.0, -1.0)))
      val inner2 = ScatterPlot(Seq(Point(3.0, 20.0)))
      val overlay = Overlay(inner1, inner2)
      overlay.xbounds shouldBe Bounds(inner2.xbounds.min, inner1.xbounds.max)
      overlay.ybounds shouldBe Bounds(inner1.ybounds.min, inner2.ybounds.max)
    }

    it("occupies the right extents") {
      val inner1 = ScatterPlot(Seq(Point(10.0, -1.0)))
      val inner2 = ScatterPlot(Seq(Point(11.0, 1.0)))
      val overlay = Overlay(inner1, inner2)
      val extent = Extent(600, 400)
      overlay.render(extent).extent shouldBe extent
    }

    it("updates bounds on subplots") {
      var xbounds: Bounds = Bounds(0, 0)
      var ybounds: Bounds = Bounds(0, 0)

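      // Renderer stub that records the bounds it receives so bound propagation can be asserted on.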
      val testRenderer = new PlotRenderer {
        def render(plot: Plot, plotExtent: Extent)(implicit theme: Theme): Drawable = {
          xbounds = plot.xbounds
          ybounds = plot.ybounds
          EmptyDrawable().resize(plotExtent)
        }
      }

      val inner = new Plot(
        xbounds = Bounds(1, 2),
        ybounds = Bounds(3, 4),
        renderer = testRenderer
      )

      val overlay = Overlay(inner)
      val updated = overlay.xbounds(5, 6).ybounds(7, 8)
      updated.render(Extent(100, 200))

      xbounds shouldBe Bounds(5, 6)
      ybounds shouldBe Bounds(7, 8)
    }

    it("should throw an exception with no plots") {
      an[IllegalArgumentException] should be thrownBy Overlay()
    }
  }
} 
Example 68
Source File: FacetsSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.{Drawable, EmptyDrawable, Extent, Rect}
import com.cibo.evilplot.numeric.Point
import com.cibo.evilplot.plot.aesthetics.Theme
import org.scalatest.{FunSpec, Matchers}
import com.cibo.evilplot.plot.components.{FacetedPlotComponent, Position}

class FacetsSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("Facets") {
    it("is the correct size with one facet") {
      val inner = ScatterPlot(Seq(Point(1, 1), Point(2, 2)))
      val faceted = Facets(Seq(Seq(inner)))

      faceted.xbounds shouldBe inner.xbounds
      faceted.ybounds shouldBe inner.ybounds

      val extent = Extent(600, 400)
      faceted.plotExtent(extent) shouldBe inner.plotExtent(extent)
    }

    it("works with rows of differing sizes") {
      val inner1 = ScatterPlot(Seq(Point(1, 1), Point(2, 2)))
      val inner2 = ScatterPlot(Seq(Point(2, 1), Point(4, 2)))
      val inner3 = ScatterPlot(Seq(Point(3, 1), Point(5, 2)))
      val faceted = Facets(Seq(Seq(inner1, inner2), Seq(inner3)))

      val extent = Extent(600, 400)
      faceted.render(extent).extent shouldBe extent
    }

    it("is the correct size with a title") {
      val titleHeight = 10
      val inner = ScatterPlot(Seq(Point(1, 1), Point(2, 2)))
      val faceted = Facets(
        Seq(
          Seq(inner, inner),
          Seq(inner, inner),
          Seq(inner, inner)
        )
      ).title(Rect(1, titleHeight))

      faceted.xbounds shouldBe inner.xbounds
      faceted.ybounds shouldBe inner.ybounds

      val extent = Extent(600, 400)
      faceted.plotExtent(extent) shouldBe Extent(extent.width, extent.height - titleHeight)
    }

    it("has the right plotOffset.x") {

      val inner1 = ScatterPlot(Seq(Point(1, 1), Point(2, 2)))
      val inner2 = ScatterPlot(Seq(Point(1, 1), Point(2, 2)))

      // Plot component that is larger for `inner2` than `inner1`.
      object TestComponent extends FacetedPlotComponent {
        val position: Position = Position.Left
        override val repeated: Boolean = true
        override def size(plot: Plot): Extent = if (plot == inner2) Extent(10, 10) else Extent(0, 0)
        def render(plot: Plot, extent: Extent, row: Int, column: Int)(
          implicit theme: Theme): Drawable =
          EmptyDrawable()
      }

      val faceted = Facets(Seq(Seq(inner1), Seq(inner2))) :+ TestComponent
      faceted.plotOffset.x shouldBe 10
    }

    it("throws an exception with no facets") {
      an[IllegalArgumentException] should be thrownBy Facets(Seq.empty)
    }
  }
} 
Example 69
Source File: ScatterPlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.numeric.Point
import org.scalatest.{FunSpec, Matchers}

class ScatterPlotSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("ScatterPlot") {
    it("sets adheres to bound buffers") {
      val data = Seq(Point(-1, 10), Point(20, -5))
      val plot = ScatterPlot(data, xBoundBuffer = Some(0.1), yBoundBuffer = Some(0.1))

      plot.xbounds.min should be < -1.0
      plot.xbounds.max should be > 20.0
      plot.ybounds.min should be < -5.0
      plot.ybounds.max should be > 10.0
    }

    it("sets exact bounds without buffering") {
      val data = Seq(Point(-1, 10), Point(20, -5))
      val plot = ScatterPlot(data)

      plot.xbounds.min shouldBe -1.0
      plot.xbounds.max shouldBe 20.0
      plot.ybounds.min shouldBe -5.0
      plot.ybounds.max shouldBe 10.0
    }

    it("sets reasonable bounds with only 1 point") {
      val plot = ScatterPlot(Seq(Point(2, 3)))
      plot.xbounds.min shouldBe 2.0 +- 0.0000001
      plot.xbounds.max shouldBe 2.0 +- 0.0000001
      plot.ybounds.min shouldBe 3.0 +- 0.0000001
      plot.ybounds.max shouldBe 3.0 +- 0.0000001
    }
  }
} 
Example 70
Source File: BarChartSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.Bounds
import org.scalatest.{FunSpec, Matchers}

class BarChartSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("BarChart") {
    it("should have the right bounds without buffer") {
      val plot = BarChart(Seq[Double](10, 20, 15))
      plot.xbounds shouldBe Bounds(0, 3)
      plot.ybounds shouldBe Bounds(10, 20)
    }

    it("should have the right bounds with buffer") {
      val plot = BarChart(Seq[Double](10, 20, 15), boundBuffer = Some(.1))
      plot.xbounds shouldBe Bounds(0, 3)
      plot.ybounds.min should be < 10.0
      plot.ybounds.max should be > 20.0
    }

    it("should have the right bounds with stacked bars") {
      val plot =
        BarChart.stacked(Seq(Seq(10.0, 5), Seq(20.0, 7), Seq(15.0, 0)), boundBuffer = Some(0))
      plot.xbounds shouldBe Bounds(0, 3)
      plot.ybounds shouldBe Bounds(15, 27)
    }

    it("should have the right extents") {
      val plot = BarChart(Seq(10.0, 20, 15))
      val extent = Extent(200, 200)
      plot.render(extent).extent shouldBe extent
    }

    it("should not explode if there is no data") {
      val plot = BarChart(Seq.empty)
      val extent = Extent(200, 300)
      plot.render(extent).extent shouldBe extent
    }
  }
} 
Example 71
Source File: CartesianPlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.{Bounds, Point}
import org.scalatest.{FunSpec, Matchers}

class CartesianPlotSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("Cartesian Plot") {
    it("has the right bounds") {
      val plot = CartesianPlot(
        Seq(Point(1, 2), Point(3, 4)),
        xboundBuffer = Some(0),
        yboundBuffer = Some(0))()
      plot.xbounds shouldBe Bounds(1, 3)
      plot.ybounds shouldBe Bounds(2, 4)
    }

    it("works with a single point") {
      val plot = CartesianPlot(Seq(Point(1, 2)))()
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe Extent(100, 200)
    }

    it("works with no data") {
      val plot = CartesianPlot(Seq.empty)()
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe Extent(100, 200)
    }
  }
} 
Example 72
Source File: HeatmapSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.Bounds
import org.scalatest.{FunSpec, Matchers}

class HeatmapSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("Heatmap") {
    it("has the right bounds") {
      val plot = Heatmap(Seq(Seq(1), Seq(2)))
      plot.xbounds shouldBe Bounds(0, 1)
      plot.ybounds shouldBe Bounds(0, 2)
    }

    it("should work an empty row") {
      val plot = Heatmap(Seq(Seq.empty))
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe extent
    }

    it("should work with no data") {
      val plot = Heatmap(Seq.empty)
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe extent
    }
  }
} 
Example 73
Source File: BoxPlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.Bounds
import org.scalatest.{FunSpec, Matchers}

class BoxPlotSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("BoxPlot") {
    it("should have the right extents") {
      val plot = BoxPlot(Seq(Seq(1.0, 2.0)))
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe extent
    }

    it("should have the right bounds") {
      val plot = BoxPlot(Seq(Seq(1.0, 2.0)), boundBuffer = Some(0))
      plot.xbounds shouldBe Bounds(0, 1)
      plot.ybounds shouldBe Bounds(1, 2)
    }

    it("should not explode with no data") {
      val plot = BoxPlot(Seq.empty)
      val extent = Extent(200, 200)
      plot.render(extent).extent shouldBe extent
    }

    it("should not explode when there is no data for a box") {
      val plot = BoxPlot(Seq(Seq.empty))
      val extent = Extent(200, 200)
      plot.render(extent).extent shouldBe extent
    }
  }
} 
Example 74
Source File: MixedBoundsOverlaySpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.{Drawable, EmptyDrawable, Extent}
import com.cibo.evilplot.numeric.{Bounds, Point}
import com.cibo.evilplot.plot.aesthetics.Theme
import com.cibo.evilplot.plot.renderers.PlotRenderer
import org.scalatest.{FunSpec, Matchers}

class MixedBoundsOverlaySpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("MixedBoundsOverlay") {

    it("it has the bounds that are set for it") {
      val xbounds = Bounds(-2, 2)
      val ybounds = Bounds(100, 200)
      val inner1 = ScatterPlot(Seq(Point(10.0, -1.0)))
      val inner2 = ScatterPlot(Seq(Point(3.0, 20.0)))
      val overlay = MixedBoundsOverlay(xbounds, ybounds, inner1, inner2)
      overlay.xbounds shouldBe xbounds
      overlay.ybounds shouldBe ybounds
    }

    it("occupies the right extents") {
      val inner1 = ScatterPlot(Seq(Point(10.0, -1.0)))
      val inner2 = ScatterPlot(Seq(Point(11.0, 1.0)))
      val overlay = MixedBoundsOverlay(Bounds(0, 1), Bounds(0, 1), inner1, inner2)
      val extent = Extent(600, 400)
      overlay.render(extent).extent shouldBe extent
    }

    it("doesn't update bounds on subplots") {
      var xbounds: Bounds = Bounds(0, 0)
      var ybounds: Bounds = Bounds(0, 0)

      val testRenderer = new PlotRenderer {
        def render(plot: Plot, plotExtent: Extent)(implicit theme: Theme): Drawable = {
          xbounds = plot.xbounds
          ybounds = plot.ybounds
          EmptyDrawable().resize(plotExtent)
        }
      }

      val inner = new Plot(
        xbounds = Bounds(1, 2),
        ybounds = Bounds(3, 4),
        renderer = testRenderer
      )

      val overlay = MixedBoundsOverlay(Bounds(50, 80), Bounds(50, 80), inner)
      val updated = overlay.xbounds(5, 6).ybounds(7, 8)
      updated.render(Extent(100, 200))

      xbounds shouldBe Bounds(1, 2)
      ybounds shouldBe Bounds(3, 4)
    }

    it("should throw an exception with no plots") {
      an[IllegalArgumentException] should be thrownBy MixedBoundsOverlay(Bounds(1, 2), Bounds(1, 2))
    }

  }

} 
Example 75
Source File: PlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.{Drawable, EmptyDrawable, Extent}
import com.cibo.evilplot.DOMInitializer
import com.cibo.evilplot.numeric.Bounds
import com.cibo.evilplot.plot.aesthetics.Theme
import com.cibo.evilplot.plot.renderers.PlotRenderer
import org.scalatest.{FunSpec, Matchers}

class PlotSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  DOMInitializer.init()

  // Renderer to do nothing.
  private[evilplot] case object EmptyPlotRenderer extends PlotRenderer {
    def render(plot: Plot, plotExtent: Extent)(implicit theme: Theme): Drawable =
      EmptyDrawable().resize(plotExtent)
  }

  // Renderer to get the plot extent.
  private case class PlotExtentPlotRenderer() extends PlotRenderer {
    var plotExtentOpt: Option[Extent] = None

    def render(plot: Plot, plotExtent: Extent)(implicit theme: Theme): Drawable = {
      plotExtentOpt = Some(plotExtent)
      EmptyDrawable().resize(plotExtent)
    }
  }

  private def newPlot(
    xbounds: Bounds = Bounds(0, 1),
    ybounds: Bounds = Bounds(0, 1),
    renderer: PlotRenderer = EmptyPlotRenderer
  ): Plot = Plot(xbounds, ybounds, renderer)

  it("should have the right extent") {
    val plot = newPlot()
    val extent = Extent(300, 400)
    plot.render(extent).extent shouldBe extent
  }

  it("should render the full plot area") {
    val extent = Extent(10, 20)
    val renderer = PlotExtentPlotRenderer()
    val plot = newPlot(renderer = renderer)
    plot.render(extent).extent shouldBe extent
    renderer.plotExtentOpt shouldBe Some(extent)
  }

  it("text should reduce the size of the plot area") {
    val extent = Extent(100, 200)
    val renderer = PlotExtentPlotRenderer()
    val plot = newPlot(renderer = renderer).title("Test")
    plot.render(extent).extent shouldBe extent
    renderer.plotExtentOpt.get.height should be < extent.height
    renderer.plotExtentOpt.get.width shouldBe extent.width
  }

  it("a background should not affect the size of the plot area") {
    val extent = Extent(300, 200)
    val renderer = PlotExtentPlotRenderer()
    val plot = newPlot(renderer = renderer).background()
    plot.render(extent).extent shouldBe extent
    renderer.plotExtentOpt.get shouldBe extent
  }

  it("xbounds/ybounds without parens should access bounds") {
    val plot = newPlot(xbounds = Bounds(0, 2), ybounds = Bounds(0, 5))

    plot.xbounds shouldBe Bounds(0, 2)
    plot.ybounds shouldBe Bounds(0, 5)
  }

  it("partial xbounds/ybounds update should work") {
    val plot = newPlot(xbounds = Bounds(0, 2), ybounds = Bounds(0, 5))
    val updatedX = plot.xbounds(lower = -5)
    updatedX.xbounds shouldBe Bounds(-5, 2)
    val doubleUpdatedX = updatedX.xbounds(upper = 5)
    doubleUpdatedX.xbounds shouldBe Bounds(-5, 5)

    val updatedY = plot.ybounds(lower = -7)
    updatedY.ybounds shouldBe Bounds(-7, 5)
    val doubleUpdatedY = updatedY.ybounds(upper = 7)
    doubleUpdatedY.ybounds shouldBe Bounds(-7, 7)
  }

} 
Example 76
Source File: TransformUtilsSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.{Bounds, Point}
import org.scalatest.{FunSpec, Matchers}

class TransformUtilsSpec extends FunSpec with Matchers {

  describe("PlotUtils") {

    it("computes the correct buffer") {
      val zeroTen = PlotUtils.boundsWithBuffer(xs = Seq(0.0, 10.0), 0.0)
      zeroTen shouldEqual Bounds(0.0, 10.0)

      val zeroTenTen = PlotUtils.boundsWithBuffer(xs = Seq(0.0, 10.0), 0.1)
      zeroTenTen shouldEqual Bounds(-1.0, 11.0)

      val negZeroTenTen = PlotUtils.boundsWithBuffer(xs = Seq(0.0, -10.0), buffer = 0.1)
      negZeroTenTen shouldEqual Bounds(-11.0, 1.0)
    }

    it("computes bounds") {
      val points = Seq(Point(0.0, 0.0), Point(10.0, 10.0))

      PlotUtils.bounds(points, 0.1) shouldEqual (Bounds(-1.0, 11.0), Bounds(-1.0, 11.0))
      PlotUtils.bounds(points, 0.0, xboundBuffer = Some(0.1)) shouldEqual (Bounds(-1.0, 11.0), Bounds(
        0,
        10.0))
      PlotUtils.bounds(points, 0.0, yboundBuffer = Some(0.1)) shouldEqual (Bounds(0, 10.0), Bounds(
        -1.0,
        11.0))
    }

  }

  describe("TransformWorldToScreen") {

    val xTransformer =
      TransformWorldToScreen.xCartesianTransformer(Bounds(0, 100), extent = Extent(100, 100))
    val yTransformer =
      TransformWorldToScreen.yCartesianTransformer(Bounds(0, 100), extent = Extent(100, 100))

    it("default x transformer works properly") {

      xTransformer(-100) shouldEqual -100.0 +- 0.000001
      xTransformer(0) shouldEqual 0.0 +- 0.000001
      xTransformer(100) shouldEqual 100.0 +- 0.000001

    }

    it("default y transformer works properly") {

      yTransformer(-100) shouldEqual 200.0 +- 0.000001
      yTransformer(0) shouldEqual 100.0 +- 0.000001
      yTransformer(100) shouldEqual 0.0 +- 0.000001
    }

    it("Transforms to screen correctly") {
      import TransformWorldToScreen._
      val transformer =
        TransformWorldToScreen.yCartesianTransformer(Bounds(0, 10), extent = Extent(100, 100))

      transformDatumToWorld(Point(0.0, 0.0), xTransformer, yTransformer) shouldEqual Point(
        0.0,
        100.0)
    }
  }
} 
Example 77
Source File: PathRendererSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot.renderers

import com.cibo.evilplot.geometry.LineStyle
import org.scalatest.{FunSpec, Matchers}

class PathRendererSpec extends FunSpec with Matchers {
  describe("Legend stroke lengths") {
    import LineStyle._
    import PathRenderer._
    it("should use the default for a solid style") {
      calcLegendStrokeLength(Solid) shouldBe baseLegendStrokeLength
    }

    it("should always return at least the baseLegendStrokeLength") {
      calcLegendStrokeLength(Dotted) shouldBe 9
      calcLegendStrokeLength(evenlySpaced(3)) should be >= baseLegendStrokeLength
      calcLegendStrokeLength(LineStyle(Seq(1, 1))) shouldBe baseLegendStrokeLength
    }

    it("should use at least 4x the pattern length with a single element pattern") {
      calcLegendStrokeLength(evenlySpaced(6)) shouldBe 24
    }

    it("should use a minimum of 2x the pattern length with a regular element pattern") {
      calcLegendStrokeLength(DashDot) shouldBe 26
    }
  }
} 
Example 78
Source File: ContourPlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.{Bounds, Point}
import org.scalatest.{FunSpec, Matchers}

class ContourPlotSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("ContourPlot") {
    it("it has the right bounds") {
      val plot = ContourPlot(Seq(Point(1, 2), Point(3, 4)), boundBuffer = Some(0.0))
      plot.xbounds shouldBe Bounds(1, 3)
      plot.ybounds shouldBe Bounds(2, 4)
    }

    it("works with no data") {
      val plot = ContourPlot(Seq.empty)
      val extent = Extent(100, 200)
      plot.render(extent).extent shouldBe extent
    }
  }
} 
Example 79
Source File: HistogramSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.geometry.Extent
import com.cibo.evilplot.numeric.{Bounds, Point}
import org.scalatest.{FunSpec, Matchers}

class HistogramSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("Histogram") {
    val plot = Histogram(Seq(1.0, 1, 1, 2, 3, 4, 4, 5), boundBuffer = Some(0))

    it("has the right bounds") {
      plot.xbounds shouldBe Bounds(1, 5)
      plot.ybounds shouldBe Bounds(0, 3)
    }

    it("has the right extents") {
      val extent = Extent(300, 400)
      plot.render(extent).extent.width shouldBe extent.width +- 1e-6
      plot.render(extent).extent.height shouldBe extent.height +- 1e-6
    }

    it("works with no data") {
      val extent = Extent(100, 200)
      val emptyPlot = Histogram(Seq.empty)
      emptyPlot.render(extent).extent shouldBe extent
    }
  }

  describe("Binning") {
    val data = Seq[Double](1, 1, 1, 3, 3, 4, 4, 5)

    it("works on a simple example") {
      val bins = Histogram.createBins(data, Bounds(0, 5), 5)
      bins should contain theSameElementsAs Seq(
        Point(0, 0),
        Point(1, 3),
        Point(2, 0),
        Point(3, 2),
        Point(4, 3)
      )
    }

    it("works when asked to normalize") {
      val bins = Histogram.normalize(data, Bounds(0, 5), 5)
      bins.map(_.y) should contain theSameElementsAs Seq(
        0,
        .375,
        0,
        .25,
        .375
      )
      bins.map(_.y).sum shouldBe 1.0 +- 1e-5
    }

    it("works for cumulative binner") {
      val bins = Histogram.cumulative(data, Bounds(0, 5), 5)
      bins should contain theSameElementsAs Seq(
        Point(0, 0),
        Point(1, 3),
        Point(2, 3),
        Point(3, 5),
        Point(4, 8)
      )
    }

    it("works for density binner") {
      val bins = Histogram.density(data, Bounds(0, 5), 5)
      bins.map(_.y) should contain theSameElementsAs Seq(
        0,
        0.375,
        0,
        0.25,
        0.375
      )
    }

    it("works for cumulativeDensity binner") {
      val bins = Histogram.cumulativeDensity(data, Bounds(0, 5), 5)
      bins.map(_.y) should contain theSameElementsAs Seq(
        0,
        0.375,
        0.375,
        0.625,
        1.000
      )
    }
  }
} 
Example 80
Source File: MarkerPlotSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot

import com.cibo.evilplot.demo.DemoPlots.{plotAreaSize, theme}
import com.cibo.evilplot.geometry.{Extent, Rect, Rotate, Style, Text, Wedge}
import com.cibo.evilplot.numeric.{Bounds, Point}
import com.cibo.evilplot.plot.components.{Marker, Position}
import org.scalatest.{FunSpec, Matchers}

class MarkerPlotSpec extends FunSpec with Matchers {

  describe("Marker Plot") {
    it("overlay marker displays correctly") {
      val marker = Marker(Position.Overlay, _ => Rect(25), Extent(25, 25), 0, 0)
      val data = Seq(Point(-1, 10), Point(20, -5))
      val plot =
        ScatterPlot(data, xBoundBuffer = Some(0.1), yBoundBuffer = Some(0.1)).component(marker)

      plot.xbounds.min should be < -1.0
      plot.xbounds.max should be > 20.0
      plot.ybounds.min should be < -5.0
      plot.ybounds.max should be > 10.0
      marker.size.height should be < 26.0
      marker.size.width should be < 26.0
      marker.x shouldBe 0
      marker.y shouldBe 0
    }

  }

} 
Example 81
Source File: AxesSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.plot.components

import com.cibo.evilplot.numeric.{Bounds, Point}
import com.cibo.evilplot.plot.{Bar, BarChart, ScatterPlot}
import org.scalatest.{FunSpec, Matchers}

class AxesSpec extends FunSpec with Matchers {

  import com.cibo.evilplot.plot.aesthetics.DefaultTheme._

  describe("discrete X") {
    it("should set the default bounds") {
      val plot = BarChart(Seq(3.0, 4)).xAxis()
      plot.xbounds shouldBe Bounds(0, 2)
    }

    it("should set bounds for labels") {
      val plot = BarChart(Seq(3.0, 4)).xAxis(Seq("one", "two"))
      plot.xbounds shouldBe Bounds(0, 2)
    }

    it("should set bounds for more labels") {
      val plot = BarChart(Seq(3.0, 4)).xAxis(Seq("one", "two", "three"))
      plot.xbounds shouldBe Bounds(0, 3)
    }

    it("should set bounds for fewer labels") {
      val plot = BarChart(Seq(3.0, 4)).xAxis(Seq("one"))
      plot.xbounds shouldBe Bounds(0, 1)
    }
  }

  describe("continuous X") {
    it("should set reasonable default bounds") {
      val plot =
        ScatterPlot(Seq(Point(3, 4), Point(5, 6)), xBoundBuffer = Some(0), yBoundBuffer = Some(0))
          .xAxis()
      plot.xbounds shouldBe Bounds(3, 5)
    }

    it("should not update the bounds multiple times") {
      val plot = ScatterPlot(
        Seq(Point(0, 0), Point(1.007, 2)),
        xBoundBuffer = Some(0),
        yBoundBuffer = Some(0))
        .xbounds(0, 1.1)
        .xAxis()
      plot.xbounds.min shouldBe 0.0 +- 1e-6
      plot.xbounds.max shouldBe 1.1 +- 1e-6
    }
  }

  describe("continuous Y") {
    it("should set reasonable default bounds") {
      val plot =
        ScatterPlot(Seq(Point(3, 4), Point(5, 6)), xBoundBuffer = Some(0), yBoundBuffer = Some(0))
          .yAxis()
      plot.ybounds shouldBe Bounds(4, 6)
    }
  }
} 
Example 82
Source File: KernelDensityEstimationSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.numeric

import com.cibo.evilplot.numeric.KernelDensityEstimation._
import org.scalatest.{FunSpec, Matchers}

import scala.util.Random.nextDouble

class KernelDensityEstimationSpec extends FunSpec with Matchers {
  describe("KernelDensityEstimation") {
    it("should properly calculate probability densities from the normal distribution") {
      val doubles: Seq[Double] = Seq(0.383738345, 0.678363183, 0.870892648, 0.955542032,
        0.739779717, 0.495273777, 0.346604271, 0.971385358, 0.998761496, 0.222603808, 0.370077565,
        0.081424898, 0.775284522, 0.005343148, 0.470091059, 0.510200712, 0.361834899, 0.259037336,
        0.806185498, 0.337947191)

      val densities: Seq[Double] = Seq(0.3706244, 0.3169451, 0.2730322, 0.2527211, 0.3034387,
        0.3528943, 0.3756844, 0.2488927, 0.2422704, 0.3891794, 0.3725376, 0.3976220, 0.2953862,
        0.3989366, 0.3572100, 0.3502560, 0.3736631, 0.3857797, 0.2882561, 0.3767993)

      (doubles zip densities).foreach {
        case (x, d) =>
          probabilityDensityInNormal(x) shouldEqual d +- 1e-5
      }
    }
  }

  describe("outer product calculation") {
    it("should calculate correctly") {
      val xs = Vector(-.3015008, 0.6520850)
      val ys = Vector(-.3033709, 0.6459446, 1.7718656)

      val outer =
        Array(Array(0.09146657, -0.1947528, -0.5342189), Array(-0.19782361, 0.4212108, 1.1554070))
      (outerProduct(xs.toArray, ys.toArray).flatten.toSeq zip outer.flatten.toSeq) foreach {
        case (calculated, actual) => calculated shouldBe actual +- 1e-6
      }
    }
  }

  describe("bandwidth estimation") {
    it("should always be non-negative") { // no ScalaCheck in scala.js ?
      (0 until 10) map (_ => Vector.fill(10)(nextDouble)) foreach (bandwidthEstimate(_) should be >= 0.0)
    }

    it("should be calculated properly for some sample vectors") {
      val xs = Vector(-1.06575970, 0.42420074, 0.02938372, 2.04974410, -1.63546604, 0.27436596,
        -0.90455302, 0.86564478, 1.68234299, 0.19371170)
      val ys = Vector(-0.7695360, 0.5401861, 0.3025197, 1.8889234, 1.1587218, -0.6744424, 0.9437049)

      bandwidthEstimate(xs) shouldBe 2.847659 +- 1e-6
      bandwidthEstimate(ys) shouldBe 2.652604 +- 1e-6
    }

    it("should reutrn NaN on a one element vector") {
      val xs = Vector(-1.045696)
      bandwidthEstimate(xs).isNaN shouldBe true
    }

    it("should return NaN on an empty vector") {
      bandwidthEstimate(Vector[Double]()).isNaN shouldBe true
    }
  }
} 
Example 83
Source File: BoxPlotSummaryStatisticsSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.numeric

import org.scalatest.{FunSpec, Matchers}

import scala.util.Random

class BoxPlotSummaryStatisticsSpec extends FunSpec with Matchers {
  val tol = 1e-8
  val data = List(-2541.335733882479, 1577.0315624249806, -808.0673232141799, 680.9128930911302,
    -2445.2589645401004, -7.260674159999326, -1762.1261882364997, -776.52236318016,
    -3198.781083548529, 517.4382306836906, -1982.1566564704299, -1700.7419477605)
  describe("BoxPlotSummaryStatistics") {
    it("should correctly calculate quartiles using linear interpolation between values") {
      val boxPlot = BoxPlotSummaryStatistics(data)
      // NumPy on this list: [ np.percentile(data, x) for x in xrange(25, 100, 25) ] ==
      val (first, second, third) = (-2097.9322334878475, -1254.4046354873399, 123.91405205092315)
      // low tolerance because above data is only to hundredths place
      boxPlot.lowerQuantile shouldEqual first +- tol
      boxPlot.middleQuantile shouldEqual second +- tol
      boxPlot.upperQuantile shouldEqual third +- tol
    }

    it("should give the maximum when asked for the 1.0 quantile") {
      val boxPlot = BoxPlotSummaryStatistics(data, quantiles = (0.0, 0.5, 1.0))
      boxPlot.upperQuantile shouldEqual data.max +- tol
    }

    it(
      "0.5 quantile w linear interpolation should give the same answer as median (even number of elements in list)") {
      val medianData: Seq[Double] = Seq.fill(50)(Random.nextDouble())
      val sorted = medianData.sorted
      val median = (sorted(24) + sorted(25)) / 2.0
      val boxPlot = BoxPlotSummaryStatistics(medianData)
      boxPlot.middleQuantile shouldEqual median +- tol
    }

    it(
      "0.5 quantile w linear interpolation should give the same answer as median (odd number of elements in list)") {
      val medianData: Seq[Double] = Seq.fill(49)(Random.nextDouble())
      val sorted = medianData.sorted
      val median = sorted(24)
      val boxPlot = BoxPlotSummaryStatistics(medianData)
      boxPlot.middleQuantile shouldEqual median +- tol
    }

    it("correctly classifies as outliers elements outside lowerQ - 1.5*IQR < x < upperQ + 1.5*IQR") {
      val temperatureData = Seq(94.371, 94.304, 94.216, 94.130, 94.050, 93.961, 93.840, 93.666,
        93.430, 93.141, 92.824, 92.515, 92.249, 92.048, 91.920, 91.853, 91.824, 91.810, 91.788,
        91.747, 91.685, 91.612, 91.547, 91.511, 91.520, 91.585, 91.710, 91.015, 91.898, 92.146,
        92.451, 92.800, 93.178, 93.573, 93.972, 94.360, 94.717, 95.010, 95.211, 95.295, 95.261,
        95.127, 94.932, 94.729, 94.565, 94.465, 94.429, 94.440, 94.478, 94.538, 94.632, 94.775,
        94.973, 95.202, 95.416, 95.561, 95.592, 95.490, 95.263, 94.945, 94.590, 94.258, 94.003,
        93.866, 93.868, 94.015, 94.296, 94.677, 95.107, 95.520, 95.853, 96.058, 96.119, 96.053,
        98.032, 95.906, 95.741, 95.616, 95.566, 95.591, 95.668, 95.756, 95.817, 95.824, 95.759,
        95.623, 95.432, 95.214, 95.002, 94.819, 94.675, 94.573, 94.514, 94.507, 94.562, 94.682,
        94.858, 95.067, 95.278, 95.463, 95.598, 95.664)
      val outliers = Seq(91.015, 98.032)
      val boxPlot = BoxPlotSummaryStatistics(temperatureData)
      boxPlot.outliers.length shouldEqual outliers.length
      (boxPlot.outliers zip outliers).foreach {
        case (computed, actual) => computed shouldEqual actual +- tol
      }
    }
  }
} 
Example 84
Source File: ColoringSpec.scala    From evilplot   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.evilplot.colors

import com.cibo.evilplot.geometry.PointEquivalences
import com.cibo.evilplot.plot.aesthetics.DefaultTheme.{DefaultElements, DefaultFonts}
import com.cibo.evilplot.plot.aesthetics.{Colors, Elements, Fonts, Theme}
import org.scalatest.{FunSpec, Matchers}

class ColoringSpec extends FunSpec with Matchers {
  describe("multi color gradient construction") {
    import com.cibo.evilplot.plot.aesthetics.DefaultTheme._
    it("should return a function when Colors has only one element") {
      val min: Double = 0
      val max: Double = 100
      val coloring =
        GradientUtils.multiGradient(Seq(HTMLNamedColors.blue), min, max, GradientMode.Linear)
      Range.BigDecimal(min, max, 1.0)
        .map(_.toDouble)
        .foreach(datum => coloring(datum) shouldBe HTMLNamedColors.blue)
    }

    it("should throw an exception when Colors is empty") {
      an[IllegalArgumentException] shouldBe thrownBy(
        GradientUtils.multiGradient(Seq(), 0, 100, GradientMode.Linear))
    }

    it("should build multistop gradients") {
      import HTMLNamedColors.{red, yellow, green}
      val min = 0
      val max = 2
      val colors = Seq(red, yellow, green)
      val gradient = GradientUtils.multiGradient(colors, min, max, GradientMode.Linear)
      gradient(min) should ===(colors.head)
      gradient(1) should ===(colors(1))
      gradient(max) should ===(colors(2))
    }

    it("should return a function that works between min and max") {
      val data: Seq[Double] = Seq(0, 5, 20, 40, 70, 100)
      val gradient = ContinuousColoring.gradient(HTMLNamedColors.red, HTMLNamedColors.blue)

      val coloring = gradient(data)
      data.foreach(d => noException shouldBe thrownBy(coloring(d)))
    }

    it("should behave properly when asked to render past the edge") {
      val gradient = ContinuousColoring.gradient(HTMLNamedColors.red, HTMLNamedColors.blue)
      val coloring = gradient(Seq(1.0, 5.0))
      coloring(1.0) shouldBe HTMLNamedColors.red
      coloring(5.0) shouldBe HTMLNamedColors.blue
      coloring(6.0) shouldBe HTMLNamedColors.blue
    }
  }
  describe("coloring from the theme") {
    import com.cibo.evilplot.plot.aesthetics.DefaultTheme.{DefaultColors => AesColors}
    implicit val overriddenTheme: Theme = Theme(
      fonts = DefaultFonts,
      elements = DefaultElements,
      colors = AesColors.copy(stream = Seq(HTMLNamedColors.red))
    )
    it("should fail to color when the theme doesn't have enough colors") {
      val data = 0 to 5
      an[IllegalArgumentException] shouldBe thrownBy(CategoricalColoring.themed[Int].apply(data))
    }
  }
  describe("making a coloring out of a custom mapping") {
    import com.cibo.evilplot.plot.aesthetics.DefaultTheme._
    it("should actually use the mapping") {
      val f = (s: String) => if (s == "hello") HTMLNamedColors.blue else HTMLNamedColors.red
      val coloring = CategoricalColoring.fromFunction(Seq("hello", "world"), f)
      val extractedFunc = coloring(Seq("hello", "world"))
      extractedFunc("hello") shouldBe HTMLNamedColors.blue
      extractedFunc("world") shouldBe HTMLNamedColors.red
    }
  }
} 
Example 85
Source File: HelloAkkaSpec.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
import org.scalatest.{ BeforeAndAfterAll, FlatSpecLike, Matchers }
import akka.actor.{ Actor, Props, ActorSystem }
import akka.testkit.{ ImplicitSender, TestKit, TestActorRef }
import scala.concurrent.duration._

class HelloAkkaSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with Matchers
  with FlatSpecLike
  with BeforeAndAfterAll {

  def this() = this(ActorSystem("HelloAkkaSpec"))

  override def afterAll: Unit = {
    system.shutdown()
    system.awaitTermination(10.seconds)
  }

  "An HelloAkkaActor" should "be able to set a new greeting" in {
    val greeter = TestActorRef(Props[Greeter])
    greeter ! WhoToGreet("testkit")
    greeter.underlyingActor.asInstanceOf[Greeter].greeting should be("hello, testkit")
  }

  it should "be able to get a new greeting" in {
    val greeter = system.actorOf(Props[Greeter], "greeter")
    greeter ! WhoToGreet("testkit")
    greeter ! Greet
    expectMsgType[Greeting].message.toString should be("hello, testkit")
  }
} 
Example 86
Source File: ModeTest.scala    From kafka-jdbc-connector   with Apache License 2.0 5 votes vote down vote up
package com.agoda.kafka.connector.jdbc.models

import com.agoda.kafka.connector.jdbc.models.Mode.{IncrementingMode, TimestampIncrementingMode, TimestampMode}
import org.scalatest.{Matchers, WordSpec}

class ModeTest extends WordSpec with Matchers {

  "module" should {
    "convert Mode to its string representation" in {
      Mode.TimestampMode.entryName shouldEqual "timestamp"
      Mode.IncrementingMode.entryName shouldEqual "incrementing"
      Mode.TimestampIncrementingMode.entryName shouldEqual "timestamp+incrementing"
    }

    "convert string to corresponding Mode representation" in {
      Mode.withName("timestamp") shouldBe TimestampMode
      Mode.withName("incrementing") shouldBe IncrementingMode
      Mode.withName("timestamp+incrementing") shouldBe TimestampIncrementingMode
    }
  }
} 
Example 87
Source File: DatabaseProductTest.scala    From kafka-jdbc-connector   with Apache License 2.0 5 votes vote down vote up
package com.agoda.kafka.connector.jdbc.models

import com.agoda.kafka.connector.jdbc.models.DatabaseProduct.{MsSQL, MySQL}
import org.scalatest.{Matchers, WordSpec}

class DatabaseProductTest extends WordSpec with Matchers {

  "module" should {
    "convert DatabaseProduct to its string representation" in {
      DatabaseProduct.MySQL.entryName shouldEqual "MySQL"
      DatabaseProduct.MsSQL.entryName shouldEqual "Microsoft SQL Server"
    }

    "convert string to corresponding DatabaseProduct representation" in {
      DatabaseProduct.withName("MySQL") shouldBe MySQL
      DatabaseProduct.withName("Microsoft SQL Server") shouldBe MsSQL
    }
  }
} 
Example 88
Source File: DataServiceTest.scala    From kafka-jdbc-connector   with Apache License 2.0 5 votes vote down vote up
package com.agoda.kafka.connector.jdbc.services

import java.sql.{Connection, PreparedStatement, ResultSet, ResultSetMetaData}

import com.agoda.kafka.connector.jdbc.utils.DataConverter
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.source.SourceRecord
import org.scalatest.mockito.MockitoSugar
import org.mockito.Mockito._
import org.scalatest.{Matchers, WordSpec}

import scala.concurrent.duration._
import scala.util.Success

class DataServiceTest extends WordSpec with Matchers with MockitoSugar {

  "Data Service" should {

    val spName = "stored-procedure"
    val connection = mock[Connection]
    val converter = mock[DataConverter]
    val sourceRecord1 = mock[SourceRecord]
    val sourceRecord2 = mock[SourceRecord]
    val resultSet = mock[ResultSet]
    val resultSetMetadata = mock[ResultSetMetaData]
    val preparedStatement = mock[PreparedStatement]
    val schema = mock[Schema]

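    // Stub DataService that returns the mocked statement, records, and converter so only getRecords is exercised.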
    val dataService = new DataService {

      override def storedProcedureName: String = spName

      override protected def createPreparedStatement(connection: Connection) = Success(preparedStatement)

      override protected def extractRecords(resultSet: ResultSet, schema: Schema) = Success(Seq(sourceRecord1, sourceRecord2))

      override def dataConverter: DataConverter = converter
    }

    "get records" in {
      doNothing().when(preparedStatement).setQueryTimeout(1)
      when(preparedStatement.executeQuery).thenReturn(resultSet)
      when(resultSet.getMetaData).thenReturn(resultSetMetadata)
      when(converter.convertSchema(spName, resultSetMetadata)).thenReturn(Success(schema))

      dataService.getRecords(connection, 1.second) shouldBe Success(Seq(sourceRecord1, sourceRecord2))

      verify(preparedStatement).setQueryTimeout(1)
      verify(preparedStatement).executeQuery
      verify(resultSet).getMetaData
      verify(converter).convertSchema(spName, resultSetMetadata)
    }
  }
} 
Example 89
Source File: WordCountTestableSpec.scala    From kafka-streams   with Apache License 2.0 5 votes vote down vote up
package com.supergloo.examples

import com.supergloo.WordCountTestable
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.kafka.streams.TopologyTestDriver
import org.apache.kafka.streams.state.KeyValueStore
import org.apache.kafka.streams.test.ConsumerRecordFactory
import org.scalatest.{FlatSpec, Matchers}

class WordCountTestableSpec extends FlatSpec with Matchers with KafkaTestSetup {

  val wordCountApplication = new WordCountTestable

  "Convert streaming data into lowercase and publish into output topic" should "push lower text to kafka" in {
    val driver = new TopologyTestDriver(wordCountApplication.toLowerCaseStream("input-topic", "output-topic"), config)
    val recordFactory = new ConsumerRecordFactory("input-topic", new StringSerializer(), new StringSerializer())
    val words = "Hello, WORLDY, World worlD Test"
    driver.pipeInput(recordFactory.create(words))
    val record: ProducerRecord[String, String] = driver.readOutput("output-topic", new StringDeserializer(), new StringDeserializer())
    record.value() shouldBe words.toLowerCase
    driver.close()
  }

  "WordCountTestable" should "count number of words" in {
    val driver = new TopologyTestDriver(wordCountApplication.countNumberOfWords("input-topic", "output-topic", "counts-store"), config)
    val recordFactory = new ConsumerRecordFactory("input-topic", new StringSerializer(), new StringSerializer())
    val words = "Hello Kafka Streams, All streams lead to Kafka"
    driver.pipeInput(recordFactory.create(words))
    val store: KeyValueStore[String, java.lang.Long] = driver.getKeyValueStore("counts-store")
    store.get("hello") shouldBe 1
    store.get("kafka") shouldBe 2
    store.get("streams") shouldBe 2
    store.get("lead") shouldBe 1
    store.get("to") shouldBe 1
    driver.close()

  }

} 
Example 90
Source File: TemplateLookupTest_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.resource._
import org.scalatest.FunSpec
import org.scalatest.Matchers

class TemplateLookupTest_UT extends FunSpec with Matchers {
  describe("CrazyLookup") {
    it("Should lookup resources with the correct type") {
      val expected = `AWS::EC2::VPC`(
        name = "TestVPC",
        CidrBlock = CidrBlock(0,0,0,0,0),
        Tags = Seq.empty[AmazonTag]
      )

      val template = Template.fromResource(expected)

      assert(expected === template.lookupResource[`AWS::EC2::VPC`]("TestVPC"))
    }

    it("Should throw exception when given the wrong type") {
      val expected = `AWS::EC2::VPC`(
        name = "TestVPC",
        CidrBlock = CidrBlock(0,0,0,0,0),
        Tags = Seq.empty[AmazonTag]
      )

      val template = Template.fromResource(expected)

      intercept[ClassCastException] {
        template.lookupResource[`AWS::EC2::Subnet`]("TestVPC")
      }
    }

    it("Should throw exception when resources is empty") {
      val template = Template.EMPTY

      intercept[RuntimeException] {
        template.lookupResource[`AWS::EC2::Subnet`]("TestVPC")
      }
    }

    it("Should throw exception when resource doesn't exist") {
      val otherThing = `AWS::EC2::VPC`(
        name = "TestVPC",
        CidrBlock = CidrBlock(0,0,0,0,0),
        Tags = Seq.empty[AmazonTag]
      )

      val template = Template.fromResource(otherThing)

      intercept[RuntimeException] {
        template.lookupResource[`AWS::EC2::VPC`]("NoVPC")
      }
    }

    it("Should throw exception when multiple resources of same name") {
      val expected = `AWS::EC2::VPC`(
        name = "TestVPC",
        CidrBlock = CidrBlock(0,0,0,0,0),
        Tags = Seq.empty[AmazonTag]
      )

      val template = Template.fromResource(expected) ++ expected

      intercept[RuntimeException] {
        template.lookupResource[`AWS::EC2::Subnet`]("TestVPC")
      }
    }
  }
} 
Example 91
Source File: HasTemplateSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import org.scalatest.{FunSpec, Matchers}

class HasTemplateSpec extends FunSpec with Matchers {

  it("should concat two instances of HasTemplate") {
    object template1 extends HasTemplate {
      val param1 = StringParameter("test1")
      override def template: Template = Template.EMPTY ++ param1
    }
    object template2 extends TemplateBase {
      val param = StringParameter("test2")
    }

    val template = (template1 ++ template2).template
    template.Parameters.get.contains(template1.param1) should be(true)
    template.Parameters.get.contains(template2.param) should be(true)
  }

  it("should concat HasTemplate with Template") {
    object hasTemplate1 extends HasTemplate {
      val param1 = StringParameter("test1")
      override def template: Template = Template.EMPTY ++ param1
    }
    object hasTemplate2 extends TemplateBase {
      val param = StringParameter("test2")
    }

    val template = (hasTemplate1 ++ hasTemplate2.template).template
    template.Parameters.get.contains(hasTemplate1.param1) should be(true)
    template.Parameters.get.contains(hasTemplate2.param) should be(true)
  }

  it("should concat Template with HasTemplate") {
    object hasTemplate1 extends HasTemplate {
      val param1 = StringParameter("test1")
      override def template: Template = Template.EMPTY ++ param1
    }
    object hasTemplate2 extends TemplateBase {
      val param = StringParameter("test2")
    }

    val template : Template = hasTemplate1.template ++ hasTemplate2
    template.Parameters.get.contains(hasTemplate1.param1) should be(true)
    template.Parameters.get.contains(hasTemplate2.param) should be(true)
  }
} 
Example 92
Source File: FnSplit_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.resource.LambdaVpcConfig
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class FnSplit_UT extends FunSpec with Matchers {

  import Token._

  describe("Fn::Split"){

    it("Should serialize correctly with simple string arguments") {
      val split: TokenSeq[String] = `Fn::Split`(",", "one,two")

      val expected = JsObject(
        "Fn::Split" → JsArray(
          JsString(","),
          JsString("one,two")
        )
      )

      split.toJson should be(expected)
    }

    it("Should serialize correctly with complex argument types") {
      val split: TokenSeq[String] = `Fn::Split`(",", `Fn::ImportValue`("importKey"))

      val expected = JsObject(
        "Fn::Split" → JsArray(
          JsString(","),
          JsObject(
            "Fn::ImportValue" → JsString("importKey")
          )
        )
      )

      split.toJson should be(expected)
    }

    it("Should serialize correctly when used inside a resource") {
      val resource = LambdaVpcConfig(Seq("sg-groupid"), `Fn::Split`(",", `Fn::ImportValue`("importKey")))

      val expected = JsObject(
        "SecurityGroupIds" → JsArray(JsString("sg-groupid")),
        "SubnetIds" → JsObject(
          "Fn::Split" → JsArray(
            JsString(","),
            JsObject(
              "Fn::ImportValue" → JsString("importKey")
            )
          )
        )
      )

      resource.toJson should be(expected)
    }

    it("Should implicitly convert inside a seq") {
      val resource: TokenSeq[String] = Seq("test")

      val expected = JsArray(JsString("test"))

      resource.toJson should be(expected)
    }

    it("Should implicitly convert inside a seq with a function call") {
      val resource: TokenSeq[String] = Seq(`Fn::Join`(",", Seq("test")))

      val expected = JsArray(JsObject(
        "Fn::Join" → JsArray(JsString(","), JsArray(JsString("test")))
      ))

      resource.toJson should be(expected)
    }
  }
} 
Example 93
Source File: AwsTokenSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.AwsStringInterpolation.Zipper
import org.scalatest.{Matchers, FunSpec}

class AwsTokenSpec extends FunSpec with Matchers {

  it("should generate simple string if no expressions") {
    val fun = aws"test"

    fun shouldEqual StringToken("test")
  }

  it("should generate simple string if no only string substitutions") {
    val lost : Token[String] = "lost"
    val world : Token[String] = "world"
    val fun = aws"hello$lost$world"

    fun shouldEqual StringToken("hellolostworld")
  }

  it("should join ParameterRef tokens") {
    val param = ParameterRef(StringParameter("that"))
    val fun = aws"test$param"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      param
    )))
  }

  it("should join Parameter") {
    val param = StringParameter("that")
    val fun = aws"test$param"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      ParameterRef(param)
    )))
  }

  it("should join multiple Parameters") {
    val param1 = StringParameter("that")
    val param2 = StringParameter("this")
    val param3 = StringParameter("these")
    val fun = aws"test$param1${param2}hello$param3"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      ParameterRef(param1),
      ParameterRef(param2),
      StringToken("hello"),
      ParameterRef(param3)
    )))
  }

  it("should join Fn::GetAtt ref tokens") {
    val getAtt = `Fn::GetAtt`(Seq("that"))
    val fun = aws"test${getAtt}something"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      getAtt,
      StringToken("something")
    )))
  }
  it("should join tokens") {
    val getAtt : Token[String] = `Fn::GetAtt`(Seq("that"))
    val fun = aws"test${getAtt}something"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      getAtt,
      StringToken("something")
    )))
  }

  it("should optimize join tokens") {
    val getAtt : Token[String] = `Fn::GetAtt`(Seq("that"))
    val test1 = "test1"
    val test2 = "test2"
    val fun = aws"test$getAtt${test1}something$test2"

    fun shouldEqual FunctionCallToken(`Fn::Join`("", Seq(
      StringToken("test"),
      getAtt,
      StringToken(s"${test1}something$test2")
    )))
  }

  describe("zipper") {
    it("should combine unevent lists") {
      val zipper = Zipper(Seq("a", "b", "c"), Seq("d", "e"), Seq("f"))
      zipper.toSeq shouldEqual Seq(
        "a", "d", "f", "b", "e", "c"
      )
    }
  }
} 
Example 94
Source File: TemplateBaseSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model
import scala.language.reflectiveCalls
import com.monsanto.arch.cloudformation.model.resource.`AWS::SQS::Queue`
import org.scalatest.{FunSpec, Matchers}

class TemplateBaseSpec extends FunSpec with Matchers {

  it("should find components of templates") {
    object MyTemplate extends TemplateBase {
      val param1 = StringParameter("test1", "desc1")
      def resource1 = `AWS::SQS::Queue`(
        name = "resource1",
        QueueName = "test1",
        DelaySeconds = 5,
        MessageRetentionPeriod = 2,
        ReceiveMessageWaitTimeSeconds = 9,
        VisibilityTimeout = 4
      )
      lazy val out1 = Output(name = "out1", Description = "desc", Value = `AWS::AccountId`)
    }

    MyTemplate.template.Outputs.toSeq.flatten should contain(MyTemplate.out1)
    MyTemplate.template.Parameters.toSeq.flatten should contain(MyTemplate.param1)
    MyTemplate.template.Resources should contain(MyTemplate.resource1)
  }

  it("should find instances of HasTemplate") {
    object MyTemplate extends TemplateBase {

      lazy val anotherTemplate = new TemplateBase {
        def resource1 = `AWS::SQS::Queue`(
          name = "resource1",
          QueueName = "test1",
          DelaySeconds = 5,
          MessageRetentionPeriod = 2,
          ReceiveMessageWaitTimeSeconds = 9,
          VisibilityTimeout = 4
        )
      }

      lazy val anotherTemplate2 = new TemplateBase {
        def resource = `AWS::SQS::Queue`(
          name = "resource2",
          QueueName = "test2",
          DelaySeconds = 5,
          MessageRetentionPeriod = 2,
          ReceiveMessageWaitTimeSeconds = 9,
          VisibilityTimeout = 4
        )
      }
    }

    MyTemplate.template.Resources should contain(MyTemplate.anotherTemplate.resource1)
    MyTemplate.template.Resources should contain(MyTemplate.anotherTemplate2.resource)
  }

  it("should find instances of Template") {
    val queue = `AWS::SQS::Queue`(
      name = "resource1",
      QueueName = "test1",
      DelaySeconds = 5,
      MessageRetentionPeriod = 2,
      ReceiveMessageWaitTimeSeconds = 9,
      VisibilityTimeout = 4
    )
    object MyTemplate extends TemplateBase {

      lazy val anotherTemplate = Template.EMPTY ++ queue

    }

    MyTemplate.template.Resources should contain(queue)
  }

} 
Example 95
Source File: CodeCommit_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{ ResourceRef, Template, Token }
import org.scalatest.{ FunSpec, Matchers }
import spray.json._

class CodeCommit_UT extends FunSpec with Matchers {
  val repo = `AWS::CodeCommit::Repository`(
    name = "RepoFoo",
    RepositoryDescription = Some(""),
    RepositoryName = "RepoBar",
    Triggers = Some(Seq(
      CodeCommitTrigger(
        Branches = Some(Seq("foo")),
        CustomData = Some("bar"),
        DestinationArn = Some("arn::::baz"),
        Events = Some(Seq(
          CodeCommitEvent.updateReference,
          CodeCommitEvent.deleteReference
        )),
        Name = "BarTrigger"
      )
    ))
  )

  describe("UsagePlan"){
    it ("should serialize as expected") {
      val expectedJson =
        """
          |{
          |  "Resources": {
          |    "RepoFoo": {
          |      "Properties": {
          |        "RepositoryDescription": "",
          |        "RepositoryName": "RepoBar",
          |        "Triggers": [
          |          {
          |            "Branches": [
          |              "foo"
          |            ],
          |            "CustomData": "bar",
          |            "DestinationArn": "arn::::baz",
          |            "Events": [
          |              "updateReference",
          |              "deleteReference"
          |            ],
          |            "Name": "BarTrigger"
          |          }
          |        ]
          |      },
          |      "Type": "AWS::CodeCommit::Repository"
          |    }
          |  }
          |}
        """.stripMargin.parseJson
      Template.fromResource(repo).toJson should be (expectedJson)
    }
  }
} 
Example 96
Source File: EmrSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsArray, JsObject, JsString, JsonWriter}

class EmrSpec extends FunSpec with Matchers {

  describe("ClusterConfiguration") {
    it("should write non recursive") {
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = None
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        )
      )))
    }

    it("should write and read recursive") {
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = Some(Seq(
          ClusterConfiguration(
            Classification = Some("hello1"),
            ConfigurationProperties = Some(Map("hello2" -> "world3")),
            Configurations = None
          )
        ))
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        ),
        "Configurations" -> JsArray(
          JsObject(Map(
            "Classification" -> JsString("hello1"),
            "ConfigurationProperties" -> JsObject(
              "hello2" -> JsString("world3")
            )
          ))
        )
      )))
    }
  }

} 
Example 97
Source File: Events_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{ Token, `Fn::Sub` }
import org.scalatest.{ FunSpec, Matchers }
import spray.json._

class Events_UT extends FunSpec with Matchers {
  describe("RuleTarget") {
    it("Should serialize") {
      val t = RuleTarget(
        Arn = "arn",
        Id = "id",
        Input = Some(JsObject(
          "a" -> JsNumber(5),
          "b" -> JsBoolean(false)
        ).compactPrint))
      t.toJson.compactPrint shouldEqual
        raw"""{"Arn":"arn","Id":"id","Input":"{\"a\":5,\"b\":false}"}"""
    }

    it("Should serialize sub") {
      val sub: Token[String] =
        `Fn::Sub`(
          JsObject(
            "a" -> JsString(raw"$${AWS::Region}"),
            "b" -> JsString(raw"$${FOO}")
          ).compactPrint,
          Some(Map("FOO" -> "BAR"))
        )
      val t = RuleTarget(
        Arn = "arn",
        Id = "id",
        Input = Some(sub)
      )
      t.toJson.compactPrint shouldEqual
        raw"""{"Arn":"arn","Id":"id","Input":{"Fn::Sub":["{\"a\":\"$${AWS::Region}\",\"b\":\"$${FOO}\"}",{"FOO":"BAR"}]}}"""
    }
  }
} 
Example 98
Source File: Kinesis_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import com.monsanto.arch.cloudformation.model.resource.S3VersioningStatus.Enabled
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class Kinesis_UT extends FunSpec with Matchers {
  describe("Stream") {
    val streamName = "stream"
    val shardCount = 1
    val retentionPeriodHours = 5
    val stream = `AWS::Kinesis::Stream`(
      name = streamName,
      Name = Some("Foo"),
      RetentionPeriodHours = Some(retentionPeriodHours),
      ShardCount = shardCount,
      Tags = Seq(AmazonTag("Name", streamName))
    )

    it("should write a valid Kinesis stream") {
      stream.toJson shouldEqual JsObject(Map(
        "name" -> JsString("stream"),
        "Name" -> JsString("Foo"),
        "RetentionPeriodHours" -> JsNumber(5),
        "ShardCount" -> JsNumber(1),
        "Tags" -> JsArray(JsObject(Map("Key" -> JsString("Name"), "Value" -> JsString("stream"))))
      ))
    }

    it("should have properly set public fields") {
      stream.name shouldEqual streamName
      stream.ShardCount shouldEqual IntToken(shardCount)
      stream.RetentionPeriodHours foreach (_ shouldEqual IntToken(retentionPeriodHours))
      stream.Tags.get shouldEqual Seq(AmazonTag("Name", streamName))
    }
  }

  describe("FirehoseDeliveryStream") {
    it("should create a plausible S3 firehose stream config") {
      val bucket = `AWS::S3::Bucket`("s3bucket", None,
        VersioningConfiguration = Some(S3VersioningConfiguration(Enabled)))
      val deliveryRole = `AWS::IAM::Role`("deliveryRole",
        PolicyDocument(Seq(PolicyStatement(
          "Allow",
          Some(DefinedPrincipal(Map("Service" -> Token.fromString("firehose.amazonaws.com")))),
          Seq("sts:AssumeRole"),
          Sid = Some(" "),
          Condition = Some(Map("StringEquals" -> Map("sts:ExternalId" -> SimplePolicyConditionValue("AWS::AccountId"))))
        ))))
      val policy = `AWS::IAM::Policy`("deliveryPolicy",
        PolicyDocument(Seq(
          PolicyStatement("Allow",
            Action = Seq("s3:AbortMultipartUpload",
              "s3:GetBucketLocation",
              "s3:GetObject",
              "s3:ListBucket",
              "s3:ListBucketMultipartUploads",
              "s3:PutObject"
            ),
            Resource = Some(Seq(`Fn::Join`("", Seq(s"arn:aws:s3:::", ResourceRef(bucket))),
              `Fn::Join`("", Seq(s"arn:aws:s3:::", ResourceRef(bucket), "/*"))))

          ))
        ), "firehose_delivery_policy",
        Roles = Some(Seq(ResourceRef(deliveryRole))))

      val stream = `AWS::KinesisFirehose::DeliveryStream`.s3(
        "deliveryStream",
        ExtendedS3DestinationConfiguration(`Fn::Join`("", Seq(s"arn:aws:s3:::", ResourceRef(bucket))),
          ResourceRef(deliveryRole),
          Some(BufferingHints(Some(60), Some(50))), None,
          Some(CompressionFormat.UNCOMPRESSED), None, Some("firehose/"),
          Some(ProcessingConfiguration.enabled(
            Seq(Processor(
              Seq(ProcessorParameter("LambdaArn",
                Token.fromFunction(`Fn::GetAtt`(Seq("myLambda","Arn")))))
            ))))
        ), DependsOn = Some(Seq(policy.name)))
      val tJson = Template(Resources = Seq(stream, policy,deliveryRole,bucket))

      val deliveryJson = tJson.toJson.asJsObject.fields("Resources")
        .asJsObject.fields("deliveryStream").asJsObject()
        deliveryJson.fields("DependsOn") shouldBe JsArray(JsString("deliveryPolicy"))

      deliveryJson.fields("Properties").asJsObject().fields("ExtendedS3DestinationConfiguration")
        .asJsObject.fields("BufferingHints") shouldBe JsObject(
        Map("IntervalInSeconds" -> JsNumber(60), "SizeInMBs" -> JsNumber(50))
      )
    }
  }
} 
Example 99
Source File: ApiGateway_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{ResourceRef, Template, Token}
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class ApiGateway_UT extends FunSpec with Matchers {
  val api = `AWS::ApiGateway::RestApi`(
    name = "RestApi",
    Name = Token.fromString("RestApi")
  )

  val stage = `AWS::ApiGateway::Stage`(
    name = "Stage",
    DeploymentId = Token.fromString("123"),
    Variables = Map()
  )

  val apiKey = `AWS::ApiGateway::ApiKey`(
    name = "ApiKey"
  )

  val usagePlan = `AWS::ApiGateway::UsagePlan`(
    name = "UsagePlan",
    ApiStages = Some(Seq(
      ApiStage(
        ResourceRef(api),
        ResourceRef(stage)
      )
    )),
    Description = Some("UsagePlanDescription"),
    Quota = Some(QuotaSettings(
      Limit = Some(1),
      Offset = Some(2),
      Period = Some(Period.WEEK))
    ),
    Throttle = Some(ThrottleSettings(
      BurstLimit = Some(1),
      RateLimit = Some(2.0)
    )),
    UsagePlanName = Some(Token.fromString("UsagePlanName"))
  )

  val usagePlanKey = `AWS::ApiGateway::UsagePlanKey`(
    name = "UsagePlanKey",
    KeyId = ResourceRef(apiKey),
    KeyType = UsagePlanKeyType.API_KEY,
    UsagePlanId = ResourceRef(usagePlan)
  )

  describe("UsagePlan"){
    it ("should serialize as expected") {
      val expectedJson =
        """
          |{
          |  "Resources": {
          |    "UsagePlan": {
          |      "Properties": {
          |        "ApiStages": [{"ApiId": {"Ref": "RestApi"}, "Stage": {"Ref": "Stage"}}],
          |        "Description": "UsagePlanDescription",
          |        "Quota": {"Limit": 1, "Offset": 2, "Period": "WEEK"},
          |        "Throttle": {"BurstLimit": 1, "RateLimit": 2.0},
          |        "UsagePlanName": "UsagePlanName"
          |      },
          |      "Type": "AWS::ApiGateway::UsagePlan"
          |    }
          |  }
          |}
        """.stripMargin.parseJson
      Template.fromResource(usagePlan).toJson should be (expectedJson)
    }
  }

  describe("UsagePlanKey"){
    it ("should serialize as expected") {
      val expectedJson =
        """
          |{
          |  "Resources": {
          |    "UsagePlanKey": {
          |      "Properties": {
          |        "KeyId": {"Ref": "ApiKey"},
          |        "KeyType": "API_KEY",
          |        "UsagePlanId": {"Ref": "UsagePlan"}
          |      },
          |      "Type": "AWS::ApiGateway::UsagePlanKey"
          |    }
          |  }
          |}
        """.stripMargin.parseJson
      Template.fromResource(usagePlanKey).toJson should be (expectedJson)
    }
  }
} 
Example 100
Source File: Lambda_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import org.scalatest.{ FunSpec, Matchers }
import spray.json._

class Lambda_UT extends FunSpec with Matchers {
  describe("TracingConfig") {
    it("should serialize") {
      def test(t: TracingConfig, rep: String) = {
        t.toJson shouldEqual JsString(rep)
        s""" "${rep}" """.parseJson.convertTo[TracingConfig] shouldEqual t
      }
      test(TracingConfig.Active, "Active")
      test(TracingConfig.PassThrough, "PassThrough")
    }
  }
} 
Example 101
Source File: ECR_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class ECR_UT extends FunSpec with Matchers {
  describe("AWS::ECR::Repository") {

    val fakePolicyDoc = PolicyDocument(Seq(
      PolicyStatement(
        "Allow",
        Some(DefinedPrincipal(Map("Service" -> Seq("fakePrincipal")))),
        Seq("fakeAction")
      )
    ))

    val repositoryName = "repository"
    val repository = `AWS::ECR::Repository`(
      repositoryName,
      Some("myFakeDockerRepository"),
      Some(fakePolicyDoc)
    )

    it("should create a valid new ECR repository") {
      val expected = JsObject(
        repositoryName -> JsObject(
          "Type" -> JsString("AWS::ECR::Repository"),
          "Properties" -> JsObject(
            "RepositoryName" -> JsString("myFakeDockerRepository"),
            "RepositoryPolicyText" -> fakePolicyDoc.toJson
          )))
      Seq[Resource[_]](repository).toJson should be(expected)
    }
  }
} 
Example 102
Source File: ElasticSearchDomain_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.UNSAFEToken
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class ElasticSearchDomain_UT extends FunSpec with Matchers {
  describe("An Elasticsearch::Domain") {
    it("should be serialize correctly") {
      val domain = `AWS::Elasticsearch::Domain`(
        name="testdomain",
        DomainName="testDomainName",
        Tags=Some(Seq(AmazonTag("testkey", "testValue"))),
        VPCOptions = Some(VPCOptions(Seq(UNSAFEToken("sg-1234567")), Seq("subnet-f1234567")))
      )

      val result = """{
                     |  "name": "testdomain",
                     |  "DomainName": "testDomainName",
                     |  "Tags": [{
                     |    "Key": "testkey",
                     |    "Value": "testValue"
                     |  }],
                     |  "VPCOptions": {
                     |    "SecurityGroupIds": ["sg-1234567"],
                     |    "SubnetIds": ["subnet-f1234567"]
                     |  }
                     |}""".stripMargin.parseJson

      domain.toJson shouldBe result
    }
  }
} 
Example 103
Source File: IAMRole_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.ResourceRef
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsObject, JsString, _}


class IAMRole_UT extends FunSpec with Matchers {
  describe("AWS::IAM::Role") {

    it("should handle both AWS Managed and Customer policies into valid json") {
      val customerPolicy = `AWS::IAM::ManagedPolicy`("customer-policy", PolicyDocument(Seq()))
      val awsPolicy = AWSManagedPolicy("AdministratorAccess")
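      // In the expected JSON below, the customer-managed policy serializes as a Ref to its
      // resource, while the AWS managed policy serializes as its full ARN string.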

      val fakePolicyDoc = PolicyDocument(Seq(
        PolicyStatement(
          "Allow",
          Some(DefinedPrincipal(Map("Service" -> Seq("config.amazonaws.com")))),
          Seq("sts:AssumeRole")
        )
      ))

      val expectedJson = JsObject(
        "name" -> JsString("role"),
        "AssumeRolePolicyDocument" -> fakePolicyDoc.toJson,
        "ManagedPolicyArns" -> JsArray(
          JsObject("Ref" -> JsString("customer-policy")),
          JsString("arn:aws:iam::aws:policy/AdministratorAccess")
        )
      )

      val role = `AWS::IAM::Role`(
        "role",
        fakePolicyDoc,
        ManagedPolicyArns = Some(Seq(ResourceRef(customerPolicy), awsPolicy))
      )

      role.toJson should be(expectedJson)
    }
  }
} 
Example 104
Source File: EFS_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.ResourceRef
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class EFS_UT extends FunSpec with Matchers {
  describe("AWS::EFS::FileSystem") {
    val resource = `AWS::EFS::FileSystem`(
      "test",
      FileSystemTags = Some(
        List(AmazonTag("Foo", "Bar"))
      ),
      Encrypted = Some(true),
      KmsKeyId = Some(`AWS::KMS::Key`(
        name = "test",
        KeyPolicy = PolicyDocument(
          Statement = List(
            PolicyStatement(
              Effect = "Allow",
              Action = List("dynamodb:*")
            )
          )
        )
      )),
      PerformanceMode = PerformanceMode.generalPurpose
    )

    it("should serialize to JSON") {
      resource.toJson shouldBe """{
                                             |  "name": "test",
                                             |  "KmsKeyId": {
                                             |    "Ref": "test"
                                             |  },
                                             |  "Encrypted": true,
                                             |  "PerformanceMode": "generalPurpose",
                                             |  "FileSystemTags": [{
                                             |    "Key": "Foo",
                                             |    "Value": "Bar"
                                             |  }]
                                             |}""".stripMargin.parseJson
    }

    it("throws an exception when KmsKeyId is set but Encrypted is false") {
      an [IllegalArgumentException] should be thrownBy resource.copy(Encrypted = None)
      an [IllegalArgumentException] should be thrownBy resource.copy(Encrypted = Some(false))
    }
  }

  describe("AWS::EFS::MountTarget") {
    val vpc = `AWS::EC2::VPC`(
      "vpc",
      CidrBlock(198,51,100,0,24),
      List()
    )
    val subnet = `AWS::EC2::Subnet`(
      "test",
      VpcId = ResourceRef(vpc),
      CidrBlock = CidrBlock(198,51,100,129,25),
      Tags = List()
    )
    val sg = `AWS::EC2::SecurityGroup`(
      "test",
      GroupDescription = "Test",
      VpcId = ResourceRef(vpc),
      None,
      None,
      List()
    )

    val resource = `AWS::EFS::MountTarget`(
      "test",
      FileSystemId = ResourceRef(`AWS::EFS::FileSystem`("test")),
      IpAddress = Some("198.51.100.1"),
      SecurityGroups = List(ResourceRef(sg)),
      SubnetId = ResourceRef(subnet)
    )
    it("should serialize to JSON") {
      resource.toJson shouldBe """{
                                             |  "name": "test",
                                             |  "SecurityGroups": [{
                                             |    "Ref": "test"
                                             |  }],
                                             |  "IpAddress": "198.51.100.1",
                                             |  "FileSystemId": {
                                             |    "Ref": "test"
                                             |  },
                                             |  "SubnetId": {
                                             |    "Ref": "test"
                                             |  }
                                             |}""".stripMargin.parseJson
    }
  }
} 
Example 105
Source File: CodePipeline_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{ Token, `Fn::GetArtifactAtt`, `Fn::GetParam` }
import org.scalatest.{ FunSpec, Matchers }
import spray.json._

class CodePipeline_UT extends FunSpec with Matchers {
  describe("Fns"){
    it ("Fn::GetParam should serialize as expected") {
      val s: Token[String] = `Fn::GetParam`("foo", "bar", "baz")
      val expected = JsObject("Fn::GetParam" -> JsArray(JsString("foo"), JsString("bar"), JsString("baz")))
      s.toJson should be (expected)
    }

    it ("Fn::GetArtifactAtt should serialize as expected") {
      val s: Token[String] = `Fn::GetArtifactAtt`("foo", "bar")
      val expected = JsObject("Fn::GetArtifactAtt" -> JsArray(JsString("foo"), JsString("bar")))
      s.toJson should be (expected)
    }
  }
} 
Example 106
Source File: EKS_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.UNSAFEToken
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class EKS_UT extends FunSpec with Matchers {
  describe("AWS::EKS::Cluster") {

    val resourceVpcConfig: ResourcesVpcConfig = ResourcesVpcConfig(
      SecurityGroupIds = Seq(UNSAFEToken("sg-01234567")),
      SubnetIds = Seq(UNSAFEToken("subnet-12345678"))
    )

    val clusterName = "cluster"
    val cluster = `AWS::EKS::Cluster`(
      clusterName,
      "Name",
      ResourcesVpcConfig = resourceVpcConfig,
      RoleArn = "ARN"
    )

    it("should create a valid new EKS cluster") {
      val expected = JsObject(
        clusterName -> JsObject(
          "Properties" -> JsObject(
            "Name" -> JsString("Name"),
            "ResourcesVpcConfig" -> resourceVpcConfig.toJson,
            "RoleArn" -> JsString("ARN")
          ),
          "Type" -> JsString("AWS::EKS::Cluster")
        ))
      Seq[Resource[_]](cluster).toJson should be(expected)
    }
  }
} 
Example 107
Source File: CloudWatchSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsonFormat, JsString}

class CloudWatchSpec extends FunSpec with Matchers {

  it("should format AWS/EC2") {
    implicitly[JsonFormat[`AWS::CloudWatch::Alarm::Namespace`]].write(`AWS::CloudWatch::Alarm::Namespace`.`AWS/EC2`) should equal(JsString("AWS/EC2"))
  }

  it("should format custom namespace") {
    implicitly[JsonFormat[`AWS::CloudWatch::Alarm::Namespace`]].write(`AWS::CloudWatch::Alarm::Namespace`("hello")) should equal(JsString("hello"))
  }

  it("should format implicit custom namespace") {
    implicitly[JsonFormat[`AWS::CloudWatch::Alarm::Namespace`]].write("hello" : `AWS::CloudWatch::Alarm::Namespace`) should equal(JsString("hello"))
  }
} 
Example 108
Source File: Route53_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{Template, `Fn::GetAtt`, ResourceRef}
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class Route53_UT extends FunSpec with Matchers {

  describe("Custom::RemoteRecordSet"){
    it ("should serialize as expected") {
      val record = `Custom::RemoteRoute53RecordSet`.generalRecord(
        "TestRecord",
        "TestServiceToken",
        "TestDestinationRole",
        "TestHostName",
        Route53RecordType.CNAME,
        "TestZone",
        Seq("cnn.com"),
        "60")

      val expectedJson =
        """
          |{
          |  "Resources": {
          |    "TestRecord": {
          |      "Properties": {
          |        "DestinationRole": "TestDestinationRole",
          |        "Name": "TestHostName",
          |        "ServiceToken": "TestServiceToken",
          |        "HostedZoneName": "TestZone",
          |        "ResourceRecords": [
          |          "cnn.com"
          |        ],
          |        "TTL": "60",
          |        "Type": "CNAME"
          |      },
          |      "Type": "Custom::RemoteRoute53RecordSet"
          |    }
          |  }
          |}
        """.stripMargin.parseJson
      Template.fromResource(record).toJson should be (expectedJson)
    }
  }
} 
Example 109
Source File: Subnet_Parameter_List_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsNumber, JsString, _}

class Subnet_Parameter_List_UT extends FunSpec with Matchers {
  describe("AWS::EC2::Subnet_Parameter_List") {

    it("should serialize into valid json") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val expectedJson = JsObject(
        "subnets" -> JsObject(
          "Description" -> JsString("Select subnets where the RDS instances should be created"),
          "Type" -> JsString("List<AWS::EC2::Subnet::Id>")
        )
      )
      Seq[Parameter](subnetListParam).toJson should be (expectedJson)
    }

    it("should serialize into valid json as InputParameter") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val expectedJson = JsObject(
        "ParameterKey" -> JsString("subnets"),
        "ParameterValue" -> JsString("")
      )
      val inputParam = InputParameter.templateParameterToInputParameter(Some(Seq(subnetListParam)))
      inputParam.get(0).toJson should be (expectedJson)
    }

    it("can be passed as ParameterRef to AWS::RDS::DBSubnetGroup") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val dbSubnetGroup = `AWS::RDS::DBSubnetGroup`(
        name = "dbSubnetGroup",
        DBSubnetGroupDescription = "DB subnet group",
        SubnetIds = ParameterRef(subnetListParam)
      )
      val expected = JsObject(
        "dbSubnetGroup" -> JsObject(
          "Type" -> JsString("AWS::RDS::DBSubnetGroup"),
          "Properties" -> JsObject(
            "DBSubnetGroupDescription" -> JsString("DB subnet group"),
            "SubnetIds" -> JsObject("Ref" -> JsString("subnets"))
          )
        )
      )
      Seq[Resource[_]](dbSubnetGroup).toJson should be (expected)
    }
  }
} 
Example 110
Source File: SecretsManager_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{Template, Token, `Fn::Join`}
import org.scalatest.{FunSpec, Matchers}
import spray.json._

class SecretsManager_UT extends FunSpec with Matchers {

  describe("secret target attachment"){
    it("should generate code like the documentation example"){
      val secret = `AWS::SecretsManager::Secret`("MyRDSSecret",
        Description = Some("This is a Secrets Manager secret for an RDS DB instance"),
        GenerateSecretString = GenerateSecretString(
          SecretStringTemplate = Some("{\"username\": \"admin\"}"),
          GenerateStringKey = Some("password"),
          PasswordLength = Some(16),
          ExcludeCharacters = Some("\"@/\\")
        )
      )
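      // The DB instance below pulls its credentials (username and password) from the secret
      // using Secrets Manager dynamic references of the form
      // {{resolve:secretsmanager:<secret>:SecretString:<key>}}, reproducing the documentation example.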
      val rdsInstance = `AWS::RDS::DBInstance`("MyRDSInstance",Some(Left(20)),Token.fromString("db.t2.micro"),None,
        None,None,Some(Token.fromString("0")),Some("rotation-instance"),None,None,None,None,None,
        Some(`AWS::RDS::DBInstance::Engine`.MySQL),None,None,None,None,
        Some(`Fn::Join`("",Seq(
          Token.fromString("{{resolve:secretsmanager:"),
          Token.fromString(secret.name),
          Token.fromString(":SecretString:username}}")))),
        Some(`Fn::Join`("",Seq(
          Token.fromString("{{resolve:secretsmanager:"),
          Token.fromString(secret.name),
          Token.fromString(":SecretString:password}}")))),
        None,None,None,None,None,None,None,None,None,None,None,None,None,None
      )

      val attachment = `AWS::SecretsManager::SecretTargetAttachment`(
        "SecretRDSInstanceAttachment",
        Token.fromResource(secret),
        Token.fromResource(rdsInstance))
      val json = Template(Resources = Seq(secret,rdsInstance,attachment)).toJson
      val attachmentJson = json.asJsObject.fields("Resources").asJsObject.fields("SecretRDSInstanceAttachment")
      attachmentJson shouldBe JsObject(Map(
        "Properties" -> JsObject( Map(
          "SecretId" -> JsObject(Map("Ref" -> JsString("MyRDSSecret"))),
          "TargetId" -> JsObject(Map("Ref" -> JsString("MyRDSInstance"))),
          "TargetType" -> JsString("AWS::RDS::DBInstance")
        )),
        "Type" -> JsString("AWS::SecretsManager::SecretTargetAttachment")
      ))
    }
  }
} 
Example 111
Source File: ApplicationAutoScaling_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.{JsonWritingMatcher, ResourceRef}
import org.scalatest.{FunSpec, Matchers}

class ApplicationAutoScaling_UT extends FunSpec with Matchers with JsonWritingMatcher {

  val scalableTarget = `AWS::ApplicationAutoScaling::ScalableTarget`(
    name = "myScalableTarget",
    MaxCapacity = 100,
    MinCapacity = 1,
    ResourceId = "myResourceId",
    RoleARN = "myRoleArn",
    ScalableDimension = ApplicationAutoScaling.ScalableDimension.`custom-resource:ResourceType:Property`,
    ServiceNamespace = ApplicationAutoScaling.ServiceNamespace.`custom-resource`)


  it("should generate scalable target policy document") {

    val resource: Resource[`AWS::ApplicationAutoScaling::ScalableTarget`] = scalableTarget

    resource shouldMatch
      """
        |{
        |  "Type": "AWS::ApplicationAutoScaling::ScalableTarget",
        |  "Properties": {
        |    "MaxCapacity": 100,
        |    "MinCapacity": 1,
        |    "ResourceId": "myResourceId",
        |    "RoleARN": "myRoleArn",
        |    "ScalableDimension": "custom-resource:ResourceType:Property",
        |    "ServiceNamespace": "custom-resource"
        |  }
        |}
      """.stripMargin
  }

  it("should error if ScalableDimension doesn't match the ServiceNamespace") {
    an [java.lang.AssertionError] should be thrownBy
      `AWS::ApplicationAutoScaling::ScalableTarget`(
        name = "myScalableTarget",
        MaxCapacity = 100,
        MinCapacity = 1,
        ResourceId = "myResourceId",
        RoleARN = "myRoleArn",
        ScalableDimension = ApplicationAutoScaling.ScalableDimension.`custom-resource:ResourceType:Property`,
        ServiceNamespace = ApplicationAutoScaling.ServiceNamespace.dynamodb
      )
  }

  it("should generate scaling policy document") {
    val scalingPolicy = `AWS::ApplicationAutoScaling::ScalingPolicy`(
      name = "myScalingPolicy",
      PolicyName = "myPolicyName",
      PolicyType = ApplicationAutoScaling.PolicyType.StepScaling,
      ScalingTargetId = Some(ResourceRef(scalableTarget))
    )

    val resource: Resource[`AWS::ApplicationAutoScaling::ScalingPolicy`] = scalingPolicy

    resource shouldMatch
      """
        |{
        |  "Type": "AWS::ApplicationAutoScaling::ScalingPolicy",
        |  "Properties": {
        |    "PolicyName": "myPolicyName",
        |    "PolicyType": "StepScaling",
        |    "ScalingTargetId": { "Ref":"myScalableTarget" }
        |  }
        |}
      """.stripMargin
  }
} 
Example 112
Source File: IntrinsicFunctions_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.AmazonFunctionCall._
import com.monsanto.arch.cloudformation.model._
import com.monsanto.arch.cloudformation.model.resource._
import Token._
import org.scalatest.{FunSpec, Matchers}
import spray.json._
import DefaultJsonProtocol._


class IntrinsicFunctions_UT extends FunSpec with Matchers {

  describe("Fn::Sub"){

    it("no args"){

      val test: Token[String] = `Fn::Sub`(s"This is a $${test} template")

      val expected = JsObject(
        "Fn::Sub"-> JsString(s"This is a $${test} template")
      )

      test.toJson should be(expected)
    }

    it("one arg"){

      val test: Token[String] = `Fn::Sub`(
        s"This is a $${test} template",
        Some(Map("test" -> "value"))
      )

      val expected = JsObject(
        "Fn::Sub"-> JsArray(
          JsString(s"This is a $${test} template"),
          JsObject("test" -> JsString("value"))
        )
      )

      test.toJson should be(expected)
    }

    it("two args"){

      val test: Token[String] = `Fn::Sub`(
        s"This is a $${test} template",
        Some(Map("test" -> "value", "test2" -> "value2"))
      )

      val expected = JsObject(
        "Fn::Sub"-> JsArray(
          JsString(s"This is a $${test} template"),
          JsObject("test" -> JsString("value"), "test2" -> JsString("value2"))
        )
      )

      test.toJson should be(expected)
    }
  }

  describe("Fn::ImportValue") {
    it("should serialize with static string") {
      val test: Token[String] = `Fn::ImportValue`("Test-Import-Value")

      val expected = JsObject(
        "Fn::ImportValue" -> JsString("Test-Import-Value")
      )
      test.toJson should be(expected)
    }

    it("should serialize with an embedded function") {
      val test: Token[String] = `Fn::ImportValue`(`Fn::Join`("", Seq("str1", "str2")))

      val expected = JsObject(
        "Fn::ImportValue" -> JsObject("Fn::Join" -> JsArray(
          JsString(""),
          JsArray(JsString("str1"), JsString("str2"))
        )
      ))
      test.toJson should be(expected)
    }
  }
} 
Example 113
Source File: JsonWritingMatcher.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.resource.Resource
import org.scalatest.Matchers
import spray.json.{JsonWriter, JsObject, JsValue, JsonFormat}


trait JsonWritingMatcher extends Matchers {

  implicit class JsonMatchResource(val value : Resource[_]) extends JsonMatcher[Resource[_]] {
    val format = Resource.seqFormat.format
  }

  implicit class JsonMatch[A](val value : A)(implicit val format: JsonWriter[A]) extends JsonMatcher[A]

  sealed trait JsonMatcher[A] {
    def value : A
    def format : JsonWriter[A]
    def shouldMatch(policy : String): Unit = {

      import spray.json._

      val jsonPolicy = value.toJson(format)
      val parsedPolicy = policy.parseJson
      jsonEquals(Seq(), jsonPolicy, parsedPolicy)
    }
  }
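
  // Recursively compares two JsValues: for objects, the key sets must be equal and each
  // value is compared under the accumulated path (reported via withClue on failure);
  // all other values are compared for direct equality.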

  def jsonEquals(path : Seq[String], v1 : JsValue, v2 : JsValue): Unit = withClue("Path: [" + path.mkString(" -> ") + "]") {
    (v1, v2) match {
      case (JsObject(o1), JsObject(o2)) =>
        o1.seq.keySet shouldEqual o2.seq.keySet
        for {
          key <- o1.seq.keySet
        } {
          jsonEquals(path ++ Seq(key), o1.seq(key), o2.seq(key))
        }
      case (j1, j2) => {
        j1 shouldEqual j2
      }
    }
  }
} 
Example 114
Source File: FnGetAZs_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import org.scalatest.{FunSpec, Matchers}
import spray.json._
import DefaultJsonProtocol._

class FnGetAZs_UT extends FunSpec with Matchers {

  describe("Fn::GetAZs") {
    it("should serialize correctly") {
      val p = StringParameter("pAZ")
      val c = Condition("cAZ", `Fn::Equals`(ParameterRef(p), ""))
      val ps = StringListParameter("s")
      val az: Token[String] = `Fn::If`[String](
        ConditionRef(c),
        `Fn::Select`(StringBackedInt(0), `Fn::GetAZs`(`AWS::Region`)),
        ParameterRef(p)
      )
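      // When the pAZ parameter is empty, the first availability zone of the current region is
      // selected via Fn::GetAZs; otherwise the parameter value is used as-is.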
      val expected = JsObject(
        "Fn::If"-> JsArray(
          JsString("cAZ"),
          JsObject("Fn::Select" -> JsArray(
            JsString("0"),
            JsObject("Fn::GetAZs" -> JsObject("Ref" -> JsString("AWS::Region")))
          )),
          JsObject("Ref" -> JsString("pAZ"))
        )
      )
      az.toJson should be(expected)
    }
  }

} 
Example 115
Source File: FnIf_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.resource._
import org.scalatest.{Matchers, FunSpec}
import spray.json._


class FnIf_UT extends FunSpec with Matchers {

  describe("Fn::If"){

    it("Should serialize correctly with complex argument types"){

      val cond = Condition(
        name = "ServiceELBSSLCertNameIsNotDefined",
        function = `Fn::Equals`(a = StringToken("true"), b = StringToken("false"))
      )

      val vpcToken: VpcId = "vpc-b5f389d0"

      val gatewayELBSecGroupResource = `AWS::EC2::SecurityGroup`(
        "GatewayELBSecurityGroup",
        GroupDescription = "Rules for allowing access to/from service gateway ELB",
        VpcId = vpcToken,
        SecurityGroupEgress = None,
        SecurityGroupIngress = Some(Seq(
          CidrIngressSpec(
            IpProtocol = "tcp",
            CidrIp = CidrBlock(0, 0, 0, 0, 32),
            FromPort = "80",
            ToPort = "80"
          ),
          CidrIngressSpec(
            IpProtocol = "tcp",
            CidrIp = CidrBlock(0, 0, 0, 0, 32),
            FromPort = "443",
            ToPort = "443"
          )
        )
        ),
        Tags = Seq[AmazonTag](),
        Condition = Some(ConditionRef(cond))
      )

      val test: Token[ResourceRef[`AWS::EC2::SecurityGroup`]] = `Fn::If`[ResourceRef[`AWS::EC2::SecurityGroup`]](
        ConditionRef(cond),
        ResourceRef(gatewayELBSecGroupResource),
        ResourceRef(gatewayELBSecGroupResource)
      )

      val expected = JsObject(
        "Fn::If"-> JsArray(
          JsString("ServiceELBSSLCertNameIsNotDefined"),
          JsObject("Ref" -> JsString("GatewayELBSecurityGroup")),
          JsObject("Ref" -> JsString("GatewayELBSecurityGroup"))
        )
      )

      test.toJson should be(expected)
    }
  }
} 
Example 116
Source File: ResourceRef_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
import com.monsanto.arch.cloudformation.model._
import com.monsanto.arch.cloudformation.model.resource._
import org.scalatest.FunSpec
import org.scalatest.Matchers
import spray.json._


class ResourceRef_UT extends FunSpec with Matchers {

  describe("ResourceRefs"){

    it("should serialize correctly"){


      val vpcToken: VpcId = "vpc-b5f389d0"

      val privateDBSubnet1CidrParam = CidrBlockParameter(
        name        = "PrivateSubnet1",
        Description = Some("CIDR address range for the private subnet to be created in the second AZ"),
        Default     = Some(CidrBlock(10,56,0,0,25))
      )

      val DBPriSubnet1Resource = `AWS::EC2::Subnet`(
        "DBPriSubnet1",
        VpcId = vpcToken,
        AvailabilityZone = Some("us-east-1a"),
        CidrBlock = ParameterRef(privateDBSubnet1CidrParam),
        Tags = Seq[AmazonTag]()
      )

      val DBPriSubnet2Resource = `AWS::EC2::Subnet`(
        "DBPriSubnet2",
        VpcId = vpcToken,
        AvailabilityZone = Some("us-east-1b"),
        CidrBlock = ParameterRef(privateDBSubnet1CidrParam),
        Tags = Seq[AmazonTag]()
      )

      val dbSubnetGroupResource = `AWS::RDS::DBSubnetGroup`(
        name = "DBSubnetGroup",
        SubnetIds = Seq( ResourceRef(DBPriSubnet1Resource), ResourceRef(DBPriSubnet2Resource)),
        DBSubnetGroupDescription =  "DB Subnet Group"
      )

      val expected = JsObject(
        "DBSubnetGroup" -> JsObject(
          "Type" -> JsString("AWS::RDS::DBSubnetGroup"),
          "Properties" -> JsObject(
            "SubnetIds" -> JsArray(
              JsObject("Ref" -> JsString("DBPriSubnet1")),
              JsObject("Ref" -> JsString("DBPriSubnet2"))
            ),
            "DBSubnetGroupDescription" -> JsString("DB Subnet Group")
          )
        )
      )

      Seq[Resource[_]](dbSubnetGroupResource).toJson should be (expected)
    }
  }
} 
Example 117
Source File: AbstractCompositionTest.scala    From study-category-theory   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend.scalaz

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ FlatSpec, Matchers }

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.higherKinds
import scalaz._
import Scalaz._

case class StringStats(length: Int, palindrome: Boolean)

class AbstractCompositionTest extends FlatSpec with Matchers with ScalaFutures {
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 30.seconds, interval = 300.millis)

  def compose[F[_]: Monad, A: Numeric](effectOne: => F[A], effectTwo: => F[A]): F[A] = for {
    x <- effectOne
    y <- effectTwo
  } yield implicitly[Numeric[A]].plus(x, y)

  it should "compose" in {
    compose[Option, Long](Option(1), Option(2)) shouldBe Option(3)
    compose[Future, Long](Future(3), Future(4)).futureValue shouldBe 7
  }

  def stringStats(calcLength: String => Int, isPalindrome: String => Boolean): String => StringStats = for {
    length <- calcLength
    palindrome <- isPalindrome
  } yield StringStats(length, palindrome)
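
  // A minimal usage sketch (not in the original source): stringStats composes the two
  // String => A functions through the reader monad, so both run against the same input.
  it should "compose string statistics via the reader monad" in {
    stringStats(_.length, s => s == s.reverse)("abba") shouldBe StringStats(4, palindrome = true)
  }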
} 
Example 118
Source File: OkKoTest.scala    From study-category-theory   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend.hamsters

import org.scalatest.{ FlatSpec, Matchers }

import io.github.hamsters.Validation
import Validation._

class OkKoTest extends FlatSpec with Matchers {
  info(
    """
      | ############################################
      | # Validation and monadic OK/KO
      | ############################################
      | #
      | Statements can be OK or KO. Then you can get all successes and failures.
      |
    """.stripMargin
  )

  "OK/KO instances" should "be monadic" in {
    val e1: Either[String, Int] = OK(1)
    val e2: Either[String, Int] = KO("nan")
    val e3: Either[String, Int] = KO("nan2")

    // Stop at first error
    val result = for {
      v1 <- e1
      v2 <- e2
      v3 <- e3
    } yield s"$v1-$v2-$v3"

    result shouldBe KO("nan")
  }

  it should "throw when the .get method is used" in {
    val e1: Either[String, Int] = KO("nan")
    intercept[java.util.NoSuchElementException] {
      e1.get
    }
  }

  it should "catch exceptions in a KO object" in {
    def compute(x: Int): Int = 2 / x

    Validation.fromCatchable(compute(1)) shouldBe OK(2)
    Validation.fromCatchable(compute(0)) shouldBe KO("/ by zero")
    Validation.fromCatchable(compute(0), (t: Throwable) => t.getClass.getSimpleName) shouldBe KO("ArithmeticException")
  }

  "Validation" should "accrue / aggregate / concat / the OK/KO instances" in {
    val e1: Either[String, Int] = OK(1)
    val e2: Either[String, Int] = KO("error 1")
    val e3: Either[String, Int] = KO("error 2")

    // Validation exposes hasFailures and failures for inspecting the aggregated results
    val validation: Validation[String] = Validation(e1, e2, e3)
    validation.hasFailures shouldBe true
    validation.failures shouldBe List("error 1", "error 2")

    val successes: Validation[String] = Validation(e1)
    successes.hasFailures shouldBe false
    successes.failures shouldBe 'empty
  }
} 
Example 119
Source File: TestSpec.scala    From study-category-theory   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ FlatSpec, Matchers }

import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._
import scala.util.Try
import scalaz.{ NonEmptyList, _ }

abstract class TestSpec extends FlatSpec with Matchers with ScalaFutures {
  implicit def SymbolToString(sym: Symbol): String = sym.toString()
  implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 5.minutes)

  implicit class PimpedFuture[T](self: Future[T]) {
    def toTry: Try[T] = Try(self.futureValue)
  }

  type DisjunctionNel[A, +B] = Disjunction[NonEmptyList[A], B]

  implicit class StringOps(val str: String) {
    def toNel: NonEmptyList[String] = NonEmptyList(str)
    def leftNel[A]: DisjunctionNel[String, A] = Disjunction.left[NonEmptyList[String], A](str.toNel)
  }

  implicit class EitherOps[A](val self: A) {
    def rightNel: DisjunctionNel[String, A] = Disjunction.right[NonEmptyList[String], A](self)
  }
} 
Example 120
Source File: SelectorSpec.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.svg.api

import org.scalatest.{FlatSpec, Matchers}

class SelectorSpec extends FlatSpec with Matchers {
  it should "parse selectors correctly" in {
    Selector.fromString("g.node, .edge.edgier, path") shouldEqual
      Selector(Set(
        Selector.Clause(Some("g"), Set("node")),
        Selector.Clause(None, Set("edge", "edgier")),
        Selector.Clause(Some("path"), Set.empty)
      ))
  }
} 
Example 121
Source File: ColorSpec.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.geometry

import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class ColorSpec extends FlatSpec with Matchers with PropertyChecks {
  val e = 0.005

  def sameRgba(left: Color.RGBA, right: Color.RGBA) =
    (Color.rgbaComponents.get(left) zip Color.rgbaComponents.get(right)) foreach {
      case (x, y) ⇒ x shouldEqual y +- e
    }

  def sameHsla(left: Color.HSLA, right: Color.HSLA) =
    (Color.hslaComponents.get(left) zip Color.hslaComponents.get(right)) foreach {
      case (x, y) ⇒ x shouldEqual y +- e
    }

  val genColor = for {
    a ← Gen.choose(0.0, 1.0)
    b ← Gen.choose(0.0, 1.0)
    c ← Gen.choose(0.0, 1.0)
    d ← Gen.choose(0.0, 1.0)
  } yield Seq(a, b, c, d)

  val genRgba = genColor.map(Color.rgbaComponents.apply)
  val genHsla = genColor.map(Color.hslaComponents.apply)

  it should "parse RGBA colors" in {
    val colors = Table(
      "string" → "color",
      "#104E8b" → Color.RGBA(0x10 / 255.0, 0x4e / 255.0, 0x8b / 255.0, 1.0),
      "#228b22" → Color.RGBA(0x22 / 255.0, 0x8b / 255.0, 0x22 / 255.0, 1.0)
    )
    forAll(colors)((string, color) ⇒ sameRgba(color, Color.fromRgbaString(string)))
  }

  it should "convert RGBA to HSLA" in {
    val colors = Table(
      "rgba" → "hsla",
      "#104E8b" → Color.HSLA(210 / 360.0, 0.79, 0.3, 1.0),
      "#228b22" → Color.HSLA(120 / 360.0, 0.61, 0.34, 1.0)
    )
    forAll(colors)((string, color) ⇒ sameHsla(color, Color.fromRgbaString(string).toHsla))
  }

  it should "convert from RGBA to string and back" in {
    forAll(genRgba)(color ⇒ sameRgba(color, Color.fromRgbaString(color.toRgbString, color.a)))
  }

  it should "convert from RGBA to HSLA and back" in {
    forAll(genRgba)(color ⇒ sameRgba(color, color.toHsla.toRgba))
  }

  it should "convert from HSLA to RGBA and back" in {
    forAll(genHsla)(color ⇒ sameHsla(color, color.toRgba.toHsla))
  }
} 
Example 122
Source File: PathSpec.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.geometry

import org.scalatest.{Matchers, FlatSpec}

class PathSpec extends FlatSpec with Matchers {
  it should "parse SVG paths" in {
    val pathString = "M89,-288.5C89,-202.361 57.5417,-169.948 100,-95 123.033,-54.3423 184.732,-78.8751 191.411,-39.9227"

    val path = Path(Seq(
      PathSegment.Move(Point(89, -288.5)),
      PathSegment.Bezier(
        Point(89, -288.5), Point(89, -202.361),
        Point(57.5417, -169.948), Point(100, -95)
      ),
      PathSegment.Bezier(
        Point(100, -95), Point(123.033, -54.3423),
        Point(184.732, -78.8751), Point(191.411, -39.9227)
      )
    ))

    Path.fromString(pathString) shouldEqual path
  }
} 
Example 123
Source File: AnimationSpec.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.diagram

import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class AnimationSpec extends FlatSpec with PropertyChecks with Matchers {
  it should "correctly iterate to fixpoint" in {
    forAll(Gen.posNum[Int]) { seed ⇒
      val builder = Animation.startWith(seed).iterate(_ + 1).iterateToFixpoint(_ / 2)
      val expectedSize = (math.log(seed + 1) / math.log(2)) + 3

      builder.frames should have size expectedSize.toInt
    }
  }

  it should "correctly iterate to fixpoint or max, when specified" in {
    forAll(Gen.posNum[Int]) { seed ⇒
      val max = 5
      val builder = Animation.startWith(seed).iterate(_ + 1).iterateToFixpointAtMost(max)(_ / 2)
      val expectedSize = math.min(max + 2, (math.log(seed + 1) / math.log(2)) + 3)

      builder.frames should have size expectedSize.toInt
    }
  }

  it should "correctly iterate while some predicate is true" in {
    val builder = Animation.startWith(-1).iterate(_ + 1).iterateWhile(_ < 100)(_ + 13)
    builder.frames shouldEqual Vector(-1, 0, 13, 26, 39, 52, 65, 78, 91)
  }

  it should "correctly iterate while some predicate is true or max is reached" in {
    val max = 5

    val builder1 = Animation.startWith(-1).iterate(_ + 1).iterateWhileAtMost(max)(_ < 100)(_ + 13)
    builder1.frames shouldEqual Vector(-1, 0, 13, 26, 39, 52, 65)

    val builder2 = Animation.startWith(-1).iterate(_ + 1).iterateWhileAtMost(max)(_ < 30)(_ + 13)
    builder2.frames shouldEqual Vector(-1, 0, 13, 26)
  }
} 
Example 124
Source File: ToRefTreeSpec.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.core

import org.scalatest.{FlatSpec, Matchers}

case class Person(age: Int, name: String)

class ToRefTreeSpec extends FlatSpec with Matchers {
  it should "auto-derive ToRefTree instances" in {
    Person(3, "Nick").refTree should matchPattern {
      case RefTree.Ref(
        "Person", _,
        Seq(
          RefTree.Ref.Field(RefTree.Val(3, "3", false), Some("age"), false),
          RefTree.Ref.Field(RefTree.Ref(
            "String", _,
            Seq(
              RefTree.Ref.Field(RefTree.Val('N', "N", false), None, false),
              RefTree.Ref.Field(RefTree.Val('i', "i", false), None, false),
              RefTree.Ref.Field(RefTree.Val('c', "c", false), None, false),
              RefTree.Ref.Field(RefTree.Val('k', "k", false), None, false)
            ),
            false
          ), Some("name"), false)
        ),
        false
      ) ⇒
    }
  }

  it should "allow to configure automatic derivation" in {
    implicit val personDerivationConfig = ToRefTree.DerivationConfig[Person]
      .renameWith(_.name)
      .omitField("name")
      .tweakField("age", _.withName("years").withTreeHighlight(true))

    Person(3, "Nick").refTree should matchPattern {
      case RefTree.Ref(
        "Nick", _,
        Seq(
          RefTree.Ref.Field(RefTree.Val(3, "3", true), Some("years"), false)
        ),
        false
      ) ⇒
    }
  }
} 
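Note: the matchPattern syntax used in ToRefTreeSpec asserts that a value matches a case pattern without binding any result. A minimal sketch (the User case class below is hypothetical, unrelated to reftree's Person):

import org.scalatest.{FlatSpec, Matchers}

class MatchPatternSketchSpec extends FlatSpec with Matchers {
  case class User(name: String, age: Int)

  "matchPattern" should "accept a value matching the given case clause" in {
    val user = User("Nick", 3)
    // The partial function body is empty; only the pattern itself matters
    user should matchPattern { case User("Nick", _) => }
  }
}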
Example 125
Source File: SpatialIndexTest.scala    From dbscan-scala   with Apache License 2.0 5 votes vote down vote up
package com.esri.dbscan

import com.esri.euclid.Euclid
import org.scalatest.{FlatSpec, Matchers}


class SpatialIndexTest extends FlatSpec with Matchers {

  case class TestPoint(x: Double, y: Double) extends Euclid

  it should "find 3 points" in {
    val arr = Array(
      TestPoint(4, 4),
      TestPoint(5, 5),
      TestPoint(6, 6),
      TestPoint(1, 1),
      TestPoint(9, 9)
    )
    val si = arr.foldLeft(SpatialIndex[TestPoint](2.0))(_ + _)
    val result = si.findNeighbors(TestPoint(5.5, 5.5))
    result.length shouldBe 3
    result should contain allOf(arr(0), arr(1), arr(2))
  }

  it should "not find points" in {
    val arr = Array(
      TestPoint(1, 1),
      TestPoint(9, 9)
    )
    val si = arr.foldLeft(SpatialIndex[TestPoint](2.0))(_ + _)
    si.findNeighbors(TestPoint(5.5, 5.5)) shouldBe empty
  }
} 
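Note: SpatialIndexTest relies on ScalaTest's collection matchers. A minimal sketch of those matchers, independent of the dbscan-scala types:

import org.scalatest.{FlatSpec, Matchers}

class CollectionMatcherSketchSpec extends FlatSpec with Matchers {
  "the collection matchers" should "check membership, size and emptiness" in {
    val xs = Seq(1, 2, 3, 4)
    xs should contain allOf (1, 2, 3)  // allOf requires at least two expected elements
    xs should have length 4
    Seq.empty[Int] shouldBe empty
  }
}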
Example 126
Source File: PnpUtilSpec.scala    From pnp   with Apache License 2.0 5 votes vote down vote up
package org.allenai.pnp

import scala.collection.JavaConverters._

import org.scalatest._
import org.scalatest.Matchers

import com.jayantkrish.jklol.ccg.lambda.ExpressionParser

class PnpUtilSpec extends FlatSpec with Matchers {

  val TOLERANCE = 0.0001
  val parser = ExpressionParser.expression2

  def flip(p: Double): Pnp[Boolean] = {
    Pnp.chooseMap(Seq((true, p), (false, 1.0 - p)))
  }

  val bindings = Map[String, AnyRef](
    "true" -> true.asInstanceOf[AnyRef],
    "false" -> false.asInstanceOf[AnyRef],
    "coin" -> Pnp.chooseMap(Seq((true, 0.6), (false, 0.4))),
    "flipProb" -> 0.6.asInstanceOf[AnyRef],
    "flipProb2" -> 0.55.asInstanceOf[AnyRef],
    "flip" -> PnpUtil.wrap(flip _),
    "filter" -> PnpUtil.wrap(PnpUtil.filter _),
    "list" -> { x: Vector[AnyRef] => Pnp.value(x.toList) },
    "concat" -> PnpUtil.wrap2({ (x: String, y: String) => x ++ y })
  )

  def runTest[A](exprString: String, expected: Seq[(A, Double)]): Unit = {
    val expr = parser.parse(exprString)
    val pp = PnpUtil.lfToPnp(expr, bindings)

    val values = pp.beamSearch(100).executions.map(x => (x.value, x.prob))

    for ((value, expected) <- values.zip(expected)) {
      value._1 should be(expected._1)
      value._2 should be(expected._2 +- TOLERANCE)
    }
  }

  "PpUtil" should "correctly interpret constants" in {
    runTest("coin", Seq((true, 0.6), (false, 0.4)))
  }

  it should "correctly interpret string constants" in {
    runTest("\"foo\"", Seq(("foo", 1.0)))
  }

  it should "correctly interpret applications" in {
    runTest("(flip flipProb)", Seq((true, 0.6), (false, 0.4)))
  }

  it should "correctly interpret applications (2)" in {
    runTest("(list flipProb)", Seq((List(0.6), 1.0)))
  }

  it should "correctly interpret applications (3)" in {
    runTest("(concat \"foo\" \"bar\")", Seq(("foobar", 1.0)))
  }

  it should "correctly interpret filters" in {
    runTest(
      "(filter (lambda (x) (flip x)) (list flipProb flipProb2))",
      Seq((List(0.6, 0.55), 0.6 * 0.55), (List(0.6), 0.6 * 0.45),
        (List(0.55), 0.4 * 0.55), (List(), 0.4 * 0.45))
    )
  }
} 
Example 127
Source File: SemanticParserSpec.scala    From pnp   with Apache License 2.0 5 votes vote down vote up
package org.allenai.pnp.semparse

import scala.collection.JavaConverters._
import org.allenai.pnp.{Env, Pnp, PnpInferenceContext, PnpModel}

import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.jayantkrish.jklol.ccg.lambda.ExplicitTypeDeclaration
import com.jayantkrish.jklol.ccg.lambda.ExpressionParser
import com.jayantkrish.jklol.training.NullLogFunction
import com.jayantkrish.jklol.util.IndexedList
import edu.cmu.dynet._

class SemanticParserSpec extends FlatSpec with Matchers {
  
  Initialize.initialize()
 
  val dataStrings = List(
      ("state", "state:<e,t>"),
      ("city", "city:<e,t>"),
      ("biggest city", "(argmax:<<e,t>,e> city:<e,t>)"),
      ("texas", "texas:e"),
      ("major city", "(lambda ($0) (and:<t*,t> (city:<e,t> $0) (major:<e,t> $0)))")
  )

  val exprParser = ExpressionParser.expression2()
  val typeDeclaration = ExplicitTypeDeclaration.getDefault()

  val data = dataStrings.map(x => (x._1.split(" "), exprParser.parse(x._2)))

  val lexicon = ActionSpace.fromExpressions(data.map(_._2), typeDeclaration, true)
  val vocab = IndexedList.create[String]
  for (d <- data) {
    vocab.addAll(d._1.toList.asJava)
  }
  val model = PnpModel.init(true)
  val parser = SemanticParser.create(lexicon, vocab, model)

  "SemanticParser" should "generate application templates" in {
    println(lexicon.typeTemplateMap)
  }

  it should "decode expressions to template sequences" in {
    val e = exprParser.parse(
        "(argmax:<<e,t>,e> (lambda ($0) (and:<t*,t> (city:<e,t> $0) (major:<e,t> $0))))")
    // This method will throw an error if it can't decode the expression properly. 
    val templates = parser.generateActionSequence(e, EntityLinking(List()), typeDeclaration)
  }
  
  it should "condition on expressions" in {
    val label = exprParser.parse("(lambda ($0) (and:<t*,t> (city:<e,t> $0) (major:<e,t> $0)))")
    val entityLinking = EntityLinking(List())
    val oracle = parser.getLabelScore(label, entityLinking, typeDeclaration).get
    val exprs = parser.generateExpression(Array("major", "city").map(vocab.getIndex(_)),
        entityLinking)

    ComputationGraph.renew()
    val context = PnpInferenceContext.init(model).addExecutionScore(oracle)

    val results = exprs.beamSearch(1, -1, Env.init, context).executions
    results.length should be(1)
    results(0).value should equal(label)
  }
  
  it should "condition on multiple expressions" in {
    val label1 = exprParser.parse("(lambda ($0) (and:<t*,t> (city:<e,t> $0) (major:<e,t> $0)))")
    val label2 = exprParser.parse("(lambda ($0) (state:<e,t> $0))")
    val labels = Set(label1, label2)
    val entityLinking = EntityLinking(List())
    val oracle = parser.getMultiLabelScore(labels, entityLinking, typeDeclaration).get
    
    val exprs = parser.generateExpression(Array("major", "city").map(vocab.getIndex(_)),
        entityLinking)

    ComputationGraph.renew()
    val context = PnpInferenceContext.init(model).addExecutionScore(oracle)

    val results = exprs.beamSearch(2, -1, Env.init, context).executions
    results.length should be(2)
    results.map(_.value).toSet should equal(labels)
  }
} 
Example 128
Source File: GlobalLoglikelihoodTrainerSpec.scala    From pnp   with Apache License 2.0 5 votes vote down vote up
package org.allenai.pnp

import org.allenai.pnp.ExecutionScore.ExecutionScore
import org.scalatest.FlatSpec
import org.scalatest.Matchers

import com.jayantkrish.jklol.training.NullLogFunction

import edu.cmu.dynet._
import com.jayantkrish.jklol.util.IndexedList
import com.jayantkrish.jklol.training.NullLogFunction

class GlobalLoglikelihoodTrainerSpec extends FlatSpec with Matchers {
    
  Initialize.initialize()

  val TOLERANCE = 0.01



  "GlobalLoglikelihoodTrainer" should "train" in {
    val vocab = Array(0,1,2)

    val model = PnpModel.init(false)
    val startParam = model.addParameter("start", Dim(vocab.length))
    val transitionParam = model.addParameter("transition", Dim(vocab.length * vocab.length))

    def lm(k: Int): Pnp[Array[Int]] = {
      if (k == 1) {
        for {
          params <- Pnp.param("start")
          choice <- Pnp.choose(vocab, params, k - 1)
        } yield {
          Array(choice)
        }
      } else {
        for {
          rest <- lm(k - 1)
          previous = rest.last
          transition <- Pnp.param("transition")
          params = Expression.pickrange(
            transition, previous * vocab.length, (previous + 1) * vocab.length)
          choice <- Pnp.choose(vocab, params, k - 1)
        } yield {
          rest ++ Array(choice)
        }
      }
    }

    def makeOracle(label: Array[Int]): ExecutionScore = {
      new ExecutionScore() {
        def apply(tag: Any, choice: Any, env: Env): Double = {
          if (tag != null && tag.isInstanceOf[Int]) {
            val tagInt = tag.asInstanceOf[Int]
            if (tagInt >= 0 && tagInt < label.length) {
              if (choice == label(tagInt)) {
                0.0
              } else {
                Double.NegativeInfinity
              }
            } else {
              Double.NegativeInfinity
            }
          } else {
            0.0
          }
        }
      }
    }
    

    val examples = List(
        PnpExample(lm(3), lm(3), Env.init, makeOracle(Array(0,1,0))),
        PnpExample(lm(3), lm(3), Env.init, makeOracle(Array(0,1,2)))
    )

    val sgd = new SimpleSGDTrainer(model.model, 0.1f, 0.1f)
    val trainer = new GlobalLoglikelihoodTrainer(1000, 100, -1, model, sgd, new NullLogFunction())
    // val trainer = new BsoTrainer(100, 1, -1, model, sgd, new NullLogFunction())
    
    trainer.train(examples)
  }
} 
Example 129
Source File: BatchReadWrapperTest.scala    From aerospike-scala   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.aerospikemacro.converters

import java.util

import com.aerospike.client.BatchRead
import org.scalatest.{FlatSpec, Matchers}
import ru.tinkoff.aerospike.dsl.batchread.BatchReadWrapper
import ru.tinkoff.aerospikemacro.domain.DBCredentials

import scala.collection.JavaConverters._


class BatchReadWrapperTest extends FlatSpec with Matchers {

  def getList(kws: List[BatchReadWrapper]): util.List[BatchRead] = {
    kws.view
      .map(_.apply)
      .toList
      .asJava
  }

  it should "create BatchReads of different Key types" in {

    val b1 = new BatchReadWrapper {
      val keyValue     = "str"
      val binNames     = Array("s1", "s2")
      implicit val dbc = DBCredentials("test", "test")
    }
    val b2 = new BatchReadWrapper {
      val keyValue     = 2
      implicit val dbc = DBCredentials("ns", "setName")
      val binNames     = Array("s3", "s4")
    }

    val brs = getList(List(b1, b2))
    brs.get(0).key.namespace shouldBe "test"
    brs.get(0).key.setName shouldBe "test"
    brs.get(0).binNames shouldBe Array("s1", "s2")

    brs.get(1).key.namespace shouldBe "ns"
    brs.get(1).key.setName shouldBe "setName"
    brs.get(1).binNames shouldBe Array("s3", "s4")

  }

} 
Example 130
Source File: PackageParserSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.deprecated.packageMocks.PackageParserSpec

import cool.graph.deprecated.packageMocks.PackageParser
import org.scalatest.{FlatSpec, Matchers}

class PackageParserSpec extends FlatSpec with Matchers {
  "PackageParser" should "work" in {
    val packageYaml =
      """
        |name: anonymous-auth-provider
        |
        |functions:
        |  authenticateAnonymousUser:
        |    schema: >
        |      type input {
        |        secret: String!
        |      }
        |      type output {
        |        token: String!
        |      }
        |    type: webhook
        |    url: https://some-webhook
        |
        |interfaces:
        |  AnonymousUser:
        |    schema: >
        |      interface AnonymousUser {
        |        secret: String
        |        isVerified: Boolean!
        |      }
        |
        |# This is configured by user when installing
        |install:
        |  - type: mutation
        |    binding: functions.authenticateAnonymousUser
        |    name: authenticateAnonymousCustomer
        |  - type: interface
        |    binding: interfaces.AnonymousUser
        |    onType: Customer
        |
      """.stripMargin

    val importedPackage = PackageParser.parse(packageYaml)

    println(importedPackage)
  }
} 
Example 131
Source File: JsonStringExtensionsSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.util

import cool.graph.util.json.Json._
import org.scalatest.{Matchers, WordSpec}
import spray.json._

class JsonStringExtensionsSpec extends WordSpec with Matchers {

  "pathAs" should {
    "get string" in {
      """{"a": "b"}""".parseJson.pathAsString("a") should be("b")
    }

    "get string nested in array" in {
      val json = """{"a": ["b", "c"]}""".parseJson
      json.pathAsString("a.[0]") should be("b")
      json.pathAsString("a.[1]") should be("c")
    }

    "get string nested in object in array" in {
      val json = """{"a": [{"b":"c"}, {"b":"d"}]}""".parseJson
      json.pathAsString("a.[0].b") should be("c")
      json.pathAsString("a.[1].b") should be("d")
    }
  }

} 
Example 132
Source File: LambdaLogsSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.functions.lambda

import cool.graph.shared.functions.lambda.LambdaFunctionEnvironment
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsObject, JsString}

class LambdaLogsSpec extends FlatSpec with Matchers {
  "Logs parsing for lambda" should "return the correct aggregation of lines" in {
    val testString =
      """
        |START RequestId:	fb6c1b70-afef-11e7-b988-db72e0053f77	Version: $LATEST
        |2017-10-13T08:24:50.856Z	fb6c1b70-afef-11e7-b988-db72e0053f77	getting event {}
        |2017-10-13T08:24:50.856Z	fb6c1b70-afef-11e7-b988-db72e0053f77	requiring event => {
        |  return {
        |    data: {
        |      message: "msg"
        |    }
        |  }
        |}
        |2017-10-13T08:24:50.857Z	fb6c1b70-afef-11e7-b988-db72e0053f77	{"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}
        |END RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77
        |REPORT RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77	Duration: 1.10 ms	Billed Duration: 100 ms	Memory Size: 128 MB	Max Memory Used: 26 MB
      """.stripMargin

    val testString2 =
      """
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	20
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	null
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	{ big: 'OBJECT' }
      """.stripMargin

    val logs = LambdaFunctionEnvironment.parseLambdaLogs(testString)
    logs should contain(JsObject("2017-10-13T08:24:50.856Z" -> JsString("getting event {}")))
    logs should contain(
      JsObject("2017-10-13T08:24:50.856Z" -> JsString("requiring event => {\n  return {\n    data: {\n      message: \"msg\"\n    }\n  }\n}")))
    logs should contain(JsObject("2017-10-13T08:24:50.857Z" -> JsString(
      """{"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}""")))

    val logs2 = LambdaFunctionEnvironment.parseLambdaLogs(testString2)

    logs.length shouldEqual 3

    logs2.length shouldEqual 3
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("20")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("null")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("{ big: 'OBJECT' }")))
  }
} 
Example 133
Source File: TransactionSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph

import cool.graph.client.database.DataResolver
import cool.graph.shared.database.Databases
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.Future
import scala.util.{Failure, Random, Success, Try}

class TransactionSpec extends FlatSpec with Matchers {
  import cool.graph.util.AwaitUtils._

  import scala.language.reflectiveCalls

  val dataResolver: DataResolver = null // we don't need it for those tests

  "Transaction.verify" should "return a success if it contains no Mutactions at all" in {
    val transaction = Transaction(List.empty, dataResolver)
    val result      = await(transaction.verify())
    result should be(Success(MutactionVerificationSuccess()))
  }

  "Transaction.verify" should "return a success if all Mutactions succeed" in {
    val mutactions  = List(successfulMutaction, successfulMutaction, successfulMutaction)
    val transaction = Transaction(mutactions, dataResolver)
    val result      = await(transaction.verify())
    result should be(Success(MutactionVerificationSuccess()))
  }

  "Transaction.verify" should "return the failure of the first failed Mutaction" in {
    for (i <- 1 to 10) {
      val failedMutactions =
        Random.shuffle(List(failedMutaction("error 1"), failedMutaction("error 2"), failedMutaction("error 3")))
      val mutactions  = List(successfulMutaction) ++ failedMutactions
      val transaction = Transaction(mutactions, dataResolver)
      val result      = await(transaction.verify())
      result.isFailure should be(true)
      result.failed.get.getMessage should be(failedMutactions.head.errorMessage)
    }
  }

  def failedMutaction(errorMsg: String) = {
    new ClientSqlMutaction {
      val errorMessage = errorMsg

      override def execute = ???

      override def verify(): Future[Try[MutactionVerificationSuccess]] = {
        Future.successful(Failure(new Exception(errorMessage)))
      }
    }
  }

  def successfulMutaction = {
    new ClientSqlMutaction {
      override def execute = ???

      override def verify(): Future[Try[MutactionVerificationSuccess]] = {
        Future.successful(Success(MutactionVerificationSuccess()))
      }
    }
  }
} 
Example 134
Source File: GraphQlResponseSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.graphql

import org.scalatest.{FlatSpec, Matchers}

class GraphQlResponseSpec extends FlatSpec with Matchers {
  val exampleError = errorJson(code = 1111, message = "something did not workout")

  "isSuccess" should "return true if there are NO errors in the response body" in {
    val response = GraphQlResponse(status = 200, body = """ {"data": {"title":"My Todo"} } """)
    response.isSuccess should be(true)
  }

  "isSuccess" should "return false if there are errors in the response body" in {
    val response = GraphQlResponse(status = 200, body = s""" {"data": null, "errors": [$exampleError] } """)
    response.isSuccess should be(false)
  }

  "isFailure" should "return false if there are NO errors in the response body" in {
    val response = GraphQlResponse(status = 200, body = """ {"data": {"title":"My Todo"} } """)
    response.isFailure should be(false)
  }

  "isFailure" should "return true if there are errors in the response body" in {
    val response = GraphQlResponse(status = 200, body = s""" {"data": null, "errors": [$exampleError] } """)
    response.isFailure should be(true)
  }

  "firstError" should "return the first error in a failed response" in {
    val errorCode    = 2222
    val errorMessage = "this is the message of the error"
    val firstError   = errorJson(errorCode, errorMessage)
    val response     = GraphQlResponse(status = 200, body = s""" {"data": null, "errors": [$firstError, $exampleError] } """)

    val error = response.firstError
    error.code should equal(errorCode)
    error.message should equal(errorMessage)
  }

  def errorJson(code: Int, message: String): String = s"""{"code":$code, "message":"$message"}"""
} 
Example 135
Source File: GraphQlClientSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.graphql

import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.{Await, Awaitable}

class GraphQlClientSpec extends FlatSpec with Matchers {
  import cool.graph.stub.Import._
  import scala.concurrent.ExecutionContext.Implicits.global

  val stub = Request("POST", "/graphql-endpoint").stub(200, """{"data": {"id": "1234"}}""").ignoreBody

  "sendQuery" should "send the correct the correct JSON structure to the server" in {
    withStubServer(List(stub)).withArg { server =>
      val uri    = s"http://localhost:${server.port}${stub.path}"
      val client = GraphQlClient(uri)
      val query  = """ { mutation { createTodo(title:"the title"){id} }} """
      val result = await(client.sendQuery(query))

      val expectedBody = s"""{"query":"${escapeQuery(query)}"}"""
      server.lastRequest.body should equal(expectedBody)

      result.status should equal(stub.stubbedResponse.status)
      result.body should equal(stub.stubbedResponse.body)
    }
  }

  "sendQuery" should "send the specified headers to the server" in {
    withStubServer(List(stub)).withArg { server =>
      val uri     = s"http://localhost:${server.port}${stub.path}"
      val header1 = "Header1" -> "Header1Value"
      val header2 = "Header2" -> "Header2Value"
      val headers = Map(header1, header2)
      val client  = GraphQlClient(uri, headers)
      val query   = """ { mutation { createTodo(title:"the title"){id} }} """
      val result  = await(client.sendQuery(query))

      server.lastRequest.headers should contain(header1)
      server.lastRequest.headers should contain(header2)

      result.status should equal(stub.stubbedResponse.status)
      result.body should equal(stub.stubbedResponse.body)
    }
  }

  def escapeQuery(query: String) = query.replace("\"", "\\\"")

  def await[T](awaitable: Awaitable[T]): T = {
    import scala.concurrent.duration._
    Await.result(awaitable, 5.seconds)
  }
} 
Example 136
Source File: MetricsTagSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.metrics

import cool.graph.metrics.utils.{TestLiveMetricsManager, TestMetricsManager}
import org.scalatest.{FlatSpec, Matchers}

class MetricsTagSpec extends FlatSpec with Matchers {
  it should "have the correct metrics tags without extra custom tags" in {
    val manager = new TestMetricsManager()
    val counter = manager.defineCounter("testCounter")

    counter.constructMetricString(0, Seq("1", "2")) should equal("TestService.testCounter#env=test,instance=local,container=none")
  }

  it should "have the correct metrics tags with custom metrics set" in {
    val manager = new TestMetricsManager()
    val counter = manager.defineCounter("testCounter", CustomTag("testCustomTag1"), CustomTag("testCustomTag2"))

    counter.constructMetricString(0, Seq("1", "2")) should equal(
      "TestService.testCounter#env=test,instance=local,container=none,testCustomTag1=1,testCustomTag2=2")
  }

  it should "have the correct metrics tags for gauges" in {
    val manager = new TestMetricsManager()
    val gauge   = manager.defineGauge("testCounter", (CustomTag("testCustomTag1"), "1"), (CustomTag("testCustomTag2"), "2"))

    gauge.constructedMetricName should equal("TestService.testCounter#env=test,instance=local,container=none,testCustomTag1=1,testCustomTag2=2")
  }

  it should "have the correct metrics tags for timers" in {
    val manager = new TestMetricsManager()
    val timer   = manager.defineTimer("testTimer", CustomTag("projectId"))

    timer.constructMetricString(0, Seq("1234")) should equal("TestService.testTimer#env=test,instance=local,container=none,projectId=1234")
  }

  it should "ignore custom metric tags if the number of provided values doesn't match" in {
    val manager = new TestMetricsManager()
    val counter = manager.defineCounter("testCounter", CustomTag("testCustomTag1"), CustomTag("testCustomTag2"))

    counter.constructMetricString(0, Seq("1")) should equal("TestService.testCounter#env=test,instance=local,container=none")
  }

  it should "not record a custom tag value if the recorded value is above the specified threshold" in {
    val manager = new TestMetricsManager()
    val timer   = manager.defineTimer("testTimer", CustomTag("projectId", recordingThreshold = 100))

    timer.constructMetricString(90, Seq("1234")) should equal("TestService.testTimer#env=test,instance=local,container=none,projectId=-")
  }

  // Only run if you want some live metrics in librato
  ignore should "do some live metrics against librato" in {
    val manager = new TestLiveMetricsManager

    val counter       = manager.defineCounter("testCounter")
    val counterCustom = manager.defineCounter("testCounterWithTags", CustomTag("tag1"), CustomTag("tag2"))
    val gauge         = manager.defineGauge("testGauge")
    val gaugeCustom   = manager.defineGauge("testGaugeWithTags", (CustomTag("tag1"), "constantVal"))
    val timer         = manager.defineTimer("testTimer")
    val timerCustom   = manager.defineTimer("testTimerWithTags", CustomTag("tag1"))

    gauge.set(100)
    gaugeCustom.set(50)
    counter.inc()
    counterCustom.inc("val1", "val2")

    timer.time() {
      Thread.sleep(500)
    }

    timerCustom.time("val1") {
      Thread.sleep(800)
    }

    Thread.sleep(10000)
  }
} 
Example 137
Source File: FutureUtilSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.utils.future

import org.scalatest.{Matchers, WordSpec}
import cool.graph.utils.future.FutureUtils._
import org.scalatest.concurrent.ScalaFutures._
import org.scalatest.time.{Millis, Seconds, Span}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class FutureUtilSpec extends WordSpec with Matchers {
  val patienceConfig = PatienceConfig(timeout = Span(5, Seconds), interval = Span(5, Millis))

  "runSequentially" should {
    "run all given futures in sequence" in {

      val testList = List[() => Future[Long]](
        () => { Thread.sleep(500); Future.successful(System.currentTimeMillis()) },
        () => { Thread.sleep(250); Future.successful(System.currentTimeMillis()) },
        () => { Thread.sleep(100); Future.successful(System.currentTimeMillis()) }
      )

      val values: Seq[Long] = testList.runSequentially.futureValue(patienceConfig)
      (values, values.tail).zipped.forall((a, b) => a < b) should be(true)
    }
  }

  "andThenFuture" should {

    "Should work correctly in error and success cases" in {
      val f1 = Future.successful(100)
      val f2 = Future.failed(new Exception("This is a test"))

      whenReady(
        f1.andThenFuture(
          handleSuccess = x => Future.successful("something"),
          handleFailure = e => Future.successful("another something")
        )) { res =>
        res should be(100)
      }

      whenReady(
        f2.andThenFuture(
            handleSuccess = (x: Int) => Future.successful("something"),
            handleFailure = e => Future.successful("another something")
          )
          .failed) { res =>
        res shouldBe a[Exception]
      }
    }
  }

} 
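Note: FutureUtilSpec drives its assertions through ScalaFutures (futureValue and whenReady) with an explicit PatienceConfig. A minimal sketch of that machinery on plain Futures, with illustrative timeout values:

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class ScalaFuturesSketchSpec extends FlatSpec with Matchers with ScalaFutures {
  // Implicit patience picked up by futureValue and whenReady
  implicit val defaultPatience =
    PatienceConfig(timeout = Span(2, Seconds), interval = Span(15, Millis))

  "futureValue" should "block until the Future completes and unwrap its value" in {
    Future(21 * 2).futureValue shouldBe 42
  }

  "whenReady" should "run the given assertions once the Future completes" in {
    whenReady(Future("done")) { result =>
      result should endWith ("ne")
    }
  }
}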
Example 138
Source File: JavascriptExecutorSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.javascriptEngine

import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.concurrent.ScalaFutures._

import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits.global

class JavascriptExecutorSpec extends FlatSpec with Matchers {
  "engine" should "execute simple script" in {

    val before = System.currentTimeMillis()

    JavascriptExecutor.execute("""
        |console.log(42)
        |
        |console.log(43 + 2 + "lalala")
      """.stripMargin).futureValue(Timeout(Duration.Inf)) should be(Result("42\n45lalala\n", ""))

    println("1 (initial): " + (System.currentTimeMillis() - before))

    val before2 = System.currentTimeMillis()

    JavascriptExecutor.execute("""
                                 |console.log(42)
                                 |
                                 |console.log(43 + 2 + "lalala")
                               """.stripMargin).futureValue(Timeout(Duration.Inf)) should be(Result("42\n45lalala\n", ""))

    println("1 (warm): " + (System.currentTimeMillis() - before2))

    val before3 = System.currentTimeMillis()

    (1 to 10).foreach(_ => JavascriptExecutor.execute("""
                                                        |console.log(42)
                                                        |
                                                        |console.log(43 + 2 + "lalala")
                                                      """.stripMargin).futureValue(Timeout(Duration.Inf)) should be(Result("42\n45lalala\n", "")))

    println("10 (seq): " + (System.currentTimeMillis() - before3))

    val before4 = System.currentTimeMillis()

    Future.sequence((1 to 10).map(_ => JavascriptExecutor.execute("""
                                     |console.log(42)
                                     |
                                     |console.log(43 + 2 + "lalala")
                                   """.stripMargin))).futureValue(Timeout(Duration.Inf))

    println("10 (par): " + (System.currentTimeMillis() - before4))

    val before5 = System.currentTimeMillis()

    Future.sequence((1 to 100).map(_ => JavascriptExecutor.execute("""
                                                                    |console.log(42)
                                                                    |
                                                                    |console.log(43 + 2 + "lalala")
                                                                  """.stripMargin))).futureValue(Timeout(Duration.Inf))

    println("100 (par): " + (System.currentTimeMillis() - before5))

    val before6 = System.currentTimeMillis()

    Future
      .sequence((1 to 1000).map(_ => JavascriptExecutor.execute("""
                                                                     |console.log(42)
                                                                     |
                                                                     |console.log(43 + 2 + "lalala")
                                                                   """.stripMargin)))
      .futureValue(Timeout(Duration.Inf))

    println("1000 (par): " + (System.currentTimeMillis() - before6))

  }
} 
Example 139
Source File: CaffeineImplForSyncCacheSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.cache

import org.scalatest.{FlatSpec, Matchers}

class CaffeineImplForSyncCacheSpec extends FlatSpec with Matchers {

  def newCache = CaffeineImplForCache.lfu[String, String](initialCapacity = 100, maxCapacity = 100)

  "it" should "handle None results correctly" in {
    val cache  = newCache
    val result = cache.getOrUpdateOpt("key", () => None)
    result should be(None)
    cache.underlying.estimatedSize() should be(0)

    val foo     = Some("foo")
    val result2 = cache.getOrUpdateOpt("key", () => foo)
    result2 should be(foo)
    cache.underlying.estimatedSize() should be(1)
  }
} 
Example 140
Source File: PubSubRouterAltSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.pubsub.inmemory

import akka.actor.Props
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import cool.graph.akkautil.SingleThreadedActorSystem
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouterAlt
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class PubSubRouterAltSpec
    extends TestKit(SingleThreadedActorSystem("pubsub-router-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  override def afterAll = shutdown(verifySystemShutdown = true)

  "The PubSubRouter implementation" should {
    "subscribe subscribers correctly and route messages" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Publish(topic, "test")
      probe.expectMsg("test")
      probe.expectNoMsg(max = 1.second)

      routerActor ! Publish("testTopic2", "test2")
      probe.expectNoMsg(max = 1.second)
    }

    "unsubscribe subscribers correctly" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Unsubscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 0

      routerActor ! Publish(topic, "test")
      probe.expectNoMsg(max = 1.second)
    }

    "handle actor terminations" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      system.stop(probe.ref)
      Thread.sleep(50)
      router.router.routees.length shouldEqual 0
    }
  }
} 
Example 141
Source File: JeroMQSocketSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.communication.socket

import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{Matchers, BeforeAndAfter, OneInstancePerTest, FunSpec}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.zeromq.ZMsg

class JeroMQSocketSpec extends FunSpec with MockitoSugar
  with OneInstancePerTest with BeforeAndAfter with Matchers
{
  private val runnable = mock[ZeroMQSocketRunnable]
  @volatile private var running = true
  //  Mock the running of the runnable for the tests
  doAnswer(new Answer[Unit] {
    override def answer(invocation: InvocationOnMock): Unit = while (running) {
      Thread.sleep(1)
    }
  }).when(runnable).run()


  //  Mock the close of the runnable to shutdown
  doAnswer(new Answer[Unit] {
    override def answer(invocation: InvocationOnMock): Unit = running = false
  }).when(runnable).close()

  private val socket: JeroMQSocket = new JeroMQSocket(runnable)

  after {
    running = false
  }

  describe("JeroMQSocket") {
    describe("#send") {
      it("should offer a message to the runnable") {
        val message: String = "Some Message"
        val expected = ZMsg.newStringMsg(message)

        socket.send(message.getBytes)
        verify(runnable).offer(expected)
      }

      it("should thrown and AssertionError when socket is no longer alive") {
        socket.close()

        intercept[AssertionError] {
          socket.send("".getBytes)
        }
      }
    }

    describe("#close") {
      it("should close the runnable") {
        socket.close()

        verify(runnable).close()
      }

      it("should close the socket thread") {
        socket.close()

        socket.isAlive should be (false)
      }
    }

    describe("#isAlive") {
      it("should evaluate to true when the socket thread is alive") {
        socket.isAlive should be (true)
      }

      it("should evaluate to false when the socket thread is dead") {
        socket.close()

        socket.isAlive should be (false)
      }
    }
  }
} 
Example 142
Source File: HmacSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.communication.security

import java.security.NoSuchAlgorithmException

import org.scalatest.{FunSpec, Matchers}

class HmacSpec extends FunSpec with Matchers {
  describe("Hmac Object") {
    describe("#apply") {
      it("should fail if the algorithm is not available") {
        val nonEmptyKey = "FILL"
        val badAlgorithm = "One day, I want to be a real algorithm"

        intercept[NoSuchAlgorithmException] {
          val hmac = Hmac(nonEmptyKey, HmacAlgorithm(badAlgorithm))
        }
      }

      it("should succeed if the algorithm is available") {
        val goodAlgorithm = HmacAlgorithm.SHA256

        val hmac = Hmac("", goodAlgorithm)
        hmac.algorithm should be (goodAlgorithm)
      }
    }

    describe("#newMD5") {
      it("should produce an Hmac with the algorithm set to MD5") {
        val hmac = Hmac.newMD5("")

        hmac.algorithm should be(HmacAlgorithm.MD5)
      }
    }

    describe("#newSHA1") {
      it("should produce an Hmac with the algorithm set to SHA1") {
        val hmac = Hmac.newSHA1("")

        hmac.algorithm should be(HmacAlgorithm.SHA1)
      }
    }

    describe("#newSHA256") {
      it("should produce an Hmac with the algorithm set to SHA256") {
        val hmac = Hmac.newSHA256("")

        hmac.algorithm should be(HmacAlgorithm.SHA256)
      }
    }
  }

  describe("Hmac Class") {
    describe("#apply") {
      // TODO: This should really be mocked since we don't care about the
      //       results, just the send/receive to the underlying implementation
      it("should produce the correct digest") {
        val key = "12345"
        val message = "This is a test of SHA256 in action."
        val expected =
          "e60e1494b0650875fa5eb8384e357d731358c3559c1f223d69dc43ffe13570bc"
        val hmac = new Hmac(key, HmacAlgorithm.SHA256)

        hmac(message) should be(expected)
      }
    }

    describe("#digest") {
      // TODO: This should really be mocked since we don't care about the
      //       results, just the send/receive to the underlying implementation
      it("should produce the correct digest") {
        val key = "12345"
        val message = List("This is a test of SHA256 in action.")
        val expected =
          "e60e1494b0650875fa5eb8384e357d731358c3559c1f223d69dc43ffe13570bc"
        val hmac = new Hmac(key, HmacAlgorithm.SHA256)

        hmac.digest(message) should be(expected)
      }
    }
  }

  describe("HmacAlgorithm") {
    describe("#apply") {
      it("should return a value wrapping the string input") {
        val resultTypeName = HmacAlgorithm("").getClass.getName

        // NOTE: Test written this way since unable to check directly against
        //       the Scala enumeration value
        resultTypeName should be ("scala.Enumeration$Val")
      }
    }
  }
} 
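Note: HmacSpec uses intercept to assert that code throws, while earlier examples on this page use the "a [T] should be thrownBy" matcher form; the two are interchangeable. A minimal sketch showing both, independent of the toree code:

import org.scalatest.{FunSpec, Matchers}

class ExceptionSketchSpec extends FunSpec with Matchers {
  describe("exception assertions") {
    it("intercept returns the thrown exception for further inspection") {
      val e = intercept[IllegalArgumentException] {
        require(false, "boom")  // throws IllegalArgumentException("requirement failed: boom")
      }
      e.getMessage should include ("boom")
    }

    it("be thrownBy is the matcher-style equivalent") {
      an [IllegalArgumentException] should be thrownBy {
        require(false, "boom")
      }
    }
  }
}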
Example 143
Source File: OrderedSupportSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.communication.utils

import akka.actor._
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}

case class OrderedType()
case class NotOrderedType()
case class FinishProcessingMessage()
case class ReceiveMessageCount(count: Int)

class TestOrderedSupport extends OrderedSupport {
  var receivedCounter = 0
  override def orderedTypes(): Seq[Class[_]] = Seq(classOf[OrderedType])

  override def receive: Receive = {
    case OrderedType() =>
      startProcessing()
      receivedCounter = receivedCounter + 1
      sender ! ReceiveMessageCount(receivedCounter)
    case NotOrderedType() =>
      receivedCounter = receivedCounter + 1
      sender ! ReceiveMessageCount(receivedCounter)
    case FinishProcessingMessage() =>
      finishedProcessing()
  }
}

class OrderedSupportSpec extends TestKit(ActorSystem("OrderedSupportSystem"))
  with ImplicitSender with Matchers with FunSpecLike
  with MockitoSugar  {

  describe("OrderedSupport"){
    describe("#waiting"){
      it("should wait for types defined in orderedTypes"){
        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])

        // Send a message having a type in orderedTypes
        // Starts processing and is handled with receive()
        testOrderedSupport ! new OrderedType
        // This message should be handled with waiting()
        testOrderedSupport ! new OrderedType

        // Verify receive was not called for the second OrderedType
        expectMsg(ReceiveMessageCount(1))

      }

      it("should process types not defined in orderedTypes"){
        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])

        // Send a message that starts the processing
        testOrderedSupport ! new OrderedType

        // Send a message having a type not in orderedTypes
        testOrderedSupport ! new NotOrderedType

        // Verify receive did get called for NotOrderedType
        expectMsg(ReceiveMessageCount(1))
        expectMsg(ReceiveMessageCount(2))
      }
    }
    describe("#finishedProcessing"){
      it("should switch actor to receive method"){
        val testOrderedSupport = system.actorOf(Props[TestOrderedSupport])
        
        //  Switch actor to waiting mode
        testOrderedSupport ! new OrderedType

        //  Call finishedProcessing
        testOrderedSupport ! new FinishProcessingMessage

        //  Sending something that would match in receive, and is in orderedTypes
        testOrderedSupport ! new OrderedType

        expectMsg(ReceiveMessageCount(1))
        expectMsg(ReceiveMessageCount(2))

      }

    }
  }

} 
Example 144
Source File: JVMReprSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package integration.interpreter.scala

import java.util
import java.io.ByteArrayOutputStream
import jupyter.{Displayer, Displayers, MIMETypes}
import org.apache.toree.global.StreamState
import org.apache.toree.interpreter.Interpreter
import org.apache.toree.interpreter.Results.Success
import org.apache.toree.kernel.api.{DisplayMethodsLike, KernelLike}
import org.apache.toree.kernel.interpreter.scala.ScalaInterpreter
import org.mockito.Mockito.doReturn
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
import org.scalatest.mock.MockitoSugar
import scala.util.Random

class JVMReprSpec extends FunSpec with Matchers with MockitoSugar with BeforeAndAfter {

  private val outputResult = new ByteArrayOutputStream()
  private var interpreter: Interpreter = _

  before {
    val mockKernel = mock[KernelLike]
    val mockDisplayMethods = mock[DisplayMethodsLike]
    doReturn(mockDisplayMethods).when(mockKernel).display

    interpreter = new ScalaInterpreter().init(mockKernel)

    StreamState.setStreams(outputStream = outputResult)
  }

  after {
    interpreter.stop()
    outputResult.reset()
  }

  describe("ScalaInterpreter") {
    describe("#interpret") {
      it("should display Scala int as a text representation") {
        val (result, outputOrError) = interpreter.interpret("val a = 12")

        result should be(Success)
        outputOrError.isLeft should be(true)
        outputOrError.left.get should be(Map(MIMETypes.TEXT -> "12"))
      }

      it("should display Scala Some(str) as a text representation") {
        val (result, outputOrError) = interpreter.interpret("""val a = Some("str")""")

        result should be(Success)
        outputOrError.isLeft should be(true)
        outputOrError.left.get should be(Map(MIMETypes.TEXT -> "Some(str)"))
      }

      ignore("should use the Jupyter REPR API for display representation") {
        Displayers.register(classOf[DisplayerTest], new Displayer[DisplayerTest] {
          override def display(t: DisplayerTest): util.Map[String, String] = {
            val output = new util.HashMap[String, String]()
            output.put("text/plain", s"test object: ${t.id}")
            output.put("application/json", s"""{"id": ${t.id}""")
            output
          }
        })

        val inst = DisplayerTest()
        interpreter.bind("inst", classOf[DisplayerTest].getName, inst, List())

        val (result, outputOrError) = interpreter.interpret("""inst""")

        result should be(Success)
        outputOrError.isLeft should be(true)
        outputOrError.left.get should be(Map(
          MIMETypes.TEXT -> s"test object: ${inst.id}",
          "application/json" -> s"""{"id": ${inst.id}"""
        ))
      }
    }
  }
}

case class DisplayerTest(id: Long = new Random().nextLong()) 
Example 145
Source File: ClientCommManagerSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.comm

import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.kernel.protocol.v5.content.CommContent
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}

class ClientCommManagerSpec extends FunSpec with Matchers with BeforeAndAfter
  with MockitoSugar
{
  private val TestTargetName = "some target"

  private var mockActorLoader: ActorLoader = _
  private var mockKMBuilder: KMBuilder = _
  private var mockCommRegistrar: CommRegistrar = _
  private var clientCommManager: ClientCommManager = _

  private var generatedCommWriter: CommWriter = _

  before {
    mockActorLoader = mock[ActorLoader]
    mockKMBuilder = mock[KMBuilder]
    mockCommRegistrar = mock[CommRegistrar]

    clientCommManager = new ClientCommManager(
      mockActorLoader,
      mockKMBuilder,
      mockCommRegistrar
    ) {
      override protected def newCommWriter(commId: UUID): CommWriter = {
        val commWriter = super.newCommWriter(commId)

        generatedCommWriter = commWriter

        val spyCommWriter = spy(commWriter)
        doNothing().when(spyCommWriter)
          .sendCommKernelMessage(any[KernelMessageContent with CommContent])

        spyCommWriter
      }
    }
  }

  describe("ClientCommManager") {
    describe("#open") {
      it("should return a wrapped instance of ClientCommWriter") {
        clientCommManager.open(TestTargetName, v5.MsgData.Empty)

        // Exposed hackishly for testing
        generatedCommWriter shouldBe a [ClientCommWriter]
      }
    }
  }
} 
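Note: the "shouldBe a [Type]" assertion at the end of ClientCommManagerSpec checks the runtime class of a value. A minimal sketch with illustrative values:

import org.scalatest.{FlatSpec, Matchers}

class TypeCheckSketchSpec extends FlatSpec with Matchers {
  "shouldBe a" should "assert the runtime class of a value" in {
    val value: Any = "hello"
    value shouldBe a [String]
    Option(value) shouldBe a [Some[_]]  // type parameters are erased, so use _ here
  }
}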
Example 146
Source File: ShellClientSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.client.socket

import java.util.UUID

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.communication.security.SecurityActorType
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import play.api.libs.json.Json

class ShellClientSpec extends TestKit(ActorSystem("ShellActorSpec"))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  private val SignatureEnabled = true

  describe("ShellClientActor") {
    val socketFactory = mock[SocketFactory]
    val mockActorLoader = mock[ActorLoader]
    val probe : TestProbe = TestProbe()
    when(socketFactory.ShellClient(
      any(classOf[ActorSystem]), any(classOf[ActorRef])
    )).thenReturn(probe.ref)

    val signatureManagerProbe = TestProbe()
    doReturn(system.actorSelection(signatureManagerProbe.ref.path.toString))
      .when(mockActorLoader).load(SecurityActorType.SignatureManager)

    val shellClient = system.actorOf(Props(
      classOf[ShellClient], socketFactory, mockActorLoader, SignatureEnabled
    ))

    describe("send execute request") {
      it("should send execute request") {
        val request = ExecuteRequest(
          "foo", false, true, UserExpressions(), true
        )
        val header = Header(
          UUID.randomUUID().toString, "spark",
          UUID.randomUUID().toString, MessageType.Incoming.ExecuteRequest.toString,
          "5.0"
        )
        val kernelMessage = KernelMessage(
          Seq[Array[Byte]](), "",
          header, HeaderBuilder.empty,
          Metadata(), Json.toJson(request).toString
        )
        shellClient ! kernelMessage

        // Echo back the kernel message sent to have a signature injected
        signatureManagerProbe.expectMsgClass(classOf[KernelMessage])
        signatureManagerProbe.reply(kernelMessage)

        probe.expectMsgClass(classOf[ZMQMessage])
      }
    }
  }
} 
Example 147
Source File: HeartbeatClientSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Matchers._
import org.mockito.Mockito._

class HeartbeatClientSpec extends TestKit(ActorSystem("HeartbeatActorSpec"))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("HeartbeatClientActor") {
    val socketFactory = mock[SocketFactory]
    val mockActorLoader = mock[ActorLoader]
    val probe : TestProbe = TestProbe()
    when(socketFactory.HeartbeatClient(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeatClient = system.actorOf(Props(
      classOf[HeartbeatClient], socketFactory, mockActorLoader, true
    ))

    describe("send heartbeat") {
      it("should send ping ZMQMessage") {
        heartbeatClient ! HeartbeatMessage
        probe.expectMsgClass(classOf[ZMQMessage])
      }
    }
  }
} 
Example 148
Source File: SparkKernelClientSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.client

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import org.apache.toree.comm.{CommCallbacks, CommStorage, CommRegistrar}
import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.execution.ExecuteRequestTuple
import scala.concurrent.duration._
import org.mockito.Mockito._
import org.mockito.Matchers.{eq => mockEq, _}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}

class SparkKernelClientSpec
  extends TestKit(ActorSystem("SparkKernelClientActorSystem"))
  with Matchers with MockitoSugar with FunSpecLike with BeforeAndAfter
{
  private val TestTargetName = "some target"

  private var mockActorLoader: ActorLoader = _
  private var mockCommRegistrar: CommRegistrar = _
  private var sparkKernelClient: SparkKernelClient = _
  private var executeRequestProbe: TestProbe = _
  private var shellClientProbe: TestProbe = _

  before {
    mockActorLoader = mock[ActorLoader]
    mockCommRegistrar = mock[CommRegistrar]

    executeRequestProbe = TestProbe()
    when(mockActorLoader.load(MessageType.Incoming.ExecuteRequest))
      .thenReturn(system.actorSelection(executeRequestProbe.ref.path.toString))

    shellClientProbe = TestProbe()
    when(mockActorLoader.load(SocketType.ShellClient))
      .thenReturn(system.actorSelection(shellClientProbe.ref.path.toString))

    sparkKernelClient = new SparkKernelClient(
      mockActorLoader, system, mockCommRegistrar)
  }

  describe("SparkKernelClient") {
    describe("#execute") {
      it("should send an ExecuteRequest message") {
        val func = (x: Any) => println(x)
        sparkKernelClient.execute("val foo = 2")
        executeRequestProbe.expectMsgClass(classOf[ExecuteRequestTuple])
      }
    }
  }
} 
Example 149
Source File: ExecutionCounterSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.global

import org.scalatest.{FunSpec, Matchers}

class ExecutionCounterSpec extends FunSpec with Matchers {
  describe("ExecutionCounter") {
    describe("#increment( String )"){
      it("should increment value when key is not present"){
        ExecutionCounter incr "foo" should be(1)
      }
      it("should increment value for key when it is present"){
        ExecutionCounter incr "bar" should be(1)
        ExecutionCounter incr "bar" should be(2)
      }

    }
  }
} 
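Note: the equality assertions that appear throughout these examples (should be, shouldBe, should equal, shouldEqual) all perform an == comparison by default and can generally be used interchangeably. A minimal sketch:

import org.scalatest.{FlatSpec, Matchers}

class EqualitySketchSpec extends FlatSpec with Matchers {
  "the equality assertions" should "be interchangeable for simple == checks" in {
    val n = 2 + 2
    n should be (4)
    n shouldBe 4
    n should equal (4)
    n shouldEqual 4
  }
}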
Example 150
Source File: DataFrameConverterSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Row}
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers}
import play.api.libs.json.{JsArray, JsString, Json}
import test.utils.SparkContextProvider

import scala.collection.mutable

class DataFrameConverterSpec extends FunSpec with MockitoSugar with Matchers with BeforeAndAfterAll {

  lazy val spark = SparkContextProvider.sparkContext

  override protected def afterAll(): Unit = {
    spark.stop()
    super.afterAll()
  }

  val dataFrameConverter: DataFrameConverter = new DataFrameConverter
  val mockDataFrame = mock[DataFrame]
  val mockRdd = spark.parallelize(Seq(Row(new mutable.WrappedArray.ofRef(Array("test1", "test2")), 2, null)))
  val mockStruct = mock[StructType]
  val columns = Seq("foo", "bar").toArray

  doReturn(mockStruct).when(mockDataFrame).schema
  doReturn(columns).when(mockStruct).fieldNames
  doReturn(mockRdd).when(mockDataFrame).rdd

  describe("DataFrameConverter") {
    describe("#convert") {
      it("should convert to a valid JSON object") {
        val someJson = dataFrameConverter.convert(mockDataFrame, "json")
        val jsValue = Json.parse(someJson.get)
        jsValue \ "columns" should be (JsArray(Seq(JsString("foo"), JsString("bar"))))
        jsValue \ "rows" should be (JsArray(Seq(
          JsArray(Seq(JsString("[test1, test2]"), JsString("2"), JsString("null")))
        )))
      }
      it("should convert to csv") {
        val csv = dataFrameConverter.convert(mockDataFrame, "csv").get
        val values = csv.split("\n")
        values(0) shouldBe "foo,bar"
        values(1) shouldBe "[test1, test2],2,null"
      }
      it("should convert to html") {
        val html = dataFrameConverter.convert(mockDataFrame, "html").get
        html.contains("<th>foo</th>") should be(true)
        html.contains("<th>bar</th>") should be(true)
        html.contains("<td>[test1, test2]</td>") should be(true)
        html.contains("<td>2</td>") should be(true)
        html.contains("<td>null</td>") should be(true)
      }
      it("should convert limit the selection") {
        val someLimited = dataFrameConverter.convert(mockDataFrame, "csv", 1)
        val limitedLines = someLimited.get.split("\n")
        limitedLines.length should be(2)
      }
      it("should return a Failure for invalid types") {
        val result = dataFrameConverter.convert(mockDataFrame, "Invalid Type")
        result.isFailure should be(true)
      }
    }
  }
} 
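
The stubbing above uses Mockito's doReturn(...).when(...) form instead of the more common when(...).thenReturn(...). The difference is that doReturn does not invoke the stubbed method while the stub is being set up, which makes it the safer choice for spies and for objects whose methods have side effects. A small sketch of both styles, using a java.util.List mock purely for illustration:

import java.util.{List => JList}

import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers}

class StubbingStyleSketch extends FunSpec with Matchers with MockitoSugar {
  describe("Mockito stubbing styles") {
    it("should support both when/thenReturn and doReturn/when") {
      val list = mock[JList[String]]

      // Standard style: list.get(0) is invoked on the mock while stubbing
      when(list.get(0)).thenReturn("first")

      // Alternative style: nothing is invoked during stubbing, so it also
      // works on spies of real objects (as in the KernelCommManagerSpec below)
      doReturn("second").when(list).get(1)

      list.get(0) should be ("first")
      list.get(1) should be ("second")
    }
  }
}
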
Example 151
Source File: KernelCommManagerSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.comm

import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.CommContent
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}

class KernelCommManagerSpec extends FunSpec with Matchers with BeforeAndAfter
  with MockitoSugar
{
  private val TestTargetName = "some target"

  private var mockActorLoader: ActorLoader = _
  private var mockKMBuilder: KMBuilder = _
  private var mockCommRegistrar: CommRegistrar = _
  private var kernelCommManager: KernelCommManager = _

  private var generatedCommWriter: CommWriter = _

  before {
    mockActorLoader = mock[ActorLoader]
    mockKMBuilder = mock[KMBuilder]
    mockCommRegistrar = mock[CommRegistrar]

    kernelCommManager = new KernelCommManager(
      mockActorLoader,
      mockKMBuilder,
      mockCommRegistrar
    ) {
      override protected def newCommWriter(commId: UUID): CommWriter = {
        val commWriter = super.newCommWriter(commId)

        generatedCommWriter = commWriter

        val spyCommWriter = spy(commWriter)
        doNothing().when(spyCommWriter)
          .sendCommKernelMessage(any[KernelMessageContent with CommContent])

        spyCommWriter
      }
    }
  }

  describe("KernelCommManager") {
    describe("#open") {
      it("should return a wrapped instance of KernelCommWriter") {
        kernelCommManager.open(TestTargetName, v5.MsgData.Empty)

        // Exposed hackishly for testing
        generatedCommWriter shouldBe a [KernelCommWriter]
      }
    }
  }
} 
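
The assertion generatedCommWriter shouldBe a [KernelCommWriter] above relies on ScalaTest's type matchers. A minimal, self-contained sketch of that syntax, with Animal and Dog classes invented for the example:

import org.scalatest.{FunSpec, Matchers}

class TypeMatcherSketch extends FunSpec with Matchers {
  class Animal
  class Dog extends Animal

  describe("type matchers") {
    it("should check the runtime class of a value") {
      val pet: Animal = new Dog

      pet shouldBe a [Dog]          // passes: the runtime type is Dog
      pet shouldBe an [Animal]      // passes: Dog extends Animal
      pet should not be a [Thread]  // passes: completely unrelated type
    }
  }
}
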
Example 152
Source File: LSMagicSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import java.io.OutputStream
import java.net.URL

import org.apache.toree.interpreter.Interpreter
import org.apache.toree.magic.dependencies.{IncludeOutputStream, IncludeInterpreter}
import org.apache.toree.magic.{CellMagic, LineMagic}
import org.apache.spark.SparkContext
import org.scalatest.{Matchers, FunSpec}
import org.scalatest.mock.MockitoSugar

import org.mockito.Mockito._
import org.mockito.Matchers._

class TestLSMagic(sc: SparkContext, intp: Interpreter, os: OutputStream)
  extends LSMagic
  with IncludeInterpreter
  with IncludeOutputStream
  {
    override val interpreter: Interpreter = intp
    override val outputStream: OutputStream = os
  }

class LSMagicSpec extends FunSpec with Matchers with MockitoSugar {
  describe("LSMagic") {

    describe("#execute") {
      it("should call println with a magics message") {
        val lsm = spy(new TestLSMagic(
          mock[SparkContext], mock[Interpreter], mock[OutputStream])
        )
        val classList = new BuiltinLoader().loadClasses()
        lsm.execute("")
        verify(lsm).magicNames("%", classOf[LineMagic], classList)
        verify(lsm).magicNames("%%", classOf[CellMagic], classList)
      }
    }

    describe("#magicNames") {
      it("should filter classnames by interface") {
        val prefix = "%"
        val interface = classOf[LineMagic]
        val classes : List[Class[_]] = List(classOf[LSMagic], classOf[Integer])
        val lsm = new TestLSMagic(
          mock[SparkContext], mock[Interpreter], mock[OutputStream])
        lsm.magicNames(prefix, interface, classes).length should be(1)
      }
      it("should prepend prefix to each name"){
        val prefix = "%"
        val className = classOf[LSMagic].getSimpleName
        val interface = classOf[LineMagic]
        val expected = s"${prefix}${className}"
        val classes : List[Class[_]] = List(classOf[LSMagic], classOf[Integer])
        val lsm = new TestLSMagic(
          mock[SparkContext], mock[Interpreter], mock[OutputStream])
        lsm.magicNames(prefix, interface, classes) should be(List(expected))
      }
    }

  }

} 
Example 153
Source File: BuiltinLoaderSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpec}

class BuiltinLoaderSpec extends FunSpec with Matchers with MockitoSugar {
  describe("BuiltinLoader") {
    describe("#getClasses") {
      it("should return classes in a package") {
        val pkg = this.getClass.getPackage.getName
        val classes = new BuiltinLoader().getClasses(pkg)
        classes.size shouldNot be(0)
      }
    }

    describe("#loadClasses") {
      it("should return class objects for classes in a package") {
        val pkg = this.getClass.getPackage.getName
        val classes = new BuiltinLoader().loadClasses(pkg).toList
        classes.contains(this.getClass) should be (true)
      }
    }
  }
} 
Example 154
Source File: HtmlSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic.CellMagicOutput
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers}

class HtmlSpec extends FunSpec with Matchers with MockitoSugar {
  describe("Html"){
    describe("#execute") {
      it("should return the entire cell's contents with the MIME type of " +
         "text/html") {
        val htmlMagic = new Html

        val code = "some code on a line\nanother line"
        val expected = CellMagicOutput(MIMEType.TextHtml -> code)
        htmlMagic.execute(code) should be (expected)
      }
    }
  }
} 
Example 155
Source File: JavaScriptSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers}
import org.apache.toree.magic.CellMagicOutput
import org.apache.toree.kernel.protocol.v5.MIMEType

class JavaScriptSpec extends FunSpec with Matchers with MockitoSugar {
  describe("JavaScript"){
    describe("#execute") {
      it("should return the entire cell's contents with the MIME type of text/javascript") {
        val javaScriptMagic = new JavaScript

        val code = "some code on a line\nmore code on another line"
        val expected = CellMagicOutput(MIMEType.ApplicationJavaScript -> code)
        javaScriptMagic.execute(code) should be (expected)
      }
    }
  }
} 
Example 156
Source File: CodeCompleteHandlerSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor._
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.Main
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.CompleteRequest
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers}
import org.mockito.Mockito._
import test.utils.MaxAkkaTestTimeout

class CodeCompleteHandlerSpec extends TestKit(
  ActorSystem("CodeCompleteHandlerSpec", None, Some(Main.getClass.getClassLoader))
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar
  with BeforeAndAfter {

  var actorLoader: ActorLoader = _
  var handlerActor: ActorRef = _
  var kernelMessageRelayProbe: TestProbe = _
  var interpreterProbe: TestProbe = _
  var statusDispatchProbe: TestProbe = _

  before {
    actorLoader = mock[ActorLoader]

    handlerActor = system.actorOf(Props(classOf[CodeCompleteHandler], actorLoader))

    kernelMessageRelayProbe = TestProbe()
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(system.actorSelection(kernelMessageRelayProbe.ref.path.toString))

    interpreterProbe = new TestProbe(system)
    when(actorLoader.load(SystemActorType.Interpreter))
      .thenReturn(system.actorSelection(interpreterProbe.ref.path.toString))

    statusDispatchProbe = new TestProbe(system)
    when(actorLoader.load(SystemActorType.StatusDispatch))
      .thenReturn(system.actorSelection(statusDispatchProbe.ref.path.toString))
  }

  def replyToHandlerWithOkAndResult() = {
    val expectedClass = classOf[CompleteRequest]
    interpreterProbe.expectMsgClass(expectedClass)
    interpreterProbe.reply((0, List[String]()))
  }

  def replyToHandlerWithOkAndBadResult() = {
    val expectedClass = classOf[CompleteRequest]
    interpreterProbe.expectMsgClass(expectedClass)
    interpreterProbe.reply("hello")
  }

  describe("CodeCompleteHandler (ActorLoader)") {
    it("should send a CompleteRequest") {
      handlerActor ! MockCompleteRequestKernelMessage
      replyToHandlerWithOkAndResult()
      kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
        case KernelMessage(_, _, header, _, _, _) =>
          header.msg_type == MessageType.Outgoing.CompleteReply.toString
      }
    }

    it("should throw an error for bad JSON") {
      handlerActor ! MockKernelMessageWithBadJSON
      var result = false
      try {
        replyToHandlerWithOkAndResult()
      }
      catch {
        case t: Throwable => result = true
      }
      result should be (true)
    }

    it("should throw an error for bad code completion") {
      handlerActor ! MockCompleteRequestKernelMessage
      try {
        replyToHandlerWithOkAndBadResult()
      }
      catch {
        case error: Exception => error.getMessage should be ("Parse error in CodeCompleteHandler")
      }
    }

    it("should send an idle message") {
      handlerActor ! MockCompleteRequestKernelMessage
      replyToHandlerWithOkAndResult()
      statusDispatchProbe.fishForMessage(MaxAkkaTestTimeout) {
        case Tuple2(status, _) =>
          status == KernelStatusType.Idle
      }
    }
  }
} 
Example 157
Source File: GenericSocketMessageHandlerSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor.{ActorSystem, Props, ActorRef, ActorSelection}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import test.utils.MaxAkkaTestTimeout

class GenericSocketMessageHandlerSpec extends TestKit(
  ActorSystem(
    "GenericSocketMessageHandlerSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  ))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("GenericSocketMessageHandler( ActorLoader, SocketType )") {
    //  Create a mock ActorLoader for the Relay we are going to test
    val actorLoader: ActorLoader = mock[ActorLoader]

    //  Create a probe for the ActorSelection that the ActorLoader will return
    val selectionProbe: TestProbe = TestProbe()
    val selection: ActorSelection = system.actorSelection(selectionProbe.ref.path.toString)
    when(actorLoader.load(SocketType.Control)).thenReturn(selection)

    //  The Relay we are going to be testing against
    val genericHandler: ActorRef = system.actorOf(
      Props(classOf[GenericSocketMessageHandler], actorLoader, SocketType.Control)
    )

    describe("#receive( KernelMessage )") {
      genericHandler ! MockKernelMessage

      it("should send the message to the selected actor"){
        selectionProbe.expectMsg(MaxAkkaTestTimeout, MockKernelMessage)
      }
    }
  }
} 
Example 158
Source File: KernelInfoRequestHandlerSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.handler
import akka.actor.{ActorSelection, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.Main
import org.apache.toree.kernel.protocol.v5.content.KernelInfoReply
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5._
import org.mockito.AdditionalMatchers.{not => mockNot}
import org.mockito.Matchers.{eq => mockEq}
import com.typesafe.config.ConfigFactory
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import play.api.libs.json.Json
import test.utils.MaxAkkaTestTimeout

object KernelInfoRequestHandlerSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class KernelInfoRequestHandlerSpec extends TestKit(
  ActorSystem("KernelInfoRequestHandlerSpec",
    ConfigFactory.parseString(KernelInfoRequestHandlerSpec.config),
    Main.getClass.getClassLoader)
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  val actorLoader: ActorLoader =  mock[ActorLoader]
  val actor = system.actorOf(Props(classOf[KernelInfoRequestHandler], actorLoader, LanguageInfo("test", "1.0.0", Some(".test"))))

  val relayProbe : TestProbe = TestProbe()
  val relaySelection : ActorSelection =
    system.actorSelection(relayProbe.ref.path)
  when(actorLoader.load(SystemActorType.KernelMessageRelay))
    .thenReturn(relaySelection)
  when(actorLoader.load(mockNot(mockEq(SystemActorType.KernelMessageRelay))))
    .thenReturn(system.actorSelection(""))

  val header = Header("","","","","")
  val kernelMessage = new KernelMessage(
    Seq[Array[Byte]](), "test message", header, header, Metadata(), "{}"
  )

  describe("Kernel Info Request Handler") {
    it("should return a KernelMessage containing kernel info response") {
      actor ! kernelMessage
      val reply = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage]
      val kernelInfo = Json.parse(reply.contentString).as[KernelInfoReply]
      kernelInfo.implementation should be ("spark")
    }
  }
} 
Example 159
Source File: ShellSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object ShellSpec {
  val config ="""
    akka {
      loglevel = "WARNING"
    }"""
}

class ShellSpec extends TestKit(
  ActorSystem(
    "ShellActorSpec",
    ConfigFactory.parseString(ShellSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("Shell") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe : TestProbe = TestProbe()
    when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref)

    val relayProbe : TestProbe = TestProbe()
    val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection)

    val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        //  Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        shell ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        //  Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        shell ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
} 
Example 160
Source File: SocketConfigSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import com.typesafe.config.ConfigFactory
import org.scalatest.{FunSpec, Matchers}
import org.slf4j.LoggerFactory
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, JsValue, Json}

class SocketConfigSpec extends FunSpec with Matchers {
  val logger = LoggerFactory.getLogger("jt4")
  //logger.error("WOOT!")

  private val jsonString: String =
    """
    {
      "stdin_port": 10000,
      "control_port": 10001,
      "hb_port": 10002,
      "shell_port": 10003,
      "iopub_port": 10004,
      "ip": "1.2.3.4",
      "transport": "tcp",
      "signature_scheme": "hmac-sha256",
      "key": ""
    }
    """.stripMargin

  val socketConfigJson: JsValue = Json.parse(jsonString)

  val socketConfigFromConfig = SocketConfig.fromConfig(ConfigFactory.parseString(jsonString))

  val socketConfig = SocketConfig(
    10000, 10001, 10002, 10003, 10004, "1.2.3.4", "tcp", "hmac-sha256", ""
  )

  describe("SocketConfig") {
    describe("implicit conversions") {
      it("should implicitly convert from valid json to a SocketConfig instance") {
        // This is the least safe way to convert as an error is thrown if it fails
        socketConfigJson.as[SocketConfig] should be (socketConfig)
      }

      it("should also work with asOpt") {
        // This is safer, but we lose the error information as it returns
        // None if the conversion fails
        val newCompleteRequest = socketConfigJson.asOpt[SocketConfig]

        newCompleteRequest.get should be (socketConfig)
      }

      it("should also work with validate") {
        // This is the safest as it collects all error information (not just the first error) and reports it
        val CompleteRequestResults = socketConfigJson.validate[SocketConfig]

        CompleteRequestResults.fold(
          (invalid: Seq[(JsPath, Seq[ValidationError])]) => println("Failed!"),
          (valid: SocketConfig) => valid
        ) should be (socketConfig)
      }

      it("should implicitly convert from a SocketConfig instance to valid json") {
        Json.toJson(socketConfig) should be (socketConfigJson)
      }
    }
    describe("#toConfig") {
      it("should implicitly convert from valid json to a SocketConfig instance") {
        // This is the least safe way to convert as an error is thrown if it fails
        socketConfigFromConfig should be (socketConfig)
      }
      
      it("should convert json file to SocketConfig object") {
        socketConfigFromConfig.stdin_port should be (10000)
      }
    }
  }
} 
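
The comments in the spec above contrast the three play-json read styles: as throws if the conversion fails, asOpt returns None and drops the error details, and validate keeps every error. A standalone sketch of the same trade-off, built around an invented Point case class rather than Toree's SocketConfig:

import org.scalatest.{FunSpec, Matchers}
import play.api.libs.json.{JsError, JsSuccess, Json}

case class Point(x: Int, y: Int)
object Point { implicit val pointFormat = Json.format[Point] }

class JsonReadStyleSketch extends FunSpec with Matchers {
  private val good = Json.parse("""{"x": 1, "y": 2}""")
  private val bad  = Json.parse("""{"x": 1}""")

  describe("reading JSON with Matchers") {
    it("should show the trade-off between as, asOpt and validate") {
      good.as[Point] should be (Point(1, 2))         // throws JsResultException on bad input
      bad.asOpt[Point] should be (None)              // safe, but the error details are gone
      bad.validate[Point] shouldBe a [JsError]       // safe, and every error is reported
      good.validate[Point] shouldBe a [JsSuccess[_]]
    }
  }
}
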
Example 161
Source File: IOPubSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object IOPubSpec {
  val config ="""
    akka {
      loglevel = "WARNING"
    }"""
}

class IOPubSpec extends TestKit(
  ActorSystem("IOPubActorSpec",
    ConfigFactory.parseString(IOPubSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("IOPubActor") {
    val socketFactory = mock[SocketFactory]
    val probe : TestProbe = TestProbe()
    when(socketFactory.IOPub(any(classOf[ActorSystem]))).thenReturn(probe.ref)

    val socket = system.actorOf(Props(classOf[IOPub], socketFactory))

    // TODO test that the response type changed
    describe("#receive") {
      it("should reply with a ZMQMessage") {
        //  Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        socket ! MockKernelMessage
        probe.expectMsg(MaxAkkaTestTimeout, MockZMQMessage)
      }
    }
  }
} 
Example 162
Source File: SocketConnectionSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import org.scalatest.{FunSpec, Matchers}

class SocketConnectionSpec extends FunSpec with Matchers {
  describe("SocketConnection"){
   describe("#toString"){
   	it("should properly format connection string"){
      val connection: SocketConnection = SocketConnection("tcp", "127.0.0.1", 1234)
      connection.toString should be ("tcp://127.0.0.1:1234")
   	}
   }
  }

} 
Example 163
Source File: SocketFactorySpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import org.scalatest.{FunSpec, Matchers}

class SocketFactorySpec extends FunSpec with Matchers {
  describe("SocketFactory"){
    describe("HeartbeatConnection"){
    	it("should be composed of transport ip and heartbeat port"){
        val config: SocketConfig = SocketConfig(-1,-1,8000,-1, -1, "<STRING-IP>", "<STRING-TRANSPORT>","<STRING-SCHEME>","<STRING-KEY>")
        val factory: SocketFactory = SocketFactory(config)
        factory.HeartbeatConnection.toString should be ("<STRING-TRANSPORT>://<STRING-IP>:8000")
    	}
    }
  }
} 
Example 164
Source File: HeartbeatSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object HeartbeatSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class HeartbeatSpec extends TestKit(
  ActorSystem(
    "HeartbeatActorSpec",
    ConfigFactory.parseString(HeartbeatSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  val SomeMessage: String = "some message"
  val SomeZMQMessage: ZMQMessage = ZMQMessage(ByteString(SomeMessage.getBytes))

  describe("HeartbeatActor") {
    val socketFactory = mock[SocketFactory]
    val probe : TestProbe = TestProbe()
    when(socketFactory.Heartbeat(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeat = system.actorOf(Props(classOf[Heartbeat], socketFactory))

    describe("send heartbeat") {
      it("should receive and send same ZMQMessage") {
        heartbeat ! SomeZMQMessage
        probe.expectMsg(MaxAkkaTestTimeout, SomeZMQMessage)
      }
    }
  }
} 
Example 165
Source File: StdinSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.kernel.protocol.v5Test._
import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import com.typesafe.config.ConfigFactory
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import test.utils.MaxAkkaTestTimeout

object StdinSpec {
  val config ="""
    akka {
      loglevel = "WARNING"
    }"""
}

class StdinSpec extends TestKit(ActorSystem(
  "StdinActorSpec",
  ConfigFactory.parseString(StdinSpec.config),
  org.apache.toree.Main.getClass.getClassLoader
)) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("Stdin") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe : TestProbe = TestProbe()
    when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref)

    val relayProbe : TestProbe = TestProbe()
    val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection)

    val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        //  Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        stdin ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        //  Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        stdin ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
} 
Example 166
Source File: ActorLoaderSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5.{MessageType, SocketType}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.TestProbeProxyActor
import test.utils.MaxAkkaTestTimeout

class ActorLoaderSpec extends TestKit(
  ActorSystem(
    "ActorLoaderSpecSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  ))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("ActorLoader"){
    describe("#load( MessageType )"){
      it("should load an ActorSelection that has been loaded into the system"){
        val testProbe: TestProbe = TestProbe()
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
          MessageType.Outgoing.ClearOutput.toString)
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(MessageType.Outgoing.ClearOutput) ! "<Test Message>"
        testProbe.expectMsg("<Test Message>")
      }

      it("should expect no message when there is no actor"){
        val testProbe: TestProbe = TestProbe()
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(MessageType.Outgoing.CompleteReply) ! "<Test Message>"
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
        // This tests whether the messages go to the actor's inbox or to the dead letter inbox
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
          MessageType.Outgoing.CompleteReply.toString)
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
      }
    }
    describe("#load( SocketType )"){
      it("should load an ActorSelection that has been loaded into the system"){
        val testProbe: TestProbe = TestProbe()
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.Shell.toString)
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(SocketType.Shell) ! "<Test Message>"
        testProbe.expectMsg("<Test Message>")
      }

      it("should expect no message when there is no actor"){
        val testProbe: TestProbe = TestProbe()
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(SocketType.IOPub) ! "<Test Message>"
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
        // This tests whether the messages go to the actor's inbox or to the dead letter inbox
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.IOPub.toString)
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
      }

    }
  }
} 
Example 167
Source File: SimpleActorLoaderSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel

import akka.actor.{ActorSelection, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5.MessageType
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.TestProbeProxyActor
import test.utils.MaxAkkaTestTimeout

class SimpleActorLoaderSpec extends TestKit(
  ActorSystem(
    "SimpleActorLoaderSpecSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  )
)
  with FunSpecLike with Matchers
{
  describe("SimpleActorLoader") {
    //val system = ActorSystem("SimpleActorLoaderSystem")
    val testMessage: String = "Hello Message"

    describe("#load( MessageType )") {
      it("should load a MessageType Actor"){
        //  Create a new test probe to verify our selection works
        val messageTypeProbe: TestProbe = new TestProbe(system)

        //  Add an actor to the system to send a message to
        system.actorOf(
          Props(classOf[TestProbeProxyActor], messageTypeProbe),
          name = MessageType.Outgoing.ExecuteInput.toString
        )

        //  Create the ActorLoader with our test system
        val actorLoader: SimpleActorLoader = SimpleActorLoader(system)

        //  Get the actor and send it a message
        val loadedMessageActor: ActorSelection =
          actorLoader.load(MessageType.Outgoing.ExecuteInput)

        loadedMessageActor ! testMessage

        //  Assert the probe received the message
        messageTypeProbe.expectMsg(MaxAkkaTestTimeout, testMessage)
      }
    }

  }
} 
Example 168
Source File: UtilitiesSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.kernel

import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.scalatest.{FunSpec, Matchers}


class UtilitiesSpec extends FunSpec with Matchers {
  val header: Header = Header(
    "<UUID>", "<STRING>", "<UUID>", "<STRING>", "<FLOAT>"
  )
  val parentHeader : ParentHeader = ParentHeader(
    "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-FLOAT>"
  )
  val kernelMessage = KernelMessage(
    Seq("<STRING-1>","<STRING-2>").map(x => x.getBytes),
    "<SIGNATURE>", header, parentHeader, Map(), "<STRING>"
  )

  val zmqMessage = ZMQMessage(
    ByteString("<STRING-1>".replaceAll("""\s""", "").getBytes),
    ByteString("<STRING-2>".replaceAll("""\s""", "").getBytes),
    ByteString("<IDS|MSG>".replaceAll("""\s""", "").getBytes),
    ByteString("<SIGNATURE>".replaceAll("""\s""", "").getBytes),
    ByteString(
      """
      {
          "msg_id": "<UUID>",
          "username": "<STRING>",
          "session": "<UUID>",
          "msg_type": "<STRING>",
          "version": "<FLOAT>"
      }
      """.stripMargin.replaceAll("""\s""", "").getBytes),
    ByteString(
      """
      {
          "msg_id": "<PARENT-UUID>",
          "username": "<PARENT-STRING>",
          "session": "<PARENT-UUID>",
          "msg_type": "<PARENT-STRING>",
          "version": "<PARENT-FLOAT>"
      }
      """.stripMargin.replaceAll("""\s""", "").getBytes),
    ByteString("{}".replaceAll("""\s""", "").getBytes),
    ByteString("<STRING>".replaceAll("""\s""", "").getBytes)
  )

  describe("Utilities") {
    describe("implicit #KernelMessageToZMQMessage") {
      it("should correctly convert a kernel message to a ZMQMessage") {
        Utilities.KernelMessageToZMQMessage(kernelMessage) should equal (zmqMessage)
      }
    }

    describe("implicit #ZMQMessageToKernelMessage") {
      it("should correctly convert a ZMQMessage to a kernel message") {
        Utilities.ZMQMessageToKernelMessage(zmqMessage) should equal (kernelMessage)
      }
    }

    describe("implicit conversions should be inverses of each other") {
      it("should convert back to the original message, ZMQ -> Kernel -> ZMQ") {
        Utilities.KernelMessageToZMQMessage(
          Utilities.ZMQMessageToKernelMessage(zmqMessage)
        ) should equal (zmqMessage)
      }
      it("should convert back to the original message, Kernel -> ZMQ -> Kernel") {
        Utilities.ZMQMessageToKernelMessage(
          Utilities.KernelMessageToZMQMessage(kernelMessage)
        ) should equal (kernelMessage)
      }
    }

    describe("implicit #StringToByteString") {
      it("should correctly convert a string to a ByteString") {
        val someString = "some content"
        val expected = ByteString(someString)

        Utilities.StringToByteString(someString) should be (expected)
      }
    }

    describe("implicit #ByteStringToString") {
      it("should correctly convert a ByteString to a string") {
        val expected = "some content"
        val byteString = ByteString(expected)

        Utilities.ByteStringToString(byteString) should be (expected)
      }
    }
  }
} 
Example 169
Source File: StatusDispatchSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.protocol.v5.dispatch

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.KernelStatus
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import play.api.libs.json.Json
import test.utils.MaxAkkaTestTimeout

class StatusDispatchSpec extends TestKit(
  ActorSystem(
    "StatusDispatchSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  )
)
with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter{
  var statusDispatchRef: ActorRef = _
  var relayProbe: TestProbe = _
  before {
    //  Mock the relay with a probe
    relayProbe = TestProbe()
    //  Mock the ActorLoader
    val mockActorLoader: ActorLoader = mock[ActorLoader]
    when(mockActorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(system.actorSelection(relayProbe.ref.path.toString))

    statusDispatchRef = system.actorOf(Props(classOf[StatusDispatch],mockActorLoader))
  }


  describe("StatusDispatch") {
    describe("#receive( KernelStatusType )") {
      it("should send a status message to the relay") {
        statusDispatchRef ! KernelStatusType.Busy
        //  Check the kernel message is the correct type
        val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage]
        statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString)
        //  Check the status is what we sent
        val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus]
        status.execution_state should be (KernelStatusType.Busy.toString)
      }
    }

    describe("#receive( KernelStatusType, Header )") {
      it("should send a status message to the relay") {
        val tuple = Tuple2(KernelStatusType.Busy, mock[Header])
        statusDispatchRef ! tuple
        //  Check the kernel message is the correct type
        val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage]
        statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString)
        //  Check the status is what we sent
        val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus]
        status.execution_state should be (KernelStatusType.Busy.toString)
      }
    }
  }
} 
Example 170
Source File: StreamMethodsSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.kernel.api

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5.KernelMessage
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers}
import play.api.libs.json.Json
import test.utils.MaxAkkaTestTimeout
import org.mockito.Mockito._

class StreamMethodsSpec extends TestKit(
  ActorSystem(
    "StreamMethodsSpec",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  )
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar
  with BeforeAndAfter
{

  private var kernelMessageRelayProbe: TestProbe = _
  private var mockParentHeader: v5.ParentHeader = _
  private var mockActorLoader: v5.kernel.ActorLoader = _
  private var mockKernelMessage: v5.KernelMessage = _
  private var streamMethods: StreamMethods = _

  before {
    kernelMessageRelayProbe = TestProbe()

    mockParentHeader = mock[v5.ParentHeader]

    mockActorLoader = mock[v5.kernel.ActorLoader]
    doReturn(system.actorSelection(kernelMessageRelayProbe.ref.path))
      .when(mockActorLoader).load(v5.SystemActorType.KernelMessageRelay)

    mockKernelMessage = mock[v5.KernelMessage]
    doReturn(mockParentHeader).when(mockKernelMessage).header

    streamMethods = new StreamMethods(mockActorLoader, mockKernelMessage)
  }

  describe("StreamMethods") {
    describe("#()") {
      it("should put the header of the given message as the parent header") {
        val expected = mockKernelMessage.header
        val actual = streamMethods.kmBuilder.build.parentHeader

        actual should be (expected)
      }
    }

    describe("#sendAll") {
      it("should send a message containing all of the given text") {
        val expected = "some text"

        streamMethods.sendAll(expected)

        val outgoingMessage = kernelMessageRelayProbe.receiveOne(MaxAkkaTestTimeout)
        val kernelMessage = outgoingMessage.asInstanceOf[KernelMessage]

        val actual = Json.parse(kernelMessage.contentString)
          .as[v5.content.StreamContent].text

        actual should be (expected)
      }
    }
  }

} 
Example 171
Source File: StdinForSystemSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package system

import org.apache.toree.kernel.protocol.v5.client.SparkKernelClient
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Seconds, Milliseconds, Span}
import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers}
import test.utils.root.{SparkKernelClientDeployer, SparkKernelDeployer}



  describe("Stdin for System") {
    describe("when the kernel requests input") {
      ignore("should receive input based on the client's response function") {
        var response: String = ""
        client.setResponseFunction((_, _) => TestReplyString)

        // Read in a chunk of data (our reply string) and return it as a string
        // to be verified by the test
        client.execute(
          """
            |var result: Array[Byte] = Array()
            |val in = kernel.in
            |do {
            |    result = result :+ in.read().toByte
            |} while(in.available() > 0)
            |new String(result)
          """.stripMargin
        ).onResult { result =>
          response = result.data("text/plain")
        }.onError { _ =>
          fail("Client execution to trigger kernel input request failed!")
        }

        eventually {
          response should contain (TestReplyString)
        }
      }
    }
  }

} 
Example 172
Source File: DownloadSupportSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import java.io.FileNotFoundException
import java.net.URL

import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}
import scala.io.Source
import scala.tools.nsc.io.File

class DownloadSupportSpec extends FunSpec with Matchers with BeforeAndAfter {
  val downloadDestinationUrl = new URL("file:///tmp/testfile2.ext")

  val testFileContent = "This is a test"
  val testFileName = "/tmp/testfile.txt"

  //  Create a test file for downloading
  before {
    File(testFileName).writeAll(testFileContent)
  }

  //  Cleanup what we made
  after {
    if (File(testFileName).exists) File(testFileName).delete()
    if (File(downloadDestinationUrl.getPath).exists) File(downloadDestinationUrl.getPath).delete()
  }

  describe("DownloadSupport"){
    describe("#downloadFile( String, String )"){
      it("should download a file to the download directory"){
        val testFileUrl = "file:///tmp/testfile.txt"

        //  Create our utility and download the file
        val downloader = new Object with DownloadSupport
        downloader.downloadFile(
          testFileUrl,
          downloadDestinationUrl.getProtocol + "://" +
            downloadDestinationUrl.getPath)

        //  Verify the file contents are what was in the original file
        val downloadedFileContent: String =
          Source.fromFile(downloadDestinationUrl.getPath).mkString

        downloadedFileContent should be (testFileContent)
      }

    }

    describe("#downloadFile( URL, URL )"){
      it("should download a file to the download directory"){
        val testFileUrl = new URL("file:///tmp/testfile.txt")

        val downloader = new Object with DownloadSupport
        downloader.downloadFile(testFileUrl, downloadDestinationUrl)

        //  Verify the file contents are what was in the original file
        val downloadedFileContent: String =
          Source.fromFile(downloadDestinationUrl.getPath).mkString

        downloadedFileContent should be (testFileContent)
      }

      it("should throw FileNotFoundException if the download URL is bad"){
        val badFilename = "file:///tmp/testbadfile.txt"
        if (File(badFilename).exists) File(badFilename).delete()

        val badFileUrl = new URL(badFilename)

        val downloader = new Object with DownloadSupport
        intercept[FileNotFoundException] {
          downloader.downloadFile(badFileUrl, downloadDestinationUrl)
        }
      }

      it("should throw FileNotFoundException if the download ") {
        val testFileUrl = new URL("file:///tmp/testfile.txt")
        val badDestinationUrl =
          new URL("file:///tmp/badloc/that/doesnt/exist.txt")

        val downloader = new Object with DownloadSupport
        intercept[FileNotFoundException] {
          downloader.downloadFile(testFileUrl, badDestinationUrl)
        }
      }
    }
  }

} 
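
The spec above asserts on exceptions with intercept[FileNotFoundException] { ... }. ScalaTest's Matchers also provide the thrownBy style used elsewhere in these examples; a minimal sketch of both, where the division by zero is only a stand-in for code expected to fail:

import org.scalatest.{FunSpec, Matchers}

class ExceptionAssertionSketch extends FunSpec with Matchers {
  describe("asserting that code throws") {
    it("should support both intercept and the thrownBy DSL") {
      val zero = 0

      // intercept returns the thrown exception so its fields can be inspected
      val e = intercept[ArithmeticException] { 1 / zero }
      e.getMessage should include ("zero")

      // the Matchers DSL reads more like a specification
      an [ArithmeticException] should be thrownBy { 1 / zero }
      noException should be thrownBy { 1 + 1 }
    }
  }
}
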
Example 173
Source File: MultiOutputStreamSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import java.io.OutputStream

import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}
import org.mockito.Matchers._
import org.mockito.Mockito._

class MultiOutputStreamSpec
  extends FunSpec with Matchers with MockitoSugar with BeforeAndAfter {

  describe("MultiOutputStream") {
    val listOfMockOutputStreams = List(mock[OutputStream], mock[OutputStream])
    val multiOutputStream = MultiOutputStream(listOfMockOutputStreams)

    describe("#close") {
      it("should call #close on all internal output streams") {
        multiOutputStream.close()

        listOfMockOutputStreams.foreach(mockOutputStream => verify(mockOutputStream).close())
      }
    }

    describe("#flush") {
      it("should call #flush on all internal output streams") {
        multiOutputStream.flush()

        listOfMockOutputStreams.foreach(mockOutputStream => verify(mockOutputStream).flush())
      }
    }

    describe("#write(int)") {
      it("should call #write(int) on all internal output streams") {
        multiOutputStream.write(anyInt())

        listOfMockOutputStreams.foreach(
          mockOutputStream => verify(mockOutputStream).write(anyInt()))
      }
    }
    describe("#write(byte[])") {
      it("should call #write(byte[]) on all internal output streams") {
        multiOutputStream.write(any[Array[Byte]])

        listOfMockOutputStreams.foreach(
          mockOutputStream => verify(mockOutputStream).write(any[Array[Byte]]))
      }
    }

    describe("#write(byte[], int, int)") {
      it("should call #write(byte[], int, int) on all internal output streams") {
        multiOutputStream.write(any[Array[Byte]], anyInt(), anyInt())

        listOfMockOutputStreams.foreach(
          mockOutputStream =>
            verify(mockOutputStream).write(any[Array[Byte]], anyInt(), anyInt()))
      }
    }
  }
} 
Example 174
Source File: KeyValuePairUtilsSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import joptsimple.util.KeyValuePair
import org.scalatest.{Matchers, FunSpec}

class KeyValuePairUtilsSpec extends FunSpec with Matchers {
  private object TestKeyValuePair {
    def apply(key: String, value: String) = KeyValuePair.valueOf(s"$key=$value")
  }

  describe("KeyValuePairUtils") {
    describe("#stringToKeyValuePairSeq") {
      it("should throw an exception when given a null string") {
        intercept[IllegalArgumentException] {
          KeyValuePairUtils.stringToKeyValuePairSeq(null)
        }
      }

      it("should convert an empty string to an empty sequence") {
        val expected = Nil
        val actual = KeyValuePairUtils.stringToKeyValuePairSeq("")

        actual should be (expected)
      }

      it("should convert a single key-value pair to a sequence with one pair") {
        val expected = Seq(TestKeyValuePair("key", "value"))
        val actual = KeyValuePairUtils.stringToKeyValuePairSeq("key=value")

        actual should be (expected)
      }

      it("should convert multiple key-value pairs using the provided delimiter") {
        val expected = Seq(
          TestKeyValuePair("key1", "value1"),
          TestKeyValuePair("key2", "value2")
        )
        val actual = KeyValuePairUtils.stringToKeyValuePairSeq(
          "key1=value1, key2=value2", ",")

        actual should be (expected)
      }

      it("should fail if the string does not contain valid key-value pairs") {
        KeyValuePairUtils.stringToKeyValuePairSeq("not valid")
      }
    }

    describe("#keyValuePairSeqToString") {
      it("should throw an exception when given a null sequence") {
        intercept[IllegalArgumentException] {
          KeyValuePairUtils.keyValuePairSeqToString(null)
        }
      }

      it("should return an empty string if the sequence is empty") {
        val expected = ""
        val actual = KeyValuePairUtils.keyValuePairSeqToString(Nil)

        actual should be (expected)
      }

      it("should generate key=value for a key-value pair") {
        val expected = "key=value"
        val actual = KeyValuePairUtils.keyValuePairSeqToString(
          Seq(TestKeyValuePair("key", "value")))

        actual should be (expected)
      }

      it("should use the provided delimiter to separate key-value pairs") {
        val expected = "key1=value1,key2=value2"
        val actual = KeyValuePairUtils.keyValuePairSeqToString(Seq(
          TestKeyValuePair("key1", "value1"),
          TestKeyValuePair("key2", "value2")
        ), ",")

        actual should be (expected)
      }

      it("should trim whitespace from keys and values") {
        val expected = "key1=value1,key2=value2"
        val actual = KeyValuePairUtils.keyValuePairSeqToString(Seq(
          TestKeyValuePair(" key1", "  value1 "),
          TestKeyValuePair("\tkey2 ", "value2\t")
        ), ",")

        actual should be (expected)
      }
    }
  }
} 
Example 175
Source File: ArgumentParsingSupportSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}
import joptsimple.{OptionSet, OptionSpec, OptionParser}
import org.scalatest.mock.MockitoSugar

import org.mockito.Mockito._
import org.mockito.Matchers._

import collection.JavaConverters._

class ArgumentParsingSupportSpec extends FunSpec with Matchers
  with BeforeAndAfter with MockitoSugar
{
  private var mockOptions: OptionSet = _
  private var mockParser: OptionParser = _
  private var argumentParsingInstance: ArgumentParsingSupport = _

  before {
    mockOptions = mock[OptionSet]
    mockParser = mock[OptionParser]
    doReturn(mockOptions).when(mockParser).parse(anyVararg[String]())

    argumentParsingInstance = new Object() with ArgumentParsingSupport {
      override protected lazy val parser: OptionParser = mockParser
    }
  }

  describe("ArgumentParsingSupport") {
    describe("#parseArgs") {
      it("should invoke the underlying parser's parse method") {
        doReturn(Nil.asJava).when(mockOptions).nonOptionArguments()
        argumentParsingInstance.parseArgs("")

        verify(mockParser).parse(anyString())
      }

      it("should return an empty list if there are no non-option arguments") {
        val expected = Nil
        doReturn(expected.asJava).when(mockOptions).nonOptionArguments()
        val actual = argumentParsingInstance.parseArgs((
          "--transitive" :: expected
        ).mkString(" "))

        actual should be (expected)
      }

      it("should return a list containing non-option arguments") {
        val expected = "non-option" :: Nil
        doReturn(expected.asJava).when(mockOptions).nonOptionArguments()
        val actual = argumentParsingInstance.parseArgs((
          "--transitive" :: expected
          ).mkString(" "))

        actual should be (expected)
      }
    }
  }
} 
Example 176
Source File: ConditionalOutputStreamSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import java.io.OutputStream

import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.{Matchers, FunSpec}

class ConditionalOutputStreamSpec extends FunSpec with Matchers with MockitoSugar {
  describe("ConditionalOutputStream") {
    describe("#()") {
      it("should throw an exception if the output stream is null") {
        intercept[IllegalArgumentException] {
          new ConditionalOutputStream(null, true)
        }
      }
    }

    describe("#write") {
      it("should call the underlying write if the condition is true") {
        val mockOutputStream = mock[OutputStream]
        val conditionalOutputStream =
          new ConditionalOutputStream(mockOutputStream, true)

        val expected = 101
        conditionalOutputStream.write(expected)

        verify(mockOutputStream).write(expected)
      }

      it("should call the underlying write if the condition becomes true") {
        val mockOutputStream = mock[OutputStream]
        var condition = false

        val conditionalOutputStream =
          new ConditionalOutputStream(mockOutputStream, condition)

        condition = true

        val expected = 101
        conditionalOutputStream.write(expected)

        verify(mockOutputStream).write(expected)
      }

      it("should not call the underlying write if the condition is false") {
        val mockOutputStream = mock[OutputStream]
        val conditionalOutputStream =
          new ConditionalOutputStream(mockOutputStream, false)

        val expected = 101
        conditionalOutputStream.write(expected)

        verify(mockOutputStream, never()).write(any[Byte])
      }

      it("should not call the underlying write if the condition becomes false") {
        val mockOutputStream = mock[OutputStream]
        var condition = true

        val conditionalOutputStream =
          new ConditionalOutputStream(mockOutputStream, condition)

        condition = false

        val expected = 101
        conditionalOutputStream.write(expected)

        verify(mockOutputStream, never()).write(any[Byte])
      }
    }
  }
} 
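
The spec above proves that nothing was written by combining Mockito's verify with the never() mode. A short sketch of the most common verification modes, reusing an OutputStream mock as in the example above (the calls themselves are invented for illustration):

import java.io.OutputStream

import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers}

class VerificationModeSketch extends FunSpec with Matchers with MockitoSugar {
  describe("Mockito verification modes") {
    it("should check how many times a method was invoked") {
      val stream = mock[OutputStream]

      stream.write(1)
      stream.write(2)

      verify(stream, times(2)).write(anyInt())      // exactly two write(int) calls
      verify(stream, atLeastOnce()).write(anyInt()) // at least one call happened
      verify(stream, never()).flush()               // flush was never invoked
      verifyNoMoreInteractions(stream)              // every interaction is accounted for
    }
  }
}
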
Example 177
Source File: InternalClassLoaderSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic

import org.scalatest.{Matchers, FunSpec}
import org.scalatest.mock.MockitoSugar

class InternalClassLoaderSpec extends FunSpec with Matchers with MockitoSugar {

  abstract class MockClassLoader extends ClassLoader(null) {
    override def loadClass(name: String): Class[_] = null
  }

  describe("InternalClassLoader") {
    describe("#loadClass") {
      it("should invoke super loadClass with loader's package prepended") {
        val expected = classOf[Class[_]]
        val packageName = "org.apache.toree.magic"
        val className = "SomeClass"

        var parentLoadClassCorrectlyInvoked = false

        val internalClassLoader = new InternalClassLoader(null) {
          override private[magic] def parentLoadClass(name: String, resolve: Boolean): Class[_] = {
            parentLoadClassCorrectlyInvoked =
              name == s"$packageName.$className" && resolve
            expected
          }
        }

        internalClassLoader.loadClass(className, true) should be (expected)

        parentLoadClassCorrectlyInvoked should be (true)
      }

      it("should use loader's package instead of provided package first") {
        val expected = classOf[Class[_]]
        val forcedPackageName = "org.apache.toree.magic"
        val packageName = "some.other.package"
        val className = "SomeClass"

        var parentLoadClassCorrectlyInvoked = false

        val internalClassLoader = new InternalClassLoader(null) {
          override private[magic] def parentLoadClass(name: String, resolve: Boolean): Class[_] = {
            parentLoadClassCorrectlyInvoked =
              name == s"$forcedPackageName.$className" && resolve
            expected
          }
        }

        internalClassLoader.loadClass(s"$packageName.$className", true) should be (expected)

        parentLoadClassCorrectlyInvoked should be (true)
      }

      it("should invoke super loadClass with given package if internal missing") {
        val expected = classOf[Class[_]]
        val packageName = "some.other.package"
        val className = "SomeClass"

        var parentLoadClassCorrectlyInvoked = false

        var methodCalled = false
        val internalClassLoader = new InternalClassLoader(null) {
          override private[magic] def parentLoadClass(name: String, resolve: Boolean): Class[_] = {
            if (!methodCalled) {
              methodCalled = true
              throw new ClassNotFoundException()
            }

            parentLoadClassCorrectlyInvoked =
              name == s"$packageName.$className" && resolve
            expected
          }
        }

        internalClassLoader.loadClass(s"$packageName.$className", true) should
          be (expected)

        parentLoadClassCorrectlyInvoked should be (true)
      }
    }
  }
} 
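A hedged sketch of the loader behaviour this spec describes (an assumption for illustration, not the incubator-toree implementation): try the bare class name under the loader's own package first, then fall back to the requested name when that lookup fails.

package org.apache.toree.magic

// Hypothetical sketch: prefer this loader's package, then fall back to the name as given.
class InternalClassLoader(classLoader: ClassLoader) extends ClassLoader(classLoader) {
  private val internalPackage = "org.apache.toree.magic"

  private[magic] def parentLoadClass(name: String, resolve: Boolean): Class[_] =
    super.loadClass(name, resolve)

  override def loadClass(name: String, resolve: Boolean): Class[_] = {
    val className = name.split('.').last
    try parentLoadClass(s"$internalPackage.$className", resolve)
    catch {
      case _: ClassNotFoundException => parentLoadClass(name, resolve)
    }
  }
}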
Example 178
Source File: CoursierDependencyDownloaderSpec.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.dependencies

import java.net.URL
import java.nio.file.Files

import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}

class CoursierDependencyDownloaderSpec extends FunSpec with Matchers
  with OneInstancePerTest
{
  private val coursierDependencyDownloader = new CoursierDependencyDownloader

  describe("CoursierDependencyDownloader") {
    describe("#addMavenRepository") {
      it("should add to the list of repositories") {
        val repo = new URL("http://some-repo.com")

        coursierDependencyDownloader.addMavenRepository(repo, None)

        val repos = coursierDependencyDownloader.getRepositories

        repos should contain (repo.toURI)
      }
    }

    describe("#removeMavenRepository") {
      it("should remove from the list of repositories") {
        val repo = new URL("http://some-repo.com")

        coursierDependencyDownloader.addMavenRepository(repo, None)
        coursierDependencyDownloader.removeMavenRepository(repo)

        val repos = coursierDependencyDownloader.getRepositories

        repos should not contain (repo.toURI)
      }
    }

    describe("#setDownloadDirectory") {
      it("should set the new download directory if valid") {
        val validDir = Files.createTempDirectory("tmpdir").toFile
        validDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(validDir)
        result should be (true)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should be (validDir.getAbsolutePath)
      }

      it("should not change the directory if given a file") {
        val invalidDir = Files.createTempFile("tmp", "file").toFile
        invalidDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(invalidDir)
        result should be (false)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should not be (invalidDir.getAbsolutePath)
      }

      it("should support creating missing directories") {
        val baseDir = Files.createTempDirectory("tmpdir").toFile
        val validDir = baseDir.toPath.resolve("otherdir").toFile
        validDir.deleteOnExit()
        baseDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(validDir)
        result should be (true)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should be (validDir.getAbsolutePath)
      }
    }

    describe("#getRepositories") {
      it("should have the default repositories") {
        val expected = Seq(DependencyDownloader.DefaultMavenRepository.toURI)

        val actual = coursierDependencyDownloader.getRepositories

        actual should be (expected)
      }
    }

    describe("#getDownloadDirectory") {
      it("should have the default download directory") {
        val expected = DependencyDownloader.DefaultDownloadDirectory.getAbsolutePath

        val actual = coursierDependencyDownloader.getDownloadDirectory

        actual should be (expected)
      }
    }
  }
} 
Example 179
Source File: BrokerTransformerSpec.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.interpreter.broker

import org.apache.toree.interpreter.{ExecuteError, Results}
import org.scalatest.concurrent.Eventually
import scala.concurrent.Promise
import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}

class BrokerTransformerSpec extends FunSpec with Matchers
  with OneInstancePerTest with Eventually
{
  private val brokerTransformer = new BrokerTransformer

  describe("BrokerTransformer") {
    describe("#transformToInterpreterResult") {
      it("should convert to success with result output if no failure") {
        val codeResultPromise = Promise[BrokerTypes.CodeResults]()

        val transformedFuture = brokerTransformer.transformToInterpreterResult(
          codeResultPromise.future
        )

        val successOutput = "some success"
        codeResultPromise.success(successOutput)

        eventually {
          val result = transformedFuture.value.get.get
          result should be((Results.Success, Left(Map("text/plain" -> successOutput))))
        }
      }

      it("should convert to error with broker exception if failure") {
        val codeResultPromise = Promise[BrokerTypes.CodeResults]()

        val transformedFuture = brokerTransformer.transformToInterpreterResult(
          codeResultPromise.future
        )

        val failureException = new BrokerException("some failure")
        codeResultPromise.failure(failureException)

        eventually {
          val result = transformedFuture.value.get.get
          result should be((Results.Error, Right(ExecuteError(
            name = failureException.getClass.getName,
            value = failureException.getLocalizedMessage,
            stackTrace = failureException.getStackTrace.map(_.toString).toList
          ))))
        }
      }
    }
  }
} 
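A hedged sketch of the transformation asserted above, assuming BrokerTypes.CodeResults is a String alias and that Results exposes a common Result type; this is an illustration, not the incubator-toree source:

package org.apache.toree.interpreter.broker

import scala.concurrent.{ExecutionContext, Future}
import org.apache.toree.interpreter.{ExecuteError, Results}

// Hypothetical sketch: success becomes plain-text output, a BrokerException becomes an ExecuteError.
class BrokerTransformerSketch {
  type InterpreterResult = (Results.Result, Either[Map[String, String], ExecuteError])

  def transformToInterpreterResult(futureResult: Future[String])
                                  (implicit ec: ExecutionContext): Future[InterpreterResult] =
    futureResult
      .map[InterpreterResult](output => (Results.Success, Left(Map("text/plain" -> output))))
      .recover {
        case ex: BrokerException => (Results.Error, Right(ExecuteError(
          name = ex.getClass.getName,
          value = ex.getLocalizedMessage,
          stackTrace = ex.getStackTrace.map(_.toString).toList
        )))
      }
}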
Example 180
Source File: BrokerProcessHandlerSpec.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.interpreter.broker

import org.apache.commons.exec.ExecuteException
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}
import org.mockito.Mockito._
import org.mockito.Matchers._

class BrokerProcessHandlerSpec extends FunSpec with Matchers
  with OneInstancePerTest with MockitoSugar
{
  private val mockBrokerBridge = mock[BrokerBridge]
  private val brokerProcessHandler = new BrokerProcessHandler(
    mockBrokerBridge,
    restartOnFailure = true,
    restartOnCompletion = true
  )

  describe("BrokerProcessHandler") {
    describe("#onProcessFailed") {
      it("should invoke the reset method") {
        val mockResetMethod = mock[String => Unit]
        brokerProcessHandler.setResetMethod(mockResetMethod)

        brokerProcessHandler.onProcessFailed(mock[ExecuteException])

        verify(mockResetMethod).apply(anyString())
      }

      it("should invoke the restart method if the proper flag is set to true") {
        val mockRestartMethod = mock[() => Unit]
        brokerProcessHandler.setRestartMethod(mockRestartMethod)

        brokerProcessHandler.onProcessFailed(mock[ExecuteException])

        verify(mockRestartMethod).apply()
      }
    }

    describe("#onProcessComplete") {
      it("should invoke the reset method") {
        val mockResetMethod = mock[String => Unit]
        brokerProcessHandler.setResetMethod(mockResetMethod)

        brokerProcessHandler.onProcessComplete(0)

        verify(mockResetMethod).apply(anyString())
      }

      it("should invoke the restart method if the proper flag is set to true") {
        val mockRestartMethod = mock[() => Unit]
        brokerProcessHandler.setRestartMethod(mockRestartMethod)

        brokerProcessHandler.onProcessComplete(0)

        verify(mockRestartMethod).apply()
      }
    }
  }
} 
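Both describe blocks verify the same pattern: the handler always calls the reset callback and calls the restart callback only when the matching flag was set at construction. A minimal sketch under those assumptions (hypothetical names, not the incubator-toree source):

import org.apache.commons.exec.{ExecuteException, ExecuteResultHandler}

// Hypothetical sketch: reset on every outcome, restart only when the matching flag is true.
class BrokerProcessHandlerSketch(
  restartOnFailure: Boolean,
  restartOnCompletion: Boolean
) extends ExecuteResultHandler {
  private var resetMethod: String => Unit = _ => {}
  private var restartMethod: () => Unit = () => {}

  def setResetMethod(reset: String => Unit): Unit = resetMethod = reset
  def setRestartMethod(restart: () => Unit): Unit = restartMethod = restart

  override def onProcessFailed(ex: ExecuteException): Unit = {
    resetMethod("Process failed!")
    if (restartOnFailure) restartMethod()
  }

  override def onProcessComplete(exitValue: Int): Unit = {
    resetMethod("Process completed!")
    if (restartOnCompletion) restartMethod()
  }
}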
Example 181
Source File: BrokerBridgeSpec.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.interpreter.broker

import org.apache.toree.kernel.api.KernelLike
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}

class BrokerBridgeSpec extends FunSpec with Matchers with OneInstancePerTest
  with MockitoSugar
{
  private val mockBrokerState = mock[BrokerState]
  private val mockKernel = mock[KernelLike]

  private val brokerBridge = new BrokerBridge(
    mockBrokerState,
    mockKernel
  )

  describe("BrokerBridge") {
    describe("#state") {
      it("should return the broker state from the constructor") {
        brokerBridge.state should be (mockBrokerState)
      }
    }

    describe("#kernel") {
      it("should return the kernel from the constructor") {
        brokerBridge.kernel should be (mockKernel)
      }
    }
  }
} 
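Given the two assertions above, a hedged sketch of the bridge (assumed for illustration, not the incubator-toree source) is just a holder that exposes its constructor arguments:

package org.apache.toree.interpreter.broker

import org.apache.toree.kernel.api.KernelLike

// Hypothetical sketch: expose the state and kernel the bridge was built with.
class BrokerBridge(val state: BrokerState, val kernel: KernelLike)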
Example 182
Source File: PluginManagerSpecForIntegration.scala    From incubator-toree   with Apache License 2.0
package integration

import org.apache.toree.plugins.{PluginManager, Plugin}
import org.apache.toree.plugins.annotations.Init
import org.scalatest.{OneInstancePerTest, Matchers, FunSpec}

class PluginManagerSpecForIntegration extends FunSpec with Matchers
  with OneInstancePerTest
{
  private val pluginManager = new PluginManager

  describe("PluginManager") {
    it("should be able to load and initialize internal plugins") {
      val plugins = pluginManager.initialize()
      plugins.map(_.name) should contain allOf (
        classOf[NonCircularPlugin].getName,
        classOf[RegisterPluginA].getName,
        classOf[ConsumePluginA].getName
      )
    }

    it("should be able to initialize plugins with dependencies provided by other plugins") {
      val cpa = pluginManager.loadPlugin("", classOf[ConsumePluginA]).get
      val rpa = pluginManager.loadPlugin("", classOf[RegisterPluginA]).get

      val results = pluginManager.initializePlugins(Seq(cpa, rpa))

      results.forall(_.isSuccess) should be (true)
    }

    it("should fail when plugins have circular dependencies") {
      val cp = pluginManager.loadPlugin("", classOf[CircularPlugin]).get

      val results = pluginManager.initializePlugins(Seq(cp))

      results.forall(_.isFailure) should be (true)
    }

    it("should be able to handle non-circular dependencies within the same plugin") {
      val ncp = pluginManager.loadPlugin("", classOf[NonCircularPlugin]).get

      val results = pluginManager.initializePlugins(Seq(ncp))

      results.forall(_.isSuccess) should be (true)
    }
  }
}

private class DepA
private class DepB

private class CircularPlugin extends Plugin {
  @Init def initMethodA(depA: DepA) = register(new DepB)
  @Init def initMethodB(depB: DepB) = register(new DepA)
}

private class NonCircularPlugin extends Plugin {
  @Init def initMethodB(depB: DepB) = {}
  @Init def initMethodA(depA: DepA) = register(new DepB)
  @Init def initMethod() = register(new DepA)
}

private class RegisterPluginA extends Plugin {
  @Init def initMethod() = register(new DepA)
}

private class ConsumePluginA extends Plugin {
  @Init def initMethod(depA: DepA) = {}
} 
Example 183
Source File: ImplicitsSpec.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.plugins

import org.apache.toree.plugins.dependencies.Dependency
import org.scalatest.{OneInstancePerTest, Matchers, FunSpec}

class ImplicitsSpec extends FunSpec with Matchers with OneInstancePerTest {
  describe("Implicits") {
    describe("#$dep") {
      it("should convert values to dependencies with generated names") {
        import scala.reflect.runtime.universe._
        import org.apache.toree.plugins.Implicits._

        val value = new Object

        val d: Dependency[_] = value

        d.name should not be (empty)
        d.`type` should be (typeOf[Object])
        d.value should be (value)
      }

      it("should convert tuples of (string, value) to dependencies with the specified names") {
        import scala.reflect.runtime.universe._
        import org.apache.toree.plugins.Implicits._

        val name = "some name"
        val value = new Object

        val d: Dependency[_] = name -> value

        d.name should be (name)
        d.`type` should be (typeOf[Object])
        d.value should be (value)
      }
    }
  }
} 
Example 184
Source File: OrderServiceTest.scala    From 006877   with MIT License
package aia.integration

import scala.concurrent.duration._
import scala.xml.NodeSeq
import akka.actor.Props

import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server._
import akka.http.scaladsl.testkit.ScalatestRouteTest

import org.scalatest.{ Matchers, WordSpec }
 
class OrderServiceTest extends WordSpec 
    with Matchers 
    with OrderService
    with ScalatestRouteTest {

  implicit val executionContext = system.dispatcher
  implicit val requestTimeout = akka.util.Timeout(1 second)
  val processOrders = 
    system.actorOf(Props(new ProcessOrders), "orders")

  "The order service" should {
    "return NotFound if the order cannot be found" in {
      Get("/orders/1") ~> routes ~> check {
        status shouldEqual StatusCodes.NotFound
      }
    }

    "return the tracking order for an order that was posted" in {
      val xmlOrder = 
      <order><customerId>customer1</customerId>
        <productId>Akka in action</productId>
        <number>10</number>
      </order>
      
      Post("/orders", xmlOrder) ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        val xml = responseAs[NodeSeq]
        val id = (xml \\ "id").text.toInt
        val orderStatus = (xml \\ "status").text
        id shouldEqual 1
        orderStatus shouldEqual "received"
      }
      Get("/orders/1") ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        val xml = responseAs[NodeSeq]
        val id = (xml \\ "id").text.toInt
        val orderStatus = (xml \\ "status").text
        id shouldEqual 1
        orderStatus shouldEqual "processing"
      }
    }
  }
} 
Example 185
Source File: AxisOptionsWriterSpec.scala    From scala-plotly-client   with MIT License
package co.theasi.plotly.writer

import org.scalatest.{FlatSpec, Matchers}

import org.json4s.{JString, JNothing}

import co.theasi.plotly.{AxisOptions, AxisType}

class AxisOptionsWriterSpec extends FlatSpec with Matchers {
  "toJson" should "serialize the plot type" in {
    val options = AxisOptions().axisType(AxisType.Log)
    val jobj = AxisOptionsWriter.toJson(options)
    jobj \ "type" shouldEqual JString("log")
  }

  it should "serialize to null if the plot type is not specified" in {
    val options = AxisOptions()
    val jobj = AxisOptionsWriter.toJson(options)
    jobj \ "type" shouldEqual JNothing
  }
} 
Example 186
Source File: ModuleRendererTest.scala    From sbt-avro4s   with MIT License
package com.sksamuel.avro4s

import org.scalatest.{Matchers, WordSpec}

class ModuleRendererTest extends WordSpec with Matchers {

  "new ModuleRenderer()" should {
    "write field for Time" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Time)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: java.time.LocalTime\n)"
    }
    "write field for Int" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.String)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: String\n)"
    }
    "write field for String" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Int)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: Int\n)"
    }
    "write field for Boolean" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Boolean)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: Boolean\n)"
    }
    "write field for doubles" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Double)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: Double\n)"
    }
    "write field for Instant" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Instant)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: java.time.Instant\n)"
    }
    "write field for longs" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.Long)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: Long\n)"
    }
    "write field for uuids" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("foo", PrimitiveType.UUID)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  foo: java.util.UUID\n)"
    }
    "generate field for Maps with strings" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", MapType(PrimitiveType.String))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Map[String, String]\n)"
    }
    "generate field for Maps with doubles" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", MapType(PrimitiveType.Double))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Map[String, Double]\n)"
    }
    "generate field for arrays of strings" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", ArrayType(PrimitiveType.String))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Seq[String]\n)"
    }
    "generate field for arrays of doubles" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", ArrayType(PrimitiveType.Double))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Seq[Double]\n)"
    }
    "generate field for arrays of longs" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", ArrayType(PrimitiveType.Long))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Seq[Long]\n)"
    }
    "generate field for arrays of records" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", ArrayType(RecordType("com.sammy", "NestedClass", Seq(FieldDef("name", PrimitiveType.String)))))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Seq[com.sammy.NestedClass]\n)"
    }
    "generate field for bytes" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", PrimitiveType.Bytes)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Array[Byte]\n)"
    }
    "generate BigDecimal field" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", PrimitiveType.BigDecimal)))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: BigDecimal\n)"
    }
    "generate java enum for enums" in {
      new ModuleRenderer()(EnumType("com.sammy", "MyClass", Seq("Boo", "Foo", "Hoo"))) shouldBe "//auto generated code by avro4s\npublic enum MyClass{\n    Boo, Foo, Hoo\n}"
    }
    "generate option for nullable unions" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", UnionType(NullType, PrimitiveType.String))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Option[String]\n)"
    }
    "generate either for union types of records" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", UnionType(PrimitiveType.String, RecordType("com.sammy", "NestedClass", Seq(FieldDef("name", PrimitiveType.String)))))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Either[String, com.sammy.NestedClass]\n)"
    }
    "generate Option[Either] for union types of 3 types with null" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", UnionType(NullType, PrimitiveType.Int, PrimitiveType.Boolean))))) shouldBe "//auto generated code by avro4s\ncase class MyClass(\n  name: Option[Either[Int, Boolean]]\n)"
    }
    "generate coproducts for union types of 3+ non-null types" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", UnionType(PrimitiveType.String, PrimitiveType.Int, PrimitiveType.Boolean))))) should include ("//auto generated code by avro4s\ncase class MyClass(\n  name: shapeless.:+:[String, shapeless.:+:[Int, shapeless.:+:[Boolean, shapeless.CNil]]]\n)")
    }
    "generate Option[coproducts] for union types of 3+ non-null types with null" in {
      new ModuleRenderer()(RecordType("com.sammy", "MyClass", Seq(FieldDef("name", UnionType(NullType, PrimitiveType.String, PrimitiveType.Int, PrimitiveType.Boolean))))) should include ("//auto generated code by avro4s\ncase class MyClass(\n  name: Option[shapeless.:+:[String, shapeless.:+:[Int, shapeless.:+:[Boolean, shapeless.CNil]]]]\n)")
    }}
} 
Example 187
Source File: ClassRendererTest.scala    From sbt-avro4s   with MIT License
package com.sksamuel.avro4s

import org.scalatest.{WordSpec, Matchers}

class ClassRendererTest extends WordSpec with Matchers {

  val types = ModuleGenerator(getClass.getResourceAsStream("/gameofthrones.avsc"))
  val fields = types.collect {
    case record: RecordType => record.fields
  }.flatten

  "ClassRenderer" should {
    "generate field for Int fields" in {
      fields should contain(FieldDef("kingdoms", PrimitiveType("Int")))
    }
    "generate field for Boolean fields" in {
      fields should contain(FieldDef("aired", PrimitiveType("Boolean")))
    }
    "generate field for Instant fields" in {
      fields should contain(FieldDef("airedDate", PrimitiveType("java.time.Instant")))
    }
    "generate field for Double fields" in {
      fields should contain(FieldDef("temperature", PrimitiveType("Double")))
    }
    "generate field for String fields" in {
      fields should contain(FieldDef("ruler", PrimitiveType("String")))
    }
    "generate field for Long fields" in {
      fields should contain(FieldDef("deathCount", PrimitiveType("Long")))
    }
    "generate definition for enums" in {
      val enum = ModuleGenerator(getClass.getResourceAsStream("/enum.avsc")).head
      enum shouldBe EnumType("com.example.avro", "MyEnum", List("DONE", "ARCHIVED", "DELETED"))
    }
  }
} 
Example 188
Source File: TemplateGeneratorTest.scala    From sbt-avro4s   with MIT License
package com.sksamuel.avro4s

import org.scalatest.{WordSpec, Matchers}

class TemplateGeneratorTest extends WordSpec with Matchers {

  "TemplateGenerator" should {
    "generate a file per enum" in {
      val enums = TemplateGenerator(Seq(EnumType("com.a", "Boo", Seq("A", "B")), EnumType("com.a", "Foo", Seq("A", "B"))))
      enums shouldBe Seq(Template("com/a/Boo", "java", "package com.a;\n\n//auto generated code by avro4s\npublic enum Boo{\n    A, B\n}"), Template("com/a/Foo", "java", "package com.a;\n\n//auto generated code by avro4s\npublic enum Foo{\n    A, B\n}"))
    }
    "generate one file for all records of same namespace" in {
      val enums = TemplateGenerator(Seq(RecordType("com.a", "Boo", Seq(FieldDef("name", PrimitiveType.String), FieldDef("bibble", PrimitiveType.Long))), RecordType("com.a", "Foo", Seq(FieldDef("dibble", PrimitiveType.Double), FieldDef("bibble", PrimitiveType.Boolean)))))
      enums shouldBe Seq(Template("com/a/domain", "scala", "package com.a\n\n//auto generated code by avro4s\ncase class Boo(\n  name: String,\n  bibble: Long\n)\n\n//auto generated code by avro4s\ncase class Foo(\n  dibble: Double,\n  bibble: Boolean\n)"))
    }
  }
} 
Example 189
Source File: FavouriteSongSuite.scala    From PureCSV   with Apache License 2.0
import org.apache.spark.SparkContext
import org.joda.time.Period
import org.scalatest.{Matchers, FunSuite}
import purecsv.example.favouritesong.FavouriteSong
import purecsv.example.favouritesong.FavouriteSong.periodStringConverter
import purecsv.safe.converter.StringConverter


class FavouriteSongSuite extends FunSuite with Matchers {

  test("JodaTime Period can be read from String") {
    val period = StringConverter[Period].from("P0000-00-00T00:04:53")
    period.getYears   should be (0)
    period.getMonths  should be (0)
    period.getDays    should be (0)
    period.getHours   should be (0)
    period.getMinutes should be (4)
    period.getSeconds should be (53)
  }

  test("FavouriteSong returns the songs with highest " +
       "like per artist and the number of broken records") {

    // 3 records with 1 broken (Human Nature)
    val rawRecords = Seq(
        "Billie Jean,Michael Jackson,Thriller,P0000-00-00T00:04:53,6430000"
      , "Human Nature,Michael Jackson,Thriller,P012,"
      , "Thriller,Michael Jackson,Thriller,P0000-00-00T00:05:59,5700000"
      )

    val sc = new SparkContext("local[2]", "test favourite song")
    val rawSongs = sc.parallelize(rawRecords)
    val result = FavouriteSong(rawSongs)

    // one record is broken
    result.brokenRecords should be (1)

    // the top song for Michael Jackson is Billie Jean
    result.artistToBestSong should contain theSameElementsAs(Seq("Michael Jackson" -> "Billie Jean"))
  }

} 
Example 190
Source File: ConverterSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.unsafe.converter

import java.util.UUID

import purecsv.unsafe.converter.defaults.rawfields._
import purecsv.unsafe.converter.defaults.string._
import purecsv.util.serializeAndDeserialize
import org.scalatest.{FunSuite, Matchers}
import shapeless.{::, Generic, HNil}

case class Event(ts: Long, msg: String)

class ConverterSuite extends FunSuite with Matchers {

  test("conversion String <-> Boolean works") {
    StringConverter[Boolean].to(true) should be ("true")
    StringConverter[Boolean].from("false") should be (false)
    StringConverter[Boolean].from("1") should be (true)
    StringConverter[Boolean].from("TRUE") should be (true)
  }

  test("conversion String <-> UUID works") {
    val uuid = UUID.randomUUID()
    StringConverter[UUID].to(uuid) should be (uuid.toString)
    StringConverter[UUID].from(uuid.toString) should be (uuid)
    StringConverter[UUID].from(uuid.toString.toLowerCase) should be (uuid)
    StringConverter[UUID].from(uuid.toString.toUpperCase) should be (uuid)
  }

  test("conversion HNil <-> String works") {
    RawFieldsConverter[HNil].to(HNil) should contain theSameElementsInOrderAs  (Seq.empty)
    RawFieldsConverter[HNil].from(Seq.empty) should be (HNil)
  }

  test("conversion HList <-> String works") {
    val conv = RawFieldsConverter[String :: Int :: HNil]
    conv.to("test" :: 1 :: HNil) should contain theSameElementsInOrderAs (Seq("\"test\"","1"))
    conv.from(Seq("foo","2")) should be ("foo" :: 2 :: HNil)
  }

  test("conversion case class <-> String works") {
    val conv = RawFieldsConverter[Event]
    conv.to(Event(1,"foobar")) should contain theSameElementsInOrderAs(Seq("1","\"foobar\""))
    conv.from(Seq("2","barfoo")) should be (Event(2,"barfoo"))
  }

  class Event2(val ts: Long, var msg: String) {
    override def equals(o: Any): Boolean = o match {
      case other:Event2 => (this.ts == other.ts && this.msg == other.msg)
      case _ => false
    }
    override def toString: String = s"Event($ts, $msg)"
  }

  implicit val fooGeneric = new Generic[Event2] {
    override type Repr = Long :: String :: HNil
    override def from(r: Repr): Event2 = {
      val ts :: msg :: HNil = r
      new Event2(ts, msg)
    }
    override def to(t: Event2): Repr = t.ts :: t.msg :: HNil
  }

  test("conversion class with custom Generic <-> String works") {
    val conv = RawFieldsConverter[Event2]
    conv.to(new Event2(1,"foo")) should contain theSameElementsInOrderAs(Seq("1","\"foo\""))
    conv.from(Seq("2","bar")) should be (new Event2(2,"bar"))

    // Strings are quoted
    val event = new Event2(1,"foo")
    val expectedEvent = new Event2(1, "\"foo\"")
    conv.from(conv.to(event)) should be (expectedEvent)
  }

  test("serializing a RawFieldsConverter should work") {
    val conv = RawFieldsConverter[Event]
    val convDeserialized = serializeAndDeserialize(conv)

    convDeserialized.to(Event(1,"foobar")) should contain theSameElementsInOrderAs(Seq("1","\"foobar\""))
    convDeserialized.from(Seq("2","barfoo")) should be (Event(2,"barfoo"))
  }
} 
Example 191
Source File: unsafeSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.unsafe

import java.io.CharArrayReader
import java.nio.file.Files

import purecsv.unsafe._
import purecsv.util.serializeAndDeserialize

import org.scalatest.{FunSuite, Matchers}

case class Event(ts: Long, msg: String, user: Option[Int])

class unsafeSuite extends FunSuite with Matchers {

  val events = Seq(Event(1,"foo",None),Event(2,"bar",Some(1)))
  val rawEvents = Seq("1,\"foo\",","2,\"bar\",1")

  test("Converting an iterable of events to CSV lines works") {
    events.toCSVLines().toSeq should contain theSameElementsInOrderAs(rawEvents)
  }

  test("Reading events from a String reader works") {
    val reader = new CharArrayReader(rawEvents.mkString(System.lineSeparator()).toCharArray)
    CSVReader[Event].readCSVFromReader(reader).toSeq should be (events)
  }

  test("Can read a file written with writeCSVToFile") {
    val file = Files.createTempFile("casecsv",".csv").toFile
    file.deleteOnExit()
    events.writeCSVToFile(file)
    CSVReader[Event].readCSVFromFile(file) should contain theSameElementsInOrderAs(events)
  }

  test("serializing a CSVReader should work") {
    val csvReader = CSVReader[Event]
    val csvReaderDeserialized = serializeAndDeserialize(csvReader)

    val result = csvReaderDeserialized.readCSVFromString("123|bar|\n456|foo|3", false, '|')

    result.length should be (2)
    result should be (List(
      Event(123, "bar", None),
      Event(456, "foo", Some(3))))
  }

} 
Example 192
Source File: ConverterSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.safe.converter

import java.util.UUID

import org.scalatest.{FunSuite, Matchers}
import purecsv.safe.converter.defaults.string._
import purecsv.util.serializeAndDeserialize
import shapeless.{::, Generic, HNil}
import scala.util.Success

case class Event(ts: Long, msg: String, user: Option[Int])

class ConverterSuite extends FunSuite with Matchers {

  test("conversion String -> Try[Boolean] works") {
    StringConverter[Boolean].tryFrom("false") should be (Success(false))
    StringConverter[Boolean].tryFrom("1") should be (Success(true))
    StringConverter[Boolean].tryFrom("TRUE") should be (Success(true))
  }

  test("conversion String <-> Try[UUID] works") {
    val uuid = UUID.randomUUID()
    StringConverter[UUID].tryFrom(uuid.toString) should be (Success(uuid))
    StringConverter[UUID].tryFrom(uuid.toString.toLowerCase) should be (Success(uuid))
    StringConverter[UUID].tryFrom(uuid.toString.toUpperCase) should be (Success(uuid))
  }

  test("conversion string -> Try[Option[Int]] works") {
    StringConverter[Option[Int]].tryFrom("") should be (Success(None))
    StringConverter[Option[Int]].tryFrom("1") should be (Success(Some(1)))
  }

  test("conversion String -> HNil works") {
    RawFieldsConverter[HNil].tryFrom(Seq.empty) should be (Success(HNil))
  }

  test("conversion String -> HList works") {
    val conv = RawFieldsConverter[String :: Int :: HNil]
    conv.tryFrom(Seq("foo","2")) should be (Success("foo" :: 2 :: HNil))
  }

  test("conversion String -> case class works") {
    val conv = RawFieldsConverter[Event]
    conv.tryFrom(Seq("2","barfoo","")) should be (Success(Event(2,"barfoo",None)))
    conv.tryFrom(Seq("2","barfoo","1")) should be (Success(Event(2,"barfoo",Some(1))))
  }

  class Event2(val ts: Long, var msg: String) {
    override def equals(o: Any): Boolean = o match {
      case other:Event2 => (this.ts == other.ts && this.msg == other.msg)
      case _ => false
    }
    override def toString: String = s"Event($ts, $msg)"
  }

  implicit val fooGeneric = new Generic[Event2] {
    override type Repr = Long :: String :: HNil
    override def from(r: Repr): Event2 = {
      val ts :: msg :: HNil = r
      new Event2(ts, msg)
    }
    override def to(t: Event2): Repr = t.ts :: t.msg :: HNil
  }

  test("conversion String -> class with custom Generic works") {
    val conv = RawFieldsConverter[Event2]
    conv.tryFrom(Seq("2","bar")) should be (Success(new Event2(2,"bar")))

    // Strings are quoted
    val event = new Event2(1,"foo")
    val expectedEvent = new Event2(1, "\"foo\"")
    conv.tryFrom(conv.to(event)) should be (Success(expectedEvent))
  }

  test("serializing a RawFieldsConverter should work") {
    val conv = RawFieldsConverter[Event]
    val convDeserialized = serializeAndDeserialize(conv)

    convDeserialized.tryFrom(Seq("2","barfoo","")) should be (Success(Event(2,"barfoo",None)))
    convDeserialized.tryFrom(Seq("2","barfoo","1")) should be (Success(Event(2,"barfoo",Some(1))))
  }
} 
Example 193
Source File: TryUtilSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.safe

import org.scalatest.{Matchers, FunSuite}

import scala.util.{Failure, Success}


class MyException(val s: String) extends RuntimeException(s) {
  override def equals(o: Any): Boolean = o match {
    case e:MyException => s.equals(e.s)
    case _ => false
  }
}

class TryUtilSuite extends FunSuite with Matchers {
  import tryutil._

  def failure(s: String) = Failure(new MyException(s))

  test("getSuccessesAndFailures partition an Iterator[Try[A]] into successes and failures") {
    val startingSuccesses = Seq(Success(1),Success(2))
    val startingFailures = Seq(failure("3"),failure("4"))
    val expectedSuccesses = Seq(1 -> 1, 2 -> 2)
    val expectedFailures = Seq(3 -> new MyException("3"), 4 -> new MyException("4"))
    val (resSuccesses,resFailures) = (startingSuccesses ++ startingFailures).getSuccessesAndFailures
    resSuccesses should be (expectedSuccesses)
    resFailures should be (expectedFailures)
  }
} 
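The spec above relies on the tryutil extension getSuccessesAndFailures. A hedged sketch of that helper (assumed for illustration, not the PureCSV source) pairs every Try with its 1-based position and partitions the results:

import scala.util.{Failure, Success, Try}

// Hypothetical sketch of the extension used by TryUtilSuite.
object tryutilSketch {
  implicit class IterableOfTry[A](iter: Iterable[Try[A]]) {
    def getSuccessesAndFailures: (List[(Int, A)], List[(Int, Throwable)]) = {
      val indexed = iter.toList.zipWithIndex.map { case (t, i) => (i + 1) -> t }
      val successes = indexed.collect { case (i, Success(a)) => i -> a }
      val failures = indexed.collect { case (i, Failure(e)) => i -> e }
      (successes, failures)
    }
  }
}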
Example 194
Source File: safeSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.safe

import java.io.CharArrayReader
import java.nio.file.Files

import purecsv.safe._
import purecsv.safe.tryutil._
import purecsv.util.serializeAndDeserialize

import org.scalatest.{Matchers, FunSuite}

import scala.util.Success

case class Event(ts: Long, msg: String, user: Option[Int])

class safeSuite extends FunSuite with Matchers {

  val events = Seq(Event(1,"foo",None),Event(2,"bar",Some(1)))
  val rawEvents = Seq("1,\"foo\",","2,\"bar\",1")

  test("Converting an iterable of events to CSV lines works") {
    events.toCSVLines().toSeq should contain theSameElementsInOrderAs(rawEvents)
  }

  test("Reading events from a String reader works") {
    val reader = new CharArrayReader(rawEvents.mkString(System.lineSeparator()).toCharArray)
    CSVReader[Event].readCSVFromReader(reader).toSeq should contain theSameElementsInOrderAs(events.map(Success(_)))
  }

  test("Reading events and get successes and failures works") {
    val reader = new CharArrayReader(rawEvents.mkString(System.lineSeparator()).toCharArray)
    val (successes,failures) = CSVReader[Event].readCSVFromReader(reader).getSuccessesAndFailures
    val expectedSuccesses = Seq(1 -> events(0), 2 -> events(1))
    successes should contain theSameElementsInOrderAs(expectedSuccesses)
    failures should be (Seq.empty[Event])
  }

  test("Can read a file written with writeCSVToFile") {
    val file = Files.createTempFile("casecsv",".csv").toFile
    events.writeCSVToFile(file)
    CSVReader[Event].readCSVFromFile(file) should contain theSameElementsInOrderAs(events.map(Success(_)))
  }

  test("serializing a CSVReader should work") {
    val csvReader = CSVReader[Event]
    val csvReaderDeserialized = serializeAndDeserialize(csvReader)

    val result = csvReaderDeserialized.readCSVFromString("123|bar|\n456|foo|3", '|', false)

    result.length should be (2)
    result should be (List(
      Success(Event(123, "bar", None)),
      Success(Event(456, "foo", Some(3)))))
  }

} 
Example 195
Source File: customerDelimiterSafeSuite.scala    From PureCSV   with Apache License 2.0
package purecsv.safe

import java.io.CharArrayReader
import java.nio.file.Files

import purecsv.safe._
import purecsv.safe.tryutil._
import org.scalatest.{Matchers, FunSuite}

import scala.util.Success


class customerDelimiterSafeSuite extends FunSuite with Matchers {

  case class Event(ts: Long, msg: String, user: Option[Int])
  val events = Seq(Event(1,"foo",None),Event(2,"bar",Some(1)))
  val rawEvents = Seq("1|\"foo\"|","2|\"bar\"|1")

  test("Converting an iterable of events to CSV lines works") {
    events.toCSVLines("|").toSeq should contain theSameElementsInOrderAs(rawEvents)
  }

  test("Reading events from a String reader works") {
    val reader = new CharArrayReader(rawEvents.mkString(System.lineSeparator()).toCharArray)
    CSVReader[Event].readCSVFromReader(reader, '|').toSeq should contain theSameElementsInOrderAs(events.map(Success(_)))
  }

  test("Reading events and get successes and failures works") {
    val reader = new CharArrayReader(rawEvents.mkString(System.lineSeparator()).toCharArray)
    val (successes,failures) = CSVReader[Event].readCSVFromReader(reader, '|').getSuccessesAndFailures
    val expectedSuccesses = Seq(1 -> events(0), 2 -> events(1))
    successes should contain theSameElementsInOrderAs(expectedSuccesses)
    failures should be (Seq.empty[Event])
  }

  test("Can read a file written with writeCSVToFile") {
    val file = Files.createTempFile("casecsv",".csv").toFile
    events.writeCSVToFile(file, "☃")
    CSVReader[Event].readCSVFromFile(file, '☃') should contain theSameElementsInOrderAs(events.map(Success(_)))
  }

} 
Example 196
Source File: RecordSplitterSuite.scala    From PureCSV   with Apache License 2.0
package purecsv

import java.io.CharArrayReader

import purecsv.unsafe.RecordSplitterImpl
import org.scalatest.{FunSuite, Matchers}


class RecordSplitterSuite extends FunSuite with Matchers {

  test("RecordSplitterImpl works with no records") {
    val reader = new CharArrayReader("".toCharArray)
    RecordSplitterImpl.getRecords(reader).toSeq should contain theSameElementsInOrderAs(Seq.empty)
  }

  test("RecordSplitterImpl works with two records") {
    val reader = new CharArrayReader("foo,bar\nbar,foo".toCharArray)
    RecordSplitterImpl.getRecords(reader).toSeq should contain theSameElementsInOrderAs(Seq(Array("foo","bar"),Array("bar","foo")))
  }


  test("RecordSplitterImpl works with custom delimiter") {
    val reader = new CharArrayReader("foo|bar\nbar|foo".toCharArray)
    RecordSplitterImpl.getRecords(reader, '|').toSeq should contain theSameElementsInOrderAs(Seq(Array("foo","bar"),Array("bar","foo")))
  }

  test("RecordSplitterImpl works with custom UTF8 delimiter") {
    val reader = new CharArrayReader("foo☃bar\nbar☃foo".toCharArray)
    RecordSplitterImpl.getRecords(reader, '☃').toSeq should contain theSameElementsInOrderAs(Seq(Array("foo","bar"),Array("bar","foo")))
  }
} 
Example 197
Source File: CSVRecordTest.scala    From PureCSV   with Apache License 2.0
package purecsv.csviterable

import org.scalatest.{ FunSuite, Matchers }
import purecsv.unsafe._
import purecsv.unsafe.converter.RawFieldsConverter

final case class Person(name: String, surname: String)

class CSVRecordTest extends FunSuite with Matchers {

  test("CSVRecord output should be parsable by purecsv") {
    val person = Person("Jon", "Snow \"III\" of Winterfell")
    implicit val rfc = RawFieldsConverter[Person]
    val csvRecord = CSVRecord(person).toCSV()
    println(s"csvRecord: $csvRecord")
    CSVReader[Person].readCSVFromString(csvRecord) should contain theSameElementsAs Seq(person)
  }
} 
Example 198
Source File: DistanceFunctionSpec.scala    From lsh-scala   with Apache License 2.0
package io.krom.lsh

import breeze.linalg.DenseVector

import org.scalatest.{Matchers, FunSpec}

import DistanceFunction._

class DistanceFunctionSpec extends FunSpec with Matchers {

  describe("calculating Euclidean distance score") {
    it("should equal 1 over 1 plus the square root of the sum of the squares of the sides") {
      val point1 = DenseVector[Double](1.0, 0.0)
      val point2 = DenseVector[Double](0.0, 1.0)
      val point3 = DenseVector[Double](3.0, 0.0)

      euclideanDistance(point1, point1) should equal (1.0)
      euclideanDistance(point1, point2) should equal (1.0 / (1.0 + Math.sqrt(2.0)))
      euclideanDistance(point1, point3) should equal (1.0 / (1.0 + Math.sqrt(4.0)))
      euclideanDistance(point2, point3) should equal (1.0 / (1.0 + Math.sqrt(10.0)))
    }
  }
  describe("calculating Cosine distance score") {
    it("should equal 1 minus the cosine of the angle between the vectors") {
      val point1 = DenseVector(1.0, 0.0)
      val point2 = DenseVector(0.0, 1.0)
      val point3 = DenseVector(3.0, 0.0)

      val point4 = DenseVector(2.0, 3.0)
      val point5 = DenseVector(1.0, 1.5)
      val point6 = DenseVector(6.0, 9.0)

      cosineDistance(point1, point1) should equal (1.0)
      cosineDistance(point1, point2) should equal (0.0)
      cosineDistance(point2, point1) should equal (0.0)
      cosineDistance(point1, point3) should equal (1.0)
      cosineDistance(point4, point5) should equal (1.0)
      cosineDistance(point4, point6) should equal (1.0)

      val point7 = DenseVector(-1.0, 0.0)
      val point8 = DenseVector(0.0, -1.0)

      cosineDistance(point1, point7) should equal (1.0)
      cosineDistance(point1, point8) should equal (0.0)
      cosineDistance(point7, point8) should equal (0.0)

      val point9 = DenseVector(0.0, 0.0)
      cosineDistance(point9, point1).isNaN should be (true)
    }
  }
} 
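For the Euclidean block, a hedged sketch consistent with those assertions (an illustration, not the lsh-scala implementation) is the reciprocal of one plus the norm of the difference vector:

import breeze.linalg.{DenseVector, norm}

// Hypothetical sketch: score of 1 / (1 + Euclidean distance) between the two points.
object DistanceFunctionSketch {
  def euclideanDistance(point1: DenseVector[Double], point2: DenseVector[Double]): Double =
    1.0 / (1.0 + norm(point1 - point2))
}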
Example 199
Source File: ServiceSpec.scala    From mqtt-mongo   with MIT License
package com.izmailoff.mm.service

import akka.actor.ActorSystem
import akka.testkit.{TestProbe, DefaultTimeout, ImplicitSender, TestKit}
import com.izmailoff.mm.config.GlobalAppConfig
import com.sandinh.paho.akka.MqttPubSub.{Subscribe, SubscribeAck, Message}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._
import scala.collection.JavaConversions._


class ServiceSpec
  extends TestKit(ActorSystem("test-mqtt-mongo-system", GlobalAppConfig.config))
  with DefaultTimeout
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with TestMqttMongoServiceImpl
  with TestHelpers {

  override def afterAll {
    shutdown()
  }

  "Subscription between MQTT Broker and Consumer" should {
    "get established when consumer is started" in {
      val mqttBroker = startMqttIntermediary()
      val probe = TestProbe()
      val mqttConsumer = startMqttConsumer(probe.ref)

      probe.expectMsg(Subscribe(testTopic, mqttConsumer))
      probe.forward(mqttBroker, Subscribe(testTopic, probe.ref))
      probe.expectMsg(SubscribeAck(Subscribe(testTopic, probe.ref)))
      probe.forward(mqttConsumer, SubscribeAck(Subscribe(testTopic, mqttConsumer)))
      probe.expectNoMsg()
    }
  }

  "Sending a message to MQTT Broker" should {
    "forward it to MQTT Consumer and get saved in DB in proper JSON format" in {
      val collection = getCollectionName(testTopic).head
      db.getCollection(collection).count() should be(0)
      val mqttBroker = startMqttIntermediary()
      val mqttConsumer = startMqttConsumer(mqttBroker)
      expectNoMsg(1 second)

      mqttBroker ! new Message(testTopic, "test content".getBytes)
      mqttBroker ! new Message(testTopic, """{ "field1" : "str val", "field2" : 123 }""".getBytes)
      expectNoMsg(1 second)

      db.getCollection(collection).count() should be(2)
      val allDocsDb = db.getCollection(collection).find().iterator.toList
      allDocsDb.exists { d =>
        val fields: Map[Any, Any] = d.toMap.toMap
        fields.size == 2 &&
          fields("payload") == "test content"
      } should be(true)
      allDocsDb.exists { d =>
        val fields: Map[Any, Any] = d.toMap.toMap
        fields.size == 3 &&
          fields("field1") == "str val" &&
          fields("field2") == 123
      } should be(true)
    }
  }


} 
Example 200
Source File: ExtractNodes.scala    From tensorframes   with Apache License 2.0
package org.tensorframes.dsl

import java.io.{BufferedReader, InputStreamReader, File}
import java.nio.file.Files
import java.nio.charset.StandardCharsets
import org.tensorframes.Logging
import org.scalatest.Matchers

import scala.collection.JavaConverters._

object ExtractNodes extends Matchers with Logging {

  def executeCommand(py: String): Map[String, String] = {
    val content =
      s"""
         |from __future__ import print_function
         |import tensorflow as tf
         |
         |$py
         |g = tf.get_default_graph().as_graph_def()
         |for n in g.node:
         |    print(">>>>>", str(n.name), "<<<<<<")
         |    print(n)
       """.stripMargin
    val f = File.createTempFile("pythonTest", ".py")
    logTrace(s"Created temp file ${f.getAbsolutePath}")
    Files.write(f.toPath, content.getBytes(StandardCharsets.UTF_8))
    // Using the standard python installation in the PATH. It needs to have TensorFlow installed.
    val p = new ProcessBuilder("python", f.getAbsolutePath).start()
    val s = p.getInputStream
    val isr = new InputStreamReader(s)
    val br = new BufferedReader(isr)
    var res: String = ""
    var str: String = ""
    while(str != null) {
      str = br.readLine()
      if (str != null) {
        res = res + "\n" + str
      }
    }

    p.waitFor()
    assert(p.exitValue() === 0, (p.exitValue(),
      {
        println(content)
        s"===========\n$content\n==========="
      }))
    res.split(">>>>>").map(_.trim).filterNot(_.isEmpty).map { b =>
      val zs = b.split("\n")
      val node = zs.head.dropRight(7)
      val rest = zs.tail
      node -> rest.mkString("\n")
    } .toMap
  }

  def compareOutput(py: String, nodes: Operation*): Unit = {
    val g = TestUtilities.buildGraph(nodes.head, nodes.tail:_*)
    val m1 = g.getNodeList.asScala.map { n =>
      n.getName -> n.toString.trim
    } .toMap
    val pym = executeCommand(py)
    logTrace(s"m1 = '$m1'")
    logTrace(s"pym = '$pym'")
    assert((m1.keySet -- pym.keySet).isEmpty, {
      val diff = (m1.keySet -- pym.keySet).toSeq.sorted
      s"Found extra nodes in scala: $diff"
    })
    assert((pym.keySet -- m1.keySet).isEmpty, {
      val diff = (pym.keySet -- m1.keySet).toSeq.sorted
      s"Found extra nodes in python: $diff"
    })
    for (k <- m1.keySet) {
      assert(m1(k) === pym(k),
        s"scala=${m1(k)}\npython=${pym(k)}")
    }
  }
}