org.scalatest.Inspectors Scala Examples

The following examples show how to use org.scalatest.Inspectors. Each example is preceded by a header naming its original project and source file.
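org.scalatest.Inspectors provides nested-assertion helpers (forAll, forEvery, forExactly, forAtLeast, forAtMost and forBetween) that apply a block of assertions to the elements of a collection and report which elements failed. The sketch below is not taken from any of the projects on this page; it is a minimal illustration of the basic calls, assuming a ScalaTest 3.0.x classpath where FlatSpec and Matchers are still top-level traits, as in the TSimulus tests that follow.

import org.scalatest.{FlatSpec, Inspectors, Matchers}

// Minimal sketch (assumed names, not from the projects below): basic Inspectors calls.
class InspectorsUsageSketch extends FlatSpec with Matchers with Inspectors
{
   val xs = List(1, 2, 3, 4, 5)

   "forAll" should "apply the assertion block to every element" in {
      forAll(xs) { x => x should be > 0 }
   }

   "forExactly" should "require the block to hold for exactly n elements" in {
      forExactly(2, xs) { x => x % 2 shouldBe 0 }   // only 2 and 4 are even
   }

   "forAtLeast" should "require the block to hold for at least n elements" in {
      forAtLeast(1, xs) { x => x shouldBe 3 }
   }
}

Note that Inspectors.forAll is unrelated to the ScalaCheck-driven forAll of property-based testing; Example 26 below calls it explicitly as Inspectors.forAll to avoid that name clash, while most of the other examples mix the trait in and call forAll directly.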
Example 1
Source File: LesserThanGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.test.RTSTest
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.LesserThanGenerator


class LesserThanGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |   "name": "lt-generator",
        |   "type": "lesser-than",
        |   "a": "daily-generator",
        |   "b": "monthly-generator",
        |   "strict": false
        |}
      """.stripMargin

   val source_implicit =
      """
        |{
        |   "name": "lt-generator",
        |   "type": "lesser-than",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "A LT generator" should "be correctly read from a json document" in {

      val generator = LesserThanGenerator(source.parseJson)

      generator.name shouldBe Some("lt-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
      generator.strict shouldBe Some(false)
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe LesserThanGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new LesserThanGenerator(
         Some("lt-generator"),
         Left("daily-generator"),
         Left("monthly-generator"),
         Some(false)
      )

      generator shouldBe LesserThanGenerator(generator.toJson)
   }

   "A LT generator with implicit strictness" should "be correctly read from a json document" in {
      val generator = LesserThanGenerator(source_implicit.parseJson)

      generator.name shouldBe Some("lt-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
      generator.strict shouldBe None
   }

   it should "be correctly exported to a json document" in {
      val generator = new LesserThanGenerator(
         Some("lt-generator"),
         Left("daily-generator"),
         Left("monthly-generator"),
         None
      )

      generator shouldBe LesserThanGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new LesserThanGenerator(
         Some("lesser-than-generator"),
         Left("a-generator"),
         Left("b-generator"),
         None
      )

      generator.toString shouldBe """LesserThan(Some(lesser-than-generator), Left(a-generator), Left(b-generator), None)"""
   }
} 
Example 2
Source File: PartialGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.missing

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.missing.PartialGenerator
import org.joda.time.LocalDateTime
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._


class PartialGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |  "name": "partial-generator",
        |  "type": "partial",
        |  "generator": "daily-generator",
        |  "from": "2016-01-01 00:00:00.000",
        |  "to": "2016-04-23 01:23:45.678",
        |  "missing-rate": 0.001
        |}
      """.stripMargin

   "A Partial generator" should "be correctly read from a json document" in {
      val generator = PartialGenerator(source.parseJson)

      generator.name shouldBe Some("partial-generator")
      generator.generator shouldBe Left("daily-generator")
      generator.from shouldBe Some(new LocalDateTime(2016, 1, 1, 0, 0, 0))
      generator.to shouldBe Some(new LocalDateTime(2016, 4, 23, 1, 23, 45, 678))
      generator.missingRate shouldBe Some(0.001)
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe PartialGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new PartialGenerator(
         Some("limited-generator"),
         Left("daily-generator"),
         Some(new LocalDateTime(2016, 1, 1, 0, 0, 0)),
         Some(new LocalDateTime(2016, 4, 23, 1, 23, 45, 678)),
         Some(0.001)
      )
      generator shouldBe PartialGenerator(generator.toJson)
   }
} 
Example 3
Source File: AndGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.AndGenerator
import be.cetic.tsimulus.test.RTSTest

class AndGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val andSource =
      """
        |{
        |   "name": "and-generator",
        |   "type": "and",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "A AND generator" should "be correctly read from a json document" in {
      val generator = AndGenerator(andSource.parseJson)

      generator.name shouldBe Some("and-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(andSource.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(andSource.parseJson) shouldBe AndGenerator(andSource.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new AndGenerator(
         Some("and-generator"),
         Left("daily-generator"),
         Left("monthly-generator")
      )
      generator shouldBe AndGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new AndGenerator(
         Some("and-generator"),
         Left("a-generator"),
         Left("b-generator")
      )

      generator.toString shouldBe """And(Some(and-generator), Left(a-generator), Left(b-generator))"""
   }
} 
Example 4
Source File: OrGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.timeseries.binary.{FalseTimeSeries, OrTimeSeries, TrueTimeSeries}
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import org.joda.time.LocalDateTime
import com.github.nscala_time.time.Imports._
import be.cetic.tsimulus.generators.binary.OrGenerator
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.test.RTSTest


class OrGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
  val source =
      """
        |{
        |   "name": "or-generator",
        |   "type": "or",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "A OR generator" should "be correctly read from a json document" in {
      val generator = OrGenerator(source.parseJson)

      generator.name shouldBe Some("or-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe OrGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new OrGenerator(
         Some("or-generator"),
         Left("daily-generator"),
         Left("monthly-generator")
      )
      generator shouldBe OrGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new OrGenerator(
         Some("or-generator"),
         Left("a-generator"),
         Left("b-generator")
      )

      generator.toString shouldBe """Or(Some(or-generator), Left(a-generator), Left(b-generator))"""
   }
} 
Example 5
Source File: ImpliesGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.ImpliesGenerator
import be.cetic.tsimulus.test.RTSTest

class ImpliesGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |   "name": "implies-generator",
        |   "type": "implies",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "An Implies generator" should "be correctly read from a json document" in {
      val generator = ImpliesGenerator(source.parseJson)

      generator.name shouldBe Some("implies-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe ImpliesGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new ImpliesGenerator(
         Some("implies-generator"),
         Left("daily-generator"),
         Left("monthly-generator")
      )
      generator shouldBe ImpliesGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new ImpliesGenerator(
         Some("implies-generator"),
         Left("a-generator"),
         Left("b-generator")
      )

      generator.toString shouldBe """Implies(Some(implies-generator), Left(a-generator), Left(b-generator))"""
   }
} 
Example 6
Source File: LogisticGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.binary.LogisticGenerator
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.test.RTSTest

class LogisticGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |  "name": "logistic-generator",
        |  "type": "logistic",
        |  "generator": "g1",
        |  "location": 6,
        |  "scale": 2.4,
        |  "seed": 1809
        |}
      """.stripMargin

   val generator = LogisticGenerator(source.parseJson)

   "A logistic generator" should "be correctly read from a json document" in {

      generator.name shouldBe Some("logistic-generator")
      generator.generator shouldBe Left("g1")
      generator.location shouldBe 6
      generator.scale shouldBe 2.4 +- 0.001
      generator.seed shouldBe Some(1809)
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe LogisticGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new LogisticGenerator(
         Some("logistic-generator"),
         Left("g1"),
         6,
         2.4,
         Some(1809)
      )
      generator shouldBe LogisticGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new LogisticGenerator(
         Some("logistic-generator"),
         Left("g1"),
         6,
         2.4,
         Some(1809)
      )

      generator.toString shouldBe """Logistic(Some(logistic-generator), Left(g1), 6.0, 2.4, Some(1809))"""
   }
} 
Example 7
Source File: GreaterThanGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.test.RTSTest
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.GreaterThanGenerator


class GreaterThanGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |   "name": "gt-generator",
        |   "type": "greater-than",
        |   "a": "daily-generator",
        |   "b": "monthly-generator",
        |   "strict": false
        |}
      """.stripMargin

   val source_implicit =
      """
        |{
        |   "name": "gt-generator",
        |   "type": "greater-than",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "A GT generator" should "be correctly read from a json document" in {
      val generator = GreaterThanGenerator(source.parseJson)

      generator.name shouldBe Some("gt-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
      generator.strict shouldBe Some(false)
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe GreaterThanGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new GreaterThanGenerator(
         Some("gt-generator"),
         Left("daily-generator"),
         Left("monthly-generator"),
         Some(false)
      )

      generator shouldBe GreaterThanGenerator(generator.toJson)
   }

   "A GT generator with implicit strictness" should "be correctly read from a json document" in {
      val generator = GreaterThanGenerator(source_implicit.parseJson)

      generator.name shouldBe Some("gt-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
      generator.strict shouldBe None
   }

   it should "be correctly exported to a json document" in {
      val generator = new GreaterThanGenerator(
         Some("gt-generator"),
         Left("daily-generator"),
         Left("monthly-generator"),
         None
      )

      generator shouldBe GreaterThanGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new GreaterThanGenerator(
         Some("greater-than-generator"),
         Left("a-generator"),
         Left("b-generator"),
         None
      )

      generator.toString shouldBe """GreaterThan(Some(greater-than-generator), Left(a-generator), Left(b-generator), None)"""
   }
} 
Example 8
Source File: XorGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.timeseries.binary.{FalseTimeSeries, TrueTimeSeries, XorTimeSeries}
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import org.joda.time.LocalDateTime
import com.github.nscala_time.time.Imports._
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.XorGenerator
import be.cetic.tsimulus.test.RTSTest

class XorGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{

   val source =
      """
        |{
        |   "name": "xor-generator",
        |   "type": "xor",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "A XOR generator" should "be correctly read from a json document" in {
      val generator = XorGenerator(source.parseJson)

      generator.name shouldBe Some("xor-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe XorGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new XorGenerator(
         Some("xor-generator"),
         Left("daily-generator"),
         Left("monthly-generator")
      )
      generator shouldBe XorGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new XorGenerator(
         Some("xor-generator"),
         Left("a-generator"),
         Left("b-generator")
      )

      generator.toString shouldBe """Xor(Some(xor-generator), Left(a-generator), Left(b-generator))"""
   }
} 
Example 9
Source File: NotGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.timeseries.binary.{FalseTimeSeries, NotTimeSeries, TrueTimeSeries}
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import org.joda.time.LocalDateTime
import com.github.nscala_time.time.Imports._
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.NotGenerator
import be.cetic.tsimulus.test.RTSTest

class NotGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |   "name": "not-generator",
        |   "type": "not",
        |   "generator": "binary-generator"
        |}
      """.stripMargin

   "A NOT generator" should "be correctly read from a json document" in {
      val generator = NotGenerator(source.parseJson)

      generator.name shouldBe Some("not-generator")
      generator.generator shouldBe Left("binary-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe NotGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new NotGenerator(
         Some("not-generator"),
         Left("binary-generator")
      )
      generator shouldBe NotGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new NotGenerator(
         Some("not-generator"),
         Left("a-generator")
      )

      generator.toString shouldBe """Not(Some(not-generator), Left(a-generator))"""
   }
} 
Example 10
Source File: TrueGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.binary.TrueGenerator
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._

class TrueGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |   "name": "true-generator",
        |   "type": "true"
        |}
      """.stripMargin

   "A TRUE generator" should "be correctly read from a json document" in {
      val generator = TrueGenerator(source.parseJson)

      generator.name shouldBe Some("true-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe TrueGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new TrueGenerator(
         Some("true-generator")
      )
      generator shouldBe TrueGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new TrueGenerator(
         Some("true-generator")
      )

      generator.toString shouldBe """True(Some(true-generator))"""
   }
} 
Example 11
Source File: FalseGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.binary.FalseGenerator
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._


class FalseGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |   "name": "false-generator",
        |   "type": "false"
        |}
      """.stripMargin

   "A FALSE generator" should "be correctly read from a json document" in {
      val generator = FalseGenerator(source.parseJson)

      generator.name shouldBe Some("false-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe FalseGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new FalseGenerator(
         Some("false-generator")
      )
      generator shouldBe FalseGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new FalseGenerator(
         Some("false-generator")
      )

      generator.toString shouldBe """False(Some(false-generator))"""
   }
} 
Example 12
Source File: EquivGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.binary

import be.cetic.tsimulus.config.GeneratorFormat
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.binary.EquivGenerator
import be.cetic.tsimulus.test.RTSTest

class EquivGeneratorTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   val source =
      """
        |{
        |   "name": "equiv-generator",
        |   "type": "equiv",
        |   "a": "daily-generator",
        |   "b": "monthly-generator"
        |}
      """.stripMargin

   "An Equiv generator" should "be correctly read from a json document" in {
      val generator = EquivGenerator(source.parseJson)

      generator.name shouldBe Some("equiv-generator")
      generator.a shouldBe Left("daily-generator")
      generator.b shouldBe Left("monthly-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe EquivGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new EquivGenerator(
         Some("equiv-generator"),
         Left("daily-generator"),
         Left("monthly-generator")
      )
      generator shouldBe EquivGenerator(generator.toJson)
   }

   it should "have a correct textual representation" in {
      val generator = new EquivGenerator(
         Some("equiv-generator"),
         Left("a-generator"),
         Left("b-generator")
      )

      generator.toString shouldBe """Equiv(Some(equiv-generator), Left(a-generator), Left(b-generator))"""
   }
} 
Example 13
Source File: UndefinedGeneratorTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.generators.missing

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.missing.{LimitedGenerator, PartialGenerator, UndefinedGenerator}
import org.joda.time.LocalDateTime
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._


class UndefinedGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |  "name": "undefined-generator",
        |  "type": "undefined"
        |}
      """.stripMargin

   "An Undefined generator" should "be correctly read from a json document" in {
      val generator = UndefinedGenerator(source.parseJson)

      generator.name shouldBe Some("undefined-generator")
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe UndefinedGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new UndefinedGenerator(
         Some("undefined-generator")
      )
      generator shouldBe UndefinedGenerator(generator.toJson)
   }
} 
Example 14
Source File: ConditionalTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.missing

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.composite.ConditionalTimeSeries
import be.cetic.tsimulus.timeseries.primary.ConstantTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class ConditionalTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A successful conditional time series" should "use the success time series" in {
      forAll(ConditionalTimeSeries(t, t, f).compute(dates)) {
         result => result._2 shouldBe Some(true)
      }
   }

   "An unsuccessful conditional time series" should "use the failure time series" in {
      forAll(ConditionalTimeSeries(f, t, f).compute(dates)) {
         result => result._2 shouldBe Some(false)
      }
   }

   "An undetermined conditional time series" should "produce undefined values" in {
      forAll(ConditionalTimeSeries(u, t, f).compute(dates)) {
         result => result._2 shouldBe None
      }
   }
} 
Example 15
Source File: DefaultTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.missing

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.missing.DefaultTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class DefaultTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A default time series with undefined values first" should "skip to the first defined values" in {
      forAll (DefaultTimeSeries(Seq(u, t, f)).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "An empty default time series" should "generate undefined values" in {
      forAll (DefaultTimeSeries(Seq()).compute(dates)) { result => result._2 shouldBe None}
   }

   "A default time series with only undefined values" should "generate undefined values" in {
      forAll (DefaultTimeSeries(Seq(u, u)).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 16
Source File: XorTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.XorTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class XorTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "True XOR True" should "be False" in {
      forAll (new XorTimeSeries(t, t).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "True XOR False" should "be True" in {
      forAll (new XorTimeSeries(t, f).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "False XOR True" should "be True" in {
      forAll (new XorTimeSeries(f, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "False XOR False" should "be False" in {
      forAll (new XorTimeSeries(f, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "True XOR Undetermined" should "be Undetermined" in {
      forAll (new XorTimeSeries(t, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "False XOR Undetermined" should "be Undetermined" in {
      forAll (new XorTimeSeries(f, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined XOR True" should "be Undetermined" in {
      forAll (new XorTimeSeries(u, t).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined XOR False" should "be Undetermined" in {
      forAll (new XorTimeSeries(u, f).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined XOR Undetermined" should "be Undetermined" in {
      forAll (new XorTimeSeries(u, u).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 17
Source File: AndTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.AndTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class AndTimeSeriesTest extends FlatSpec with Matchers
                                            with Inspectors
                                            with RTSTest
{
   "True AND True" should "be True" in {
      forAll (new AndTimeSeries(t, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True AND False" should "be False" in {
      forAll (new AndTimeSeries(t, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "False AND True" should "be False" in {
      forAll (new AndTimeSeries(f, t).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "False AND False" should "be False" in {
      forAll (new AndTimeSeries(f, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "True AND Undefined" should "be Undefined" in {
      forAll (new AndTimeSeries(t, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined AND True" should "be Undefined" in {
      forAll (new AndTimeSeries(u, t).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined AND Undefined" should "be Undefined" in {
      forAll (new AndTimeSeries(u, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "False AND Undefined" should "be Undefined" in {
      forAll (new AndTimeSeries(f, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined AND False" should "be Undefined" in {
      forAll (new AndTimeSeries(u, f).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 18
Source File: OrTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.OrTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}


class OrTimeSeriesTest extends FlatSpec
               with Matchers
               with Inspectors
               with RTSTest
{
   "True OR True" should "be True" in {
      forAll (new OrTimeSeries(t, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True OR False" should "be True" in {
      forAll (new OrTimeSeries(t, f).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "False OR True" should "be True" in {
      forAll (new OrTimeSeries(f, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "False OR False" should "be False" in {
      forAll (new OrTimeSeries(f, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "True OR Undetermined" should "be Undetermined" in {
      forAll (new OrTimeSeries(t, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "False OR Undetermined" should "be Undetermined" in {
      forAll (new OrTimeSeries(f, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined OR True" should "be Undetermined" in {
      forAll (new OrTimeSeries(u, t).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined OR False" should "be Undetermined" in {
      forAll (new OrTimeSeries(u, f).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undetermined OR Undetermined" should "be Undetermined" in {
      forAll (new OrTimeSeries(u, u).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 19
Source File: LesserThanTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.LesserThanTimeSeries
import be.cetic.tsimulus.timeseries.primary.{ConstantTimeSeries, UndefinedTimeSeries}
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class LesserThanTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A lesser than time series for which A is always greater than B" should "produce false" in {
      forAll (new LesserThanTimeSeries(ConstantTimeSeries(2), ConstantTimeSeries(1)).compute(dates)) {
         _._2 shouldBe Some(false)
      }
   }

   "A lesser than time series for which A is always lesser than B" should "produce true" in {
      forAll (new LesserThanTimeSeries(ConstantTimeSeries(1), ConstantTimeSeries(2)).compute(dates)) {
         _._2 shouldBe Some(true)
      }
   }

   "A lesser than time series for which A is always undefined" should "produce undefined values" in {
      forAll (new LesserThanTimeSeries(new UndefinedTimeSeries(), ConstantTimeSeries(1)).compute(dates)) {
         _._2 shouldBe None
      }
   }

   "A lesser than time series for which B is always undefined" should "produce undefined values" in {
      forAll (new LesserThanTimeSeries(ConstantTimeSeries(1), new UndefinedTimeSeries()).compute(dates)) {
         _._2 shouldBe None
      }
   }

   "A lesser than time series for which A and B are always undefined" should "produce undefined values" in {
      forAll (new LesserThanTimeSeries(new UndefinedTimeSeries(), new UndefinedTimeSeries()).compute(dates)) {
         _._2 shouldBe None
      }
   }
} 
Example 20
Source File: EquivTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.{AndTimeSeries, EquivTimeSeries}
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class EquivTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "True Equiv True" should "be True" in {
      forAll (new EquivTimeSeries(t, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True Equiv False" should "be False" in {
      forAll (new EquivTimeSeries(t, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "False Equiv True" should "be False" in {
      forAll (new EquivTimeSeries(f, t).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "False Equiv False" should "be True" in {
      forAll (new EquivTimeSeries(f, f).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True Equiv Undefined" should "be Undefined" in {
      forAll (new EquivTimeSeries(t, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined Equiv True" should "be Undefined" in {
      forAll (new EquivTimeSeries(u, t).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined Equiv Undefined" should "be Undefined" in {
      forAll (new EquivTimeSeries(u, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "False Equiv Undefined" should "be Undefined" in {
      forAll (new EquivTimeSeries(f, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined Equiv False" should "be Undefined" in {
      forAll (new EquivTimeSeries(u, f).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 21
Source File: ImpliesTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.ImpliesTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class ImpliesTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "True IMPLIES True" should "be True" in {
      forAll (new ImpliesTimeSeries(t, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True IMPLIES False" should "be False" in {
      forAll (new ImpliesTimeSeries(t, f).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "False IMPLIES True" should "be True" in {
      forAll (new ImpliesTimeSeries(f, t).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "False IMPLIES False" should "be True" in {
      forAll (new ImpliesTimeSeries(f, f).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "True IMPLIES Undefined" should "be Undefined" in {
      forAll (new ImpliesTimeSeries(t, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined IMPLIES True" should "be Undefined" in {
      forAll (new ImpliesTimeSeries(u, t).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined IMPLIES Undefined" should "be Undefined" in {
      forAll (new ImpliesTimeSeries(u, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "False IMPLIES Undefined" should "be Undefined" in {
      forAll (new ImpliesTimeSeries(f, u).compute(dates)) { result => result._2 shouldBe None}
   }

   "Undefined IMPLIES False" should "be Undefined" in {
      forAll (new ImpliesTimeSeries(u, f).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 22
Source File: NotTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.NotTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class NotTimeSeriesTest extends FlatSpec
   with Matchers
   with Inspectors
   with RTSTest
{
   "Not True" should "be False" in {
      forAll (NotTimeSeries(t).compute(dates)) { result => result._2 shouldBe Some(false)}
   }

   "Not False" should "be True" in {
      forAll (NotTimeSeries(f).compute(dates)) { result => result._2 shouldBe Some(true)}
   }

   "Not Undefined" should "be Undefined" in {
      forAll (NotTimeSeries(u).compute(dates)) { result => result._2 shouldBe None}
   }
} 
Example 23
Source File: GreaterThanTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.binary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.binary.GreaterThanTimeSeries
import be.cetic.tsimulus.timeseries.primary.{ConstantTimeSeries, UndefinedTimeSeries}
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class GreaterThanTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A greater than time series for which A is always greater than B" should "produce true" in {
      forAll (new GreaterThanTimeSeries(ConstantTimeSeries(2), ConstantTimeSeries(1)).compute(dates)) {
         _._2 shouldBe Some(true)
      }
   }

   "A greater than time series for which A is always lesser than B" should "produce false" in {
      forAll (new GreaterThanTimeSeries(ConstantTimeSeries(1), ConstantTimeSeries(2)).compute(dates)) {
         _._2 shouldBe Some(false)
      }
   }

   "A greater than time series for which A is always undefined" should "produce undefined values" in {
      forAll (new GreaterThanTimeSeries(new UndefinedTimeSeries(), ConstantTimeSeries(1)).compute(dates)) {
         _._2 shouldBe None
      }
   }

   "A greater than time series for which B is always undefined" should "produce undefined values" in {
      forAll (new GreaterThanTimeSeries(ConstantTimeSeries(1), new UndefinedTimeSeries()).compute(dates)) {
         _._2 shouldBe None
      }
   }

   "A greater than time series for which A and B are always undefined" should "produce undefined values" in {
      forAll (new GreaterThanTimeSeries(new UndefinedTimeSeries(), new UndefinedTimeSeries()).compute(dates)) {
         _._2 shouldBe None
      }
   }
} 
Example 24
Source File: RandomWalkTimeSeriesTest.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.test.timeseries.primary

import be.cetic.tsimulus.Utils
import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.primary.{ARMA, RandomWalkTimeSeries}
import org.joda.time.LocalDateTime
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import com.github.nscala_time.time.Imports._


class RandomWalkTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A random walk time series" should "provide identical results, by each moment at a time, or on a batch, when the origin is before the dates" in {
      val dates = Utils.sampling(new LocalDateTime(2016, 1, 1, 0, 0), new LocalDateTime(2016, 1, 2, 0, 0), 100)
      val ts = new RandomWalkTimeSeries(ARMA(Array(), Array(), 0.01, 0, 42), new LocalDateTime(2015, 12, 30, 0, 0), 1 minute)

      val individuals = dates.map(d => ts.compute(d).get)
      val batched = ts.compute(dates).map(_._2.get)

      individuals shouldBe batched
   }

   "A random walk time series" should "provide identical results, by each moment at a time, or on a batch, when the origin is among the dates" in {
      val dates = Utils.sampling(new LocalDateTime(2016, 1, 1, 0, 0), new LocalDateTime(2016, 1, 2, 0, 0), 100)
      val ts = new RandomWalkTimeSeries(ARMA(Array(), Array(), 0.01, 0, 42), new LocalDateTime(2016, 1, 1, 2, 0), 1 minute)

      val individuals = dates.map(d => ts.compute(d).get)
      val batched = ts.compute(dates).map(_._2.get)

      individuals shouldBe batched
   }

   "A random walk time series" should "provide identical results, by each moment at a time, or on a batch, when the origin is after the dates" in {
      val dates = Utils.sampling(new LocalDateTime(2016, 1, 1, 0, 0), new LocalDateTime(2016, 1, 2, 0, 0), 100)
      val ts = new RandomWalkTimeSeries(ARMA(Array(), Array(), 0.01, 0, 42), new LocalDateTime(2016, 2, 1, 0, 0), 1 minute)

      val individuals = dates.map(d => ts.compute(d).get)
      val batched = ts.compute(dates).map(_._2.get)

      individuals shouldBe batched
   }
} 
Example 25
Source File: FanOutSpec.scala    From swave   with Mozilla Public License 2.0
package swave.core.impl.stages

import org.scalacheck.Gen
import org.scalatest.Inspectors
import swave.core.StreamEnv
import swave.core.internal.testkit.TestFixture

final class FanOutSpec extends SyncPipeSpec with Inspectors {

  implicit val env    = StreamEnv()
  implicit val config = PropertyCheckConfiguration(minSuccessful = 1000)

  implicit val integerInput = Gen.chooseNum(0, 999)

  "Broadcast" in check {
    testSetup
      .input[Int]
      .fixtures(Gen.chooseNum(1, 3), _.output[Int])
      .prop
      .from { (in, outs) ⇒
        import TestFixture.State._

        in.spout
          .fanOutBroadcast()
          .subDrains(outs.tail.map(_.drain.dropResult))
          .subContinue
          .drainTo(outs.head.drain)

        in.terminalState match {
          case Cancelled ⇒ forAll(outs) { _.terminalState shouldBe Cancelled }
          case Completed ⇒ forAll(outs) { _.terminalState should (be(Cancelled) or be(Completed)) }
          case error     ⇒ forAll(outs) { _.terminalState should (be(error) or be(Cancelled)) }
        }

        forAll(outs) { out ⇒
          out.received shouldEqual in.produced.take(out.scriptedSize)
        }
      }
  }

  "BroadcastBuffered" in check {
    testSetup
      .input[Int]
      .fixtures(Gen.chooseNum(1, 3), _.output[Int])
      .param(Gen.chooseNum(1, 16))
      .prop
      .from { (in, outs, bufferSize) ⇒
        import TestFixture.State._

        in.spout
          .fanOutBroadcast(bufferSize)
          .subDrains(outs.tail.map(_.drain.dropResult))
          .subContinue
          .drainTo(outs.head.drain)

        in.terminalState match {
          case Cancelled ⇒ forAll(outs) { _.terminalState shouldBe Cancelled }
          case Completed ⇒ forAll(outs) { _.terminalState should (be(Cancelled) or be(Completed)) }
          case error     ⇒ forAll(outs) { _.terminalState should (be(error) or be(Cancelled)) }
        }

        forAll(outs) { out ⇒
          out.received shouldEqual in.produced.take(out.size)
        }
      }
  }
} 
Example 26
Source File: TableSchemaGeneratorTest.scala    From spark-vector   with Apache License 2.0
package com.actian.spark_vector.vector

import org.apache.spark.sql.types._
import org.scalacheck.Gen.identifier
import org.scalacheck.Shrink
import org.scalatest.{ FunSuite, Inspectors, Matchers }
import org.scalatest.prop.PropertyChecks

import com.actian.spark_vector.vector.VectorJDBC.withJDBC
import com.actian.spark_vector.DataTypeGens.schemaGen
import com.actian.spark_vector.test.IntegrationTest
import com.actian.spark_vector.test.tags.RandomizedTest

@IntegrationTest
class TableSchemaGeneratorTest extends FunSuite with Matchers with PropertyChecks with VectorFixture {
  import com.actian.spark_vector.DataTypeGens._
  import com.actian.spark_vector.vector.TableSchemaGenerator._
  import org.scalacheck.Gen._

  val defaultFields: Seq[StructField] = Seq(
    StructField("a", BooleanType, true),
    StructField("b", ByteType, false),
    StructField("c", ShortType, true),
    StructField("d", IntegerType, false),
    StructField("e", LongType, true),
    StructField("f", FloatType, false),
    StructField("g", DoubleType, true),
    StructField("h", DecimalType(10, 2), false),
    StructField("i", DateType, true),
    StructField("j", TimestampType, false),
    StructField("k", StringType, true))

  val defaultSchema = StructType(defaultFields)

  test("table schema") {
    withJDBC(connectionProps)(cxn => {
      cxn.autoCommit(false)
      assertSchemaGeneration(cxn, "testtable", defaultSchema)
    })
  }

  test("table schema/gen", RandomizedTest) {
    withJDBC(connectionProps)(cxn => {
      cxn.autoCommit(false)
      forAll(identifier, schemaGen)((name, schema) => {
        assertSchemaGeneration(cxn, name, schema)
      })(PropertyCheckConfig(minSuccessful = 5), Shrink.shrinkAny[String], Shrink.shrinkAny[StructType])
    })
  }

  private def assertSchemaGeneration(cxn: VectorJDBC, name: String, schema: StructType): Unit = {
    val sql = generateTableSQL(name, schema)
    try {
      cxn.executeStatement(sql)
      val columnsAsFields = cxn.columnMetadata(name).map(_.structField)
      columnsAsFields.size should be(schema.fields.length)
      Inspectors.forAll(columnsAsFields.zip(schema.fields)) {
        case (columnField, origField) => {
          columnField.name should be(origField.name.toLowerCase)
          columnField.dataType should be(origField.dataType)
          columnField.nullable should be(origField.nullable)
          // TODO ensure field metadata consistency
        }
      }
      cxn.dropTable(name)
    } finally {
      cxn.rollback()
    }
  }
} 
Example 27
Source File: ResourceDecoderSpec.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.serializers

import java.time.Instant

import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.User
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.{Id, ResourceF, ResourceGraph}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Decoder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class ResourceDecoderSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with EitherValues
    with ScalatestRouteTest
    with OptionValues
    with Resources
    with TestHelper {

  private val json                                     = jsonContentOf("/serialization/resource.json")
  private val projectRef                               = ProjectRef(genUUID)
  private val id                                       = url"http://example.com/prefix/myId"
  private val graph                                    = json.toGraph(id).rightValue
  private implicit val decoder: Decoder[ResourceGraph] = ResourceF.resourceGraphDecoder(projectRef)

  private val model = ResourceF(
    Id(projectRef, url"http://example.com/prefix/myId"),
    1L,
    Set(url"https://example.com/vocab/A", url"https://example.com/vocab/B"),
    deprecated = false,
    Map.empty,
    None,
    Instant.parse("2020-01-17T12:45:01.479676Z"),
    Instant.parse("2020-01-17T13:45:01.479676Z"),
    User("john", "bbp"),
    User("brenda", "bbp"),
    Schemas.unconstrainedRef,
    graph
  )

  "A resource" should {
    "be decoded" in {
      json.as[ResourceGraph].rightValue shouldEqual model
    }
  }

} 
Example 28
Source File: StorageCacheSpec.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import java.nio.file.Paths
import java.time.Clock

import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.DiskStorage
import ch.epfl.bluebrain.nexus.rdf.implicits._
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class StorageCacheSpec
    extends ActorSystemFixture("StorageCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val clock: Clock         = Clock.systemUTC
  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val time   = clock.instant()
  val lastId = url"http://example.com/lastA"
  // initialInstant.minusSeconds(1L + genInt().toLong)

  val tempStorage = DiskStorage(ref1, genIri, 1L, false, true, "alg", Paths.get("/tmp"), read, write, 1024L)

  val lastStorageProj1 = tempStorage.copy(id = lastId)
  val lastStorageProj2 = tempStorage.copy(ref = ref2, id = lastId)

  val storagesProj1: List[DiskStorage] = List.fill(5)(tempStorage.copy(id = genIri)) :+ lastStorageProj1
  val storagesProj2: List[DiskStorage] = List.fill(5)(tempStorage.copy(ref = ref2, id = genIri)) :+ lastStorageProj2

  private val cache = StorageCache[Task]

  "StorageCache" should {

    "index storages" in {
      forAll((storagesProj1 ++ storagesProj2).zipWithIndex) {
        case (storage, index) =>
          implicit val instant = time.plusSeconds(index.toLong)
          cache.put(storage).runToFuture.futureValue
          cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual Some(storage)
      }
    }

    "get latest default storage" in {
      cache.getDefault(ref1).runToFuture.futureValue shouldEqual Some(lastStorageProj1)
      cache.getDefault(ref2).runToFuture.futureValue shouldEqual Some(lastStorageProj2)
      cache.getDefault(ProjectRef(genUUID)).runToFuture.futureValue shouldEqual None
    }

    "list storages" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs storagesProj2
    }

    "deprecate storage" in {
      val storage          = storagesProj1.head
      implicit val instant = time.plusSeconds(30L)
      cache.put(storage.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1.filterNot(_ == storage)
    }
  }
} 
Example 29
Source File: ResolverCacheSpec.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
Example 30
Source File: TaggingAdapterSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.persistence

import java.time.{Clock, Instant, ZoneId}

import akka.persistence.journal.Tagged
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas._
import ch.epfl.bluebrain.nexus.kg.config.Vocabulary._
import ch.epfl.bluebrain.nexus.kg.persistence.TaggingAdapterSpec.Other
import ch.epfl.bluebrain.nexus.kg.resources.Event._
import ch.epfl.bluebrain.nexus.kg.resources.{Id, OrganizationRef, Ref}
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import io.circe.Json
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with TestHelper {

  "A TaggingAdapter" should {
    val clock = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())

    def genJson(): Json = Json.obj("key" -> Json.fromString(genString()))

    val adapter = new TaggingAdapter()
    val orgRef  = OrganizationRef(genUUID)
    val id      = Id(ProjectRef(genUUID), nxv.projects)

    val mapping = Map(
      Set(
        s"type=${nxv.Schema.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      ) ->
        Created(id, orgRef, Ref(shaclSchemaUri), Set(nxv.Schema, nxv.Resource), genJson(), clock.instant(), Anonymous),
      Set(
        s"type=${nxv.Resolver.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      ) ->
        Updated(id, orgRef, 1L, Set(nxv.Resource, nxv.Resolver), genJson(), clock.instant(), Anonymous),
      Set(s"type=${nxv.Resource.value.show}", s"project=${id.parent.id}", s"org=${orgRef.show}", "event") ->
        Deprecated(id, orgRef, 1L, Set(nxv.Resource), clock.instant(), Anonymous),
      Set(s"project=${id.parent.id}", s"org=${orgRef.show}", "event") ->
        TagAdded(id, orgRef, 2L, 1L, "tag", clock.instant(), Anonymous)
    )

    "set the appropriate tags" in {
      forAll(mapping.toList) {
        case (tags, ev) => adapter.toJournal(ev) shouldEqual Tagged(ev, tags)
      }
    }

    "return an empty manifest" in {
      adapter.manifest(Other(genString())) shouldEqual ""
    }
  }
}

object TaggingAdapterSpec {
  private[persistence] final case class Other(value: String)

} 
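The TaggingAdapter specs above (and several of the specs that follow) share the same shape: build a Map from input to expected output, then assert over map.toList with Inspectors.forAll. Below is a minimal standalone sketch of that pattern, assuming only a ScalaTest 3.x dependency; the class and data are invented for illustration and belong to none of the projects listed here.

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

// Hypothetical spec illustrating the "map of expectations" pattern.
class UppercaseSpec extends AnyWordSpecLike with Matchers with Inspectors {

  "toUpperCase" should {
    "transform every input into its expected output" in {
      val mapping = Map("a" -> "A", "nexus" -> "NEXUS", "kg" -> "KG")
      // On failure, forAll reports the first offending entry with its index and value.
      forAll(mapping.toList) {
        case (input, expected) => input.toUpperCase shouldEqual expected
      }
    }
  }
}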
Example 31
Source File: StatisticsSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.indexing

import java.util.regex.Pattern.quote

import ch.epfl.bluebrain.nexus.commons.test.Resources
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig
import ch.epfl.bluebrain.nexus.kg.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.kg.indexing.Statistics.{CompositeViewStatistics, ViewStatistics}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.commons.circe.syntax._
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class StatisticsSpec
    extends AnyWordSpecLike
    with Matchers
    with OptionValues
    with TestHelper
    with Resources
    with Inspectors {

  "Statistics" should {
    val sourceId           = genIri
    val projectionId       = nxv.defaultElasticSearchIndex.value
    val single: Statistics = ViewStatistics(10L, 1L, 2L, 12L, None, None, None)
    val singleJson         = jsonContentOf("/view/statistics.json").removeKeys("projectionId")
    val composite: Statistics =
      CompositeViewStatistics(IdentifiedProgress(sourceId, projectionId, single.asInstanceOf[ViewStatistics]))
    val compositeJson    = jsonContentOf("/view/composite_statistics.json", Map(quote("{sourceId}") -> sourceId.asString))
    val printer: Printer = Printer.noSpaces.copy(dropNullValues = true)

    "be encoded" in {
      forAll(List(single -> singleJson, composite -> compositeJson)) {
        case (model, json) =>
          printer.print(model.asJson.sortKeys(AppConfig.orderedKeys)) shouldEqual printer.print(json)
      }
    }
  }

} 
Example 32
Source File: RefSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.resources

import ch.epfl.bluebrain.nexus.kg.resources.Ref._
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.commons.test.EitherValues
import ch.epfl.bluebrain.nexus.rdf.Iri.{AbsoluteIri, Urn}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class RefSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  "A Ref" should {

    "be constructed from an AbsoluteIri" in {
      val list = List[(AbsoluteIri, Ref)](
        url"http://ex.com?rev=1&other=value"             -> Revision(url"http://ex.com?other=value", 1L),
        url"http://ex.com?rev=1"                         -> Revision(url"http://ex.com", 1L),
        url"http://ex.com?tag=this&other=value"          -> Ref.Tag(url"http://ex.com?other=value", "this"),
        url"http://ex.com?rev=1&tag=this&other=value"    -> Revision(url"http://ex.com?other=value", 1L),
        url"http://ex.com?other=value"                   -> Latest(url"http://ex.com?other=value"),
        url"http://ex.com#fragment"                      -> Latest(url"http://ex.com#fragment"),
        Urn("urn:ex:a/b/c").rightValue                   -> Latest(Urn("urn:ex:a/b/c").rightValue),
        Urn("urn:ex:a/b/c?=rev=1").rightValue            -> Revision(Urn("urn:ex:a/b/c").rightValue, 1L),
        Urn("urn:ex:a?=tag=this&other=value").rightValue -> Ref.Tag(Urn("urn:ex:a?=other=value").rightValue, "this")
      )
      forAll(list) {
        case (iri, ref) => Ref(iri) shouldEqual ref
      }
    }

    "print properly" in {
      (Latest(url"http://ex.com#fragment"): Ref).show shouldEqual url"http://ex.com#fragment".show
      (Revision(url"http://ex.com?other=value", 1L): Ref).show shouldEqual url"http://ex.com?other=value".show + s" @ rev: '1'"
      (Ref.Tag(url"http://ex.com?other=value", "this"): Ref).show shouldEqual url"http://ex.com?other=value".show + s" @ tag: 'this'"
    }
  }

} 
Example 33
Source File: GroupBySpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.impl.stages

import scala.collection.mutable.ListBuffer
import scala.util.Failure
import org.scalacheck.Gen
import org.scalatest.Inspectors
import swave.core._
import swave.core.internal.testkit._

final class GroupBySpec extends SyncPipeSpec with Inspectors {

  implicit val env    = StreamEnv()
  implicit val config = PropertyCheckConfiguration(minSuccessful = 1000)

  implicit val integerInput = Gen.chooseNum(0, 999)

  "GroupBy" in check {
    testSetup
      .input[Int]
      .fixture(_.output[Spout[Int]](TestGeneration.Default.nonDroppingOutputScripts))
      .fixture(fd ⇒ Gen.listOfN(16, fd.output[Int](TestGeneration.Default.nonDroppingOutputScripts)))
      .param(Gen.oneOf(false, true))
      .param(Gen.oneOf(false, true))
      .prop
      .from { (in, out, allSubOuts, reopenCancelledSubs, eagerCancel) ⇒
        import TestFixture.State._

        val iter    = allSubOuts.iterator
        val subOuts = ListBuffer.empty[TestOutput[Int]]
        out.appendElemHandler { sub ⇒
          if (iter.hasNext) {
            val subOut = iter.next()
            subOuts += subOut
            inside(sub.drainTo(subOut.drain).value) {
              case Some(Failure(e)) ⇒
                if (e != TestError) e.printStackTrace()
                e shouldEqual TestError
              case _ ⇒ // ok here
            }
          } else sub.drainTo(Drain.ignore)
        }

        in.spout
          .groupBy(maxSubstreams = 256, reopenCancelledSubs, eagerCancel)(_ % 8)
          .drainTo(out.drain) shouldTerminate likeThis {
          case Cancelled ⇒ // input can be in any state
            forAll(subOuts) {
              _.terminalState should (be(Cancelled) or be(Completed) or be(Error(TestError)))
            }

          case Completed if subOuts.nonEmpty ⇒
            forAll(subOuts) {
              _.terminalState should (be(Cancelled) or be(Completed))
            }

          case Completed ⇒ in.scriptedSize shouldBe 0

          case error @ Error(TestError) ⇒
            forAll(subOuts) {
              _.terminalState should (be(Cancelled) or be(error))
            }
            in.terminalState should (be(Cancelled) or be(error))
        }

        val subResults = subOuts.map(_.received).filter(_.nonEmpty).groupBy(_.head % 8)
        val expected = in.produced.groupBy(_ % 8)
        val received =
          if (reopenCancelledSubs) subResults.map { case (key, seqs) ⇒ key → seqs.flatten }
          else subResults.map { case (key, seqs) ⇒ key → seqs.head }
        forAll(received) {
          case (key, receivedValues) ⇒
            receivedValues shouldEqual expected(key).take(receivedValues.size)
        }
      }
  }
} 
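GroupBySpec combines Inspectors.forAll with inside (pattern-matching destructuring, presumably mixed in via SyncPipeSpec from org.scalatest.Inside) and with compound matchers such as be(Cancelled) or be(Completed). The following is a stripped-down sketch of those two helpers outside of swave's testkit; all names are made up for the example.

import scala.util.{Failure, Try}
import org.scalatest.{Inside, Inspectors}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class InsideAndCompoundMatchersSketch extends AnyWordSpec with Matchers with Inside with Inspectors {

  private val failed: Try[Int]     = Failure(new IllegalStateException("boom"))
  private val states: List[String] = List("Cancelled", "Completed", "Completed")

  "the helpers" should {
    "destructure a value with inside" in {
      inside(failed) {
        case Failure(e) => e.getMessage shouldEqual "boom"
      }
    }
    "accept one of several outcomes per element" in {
      forAll(states) { s => s should (be("Cancelled") or be("Completed")) }
    }
  }
}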
Example 34
Source File: SplitSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.impl.stages

import scala.collection.mutable.ListBuffer
import scala.util.Failure
import org.scalacheck.Gen
import org.scalatest.Inspectors
import swave.core._
import swave.core.internal.testkit._

final class SplitSpec extends SyncPipeSpec with Inspectors {

  implicit val env    = StreamEnv()
  implicit val config = PropertyCheckConfiguration(minSuccessful = 1000)

  implicit val integerInput = Gen.chooseNum(0, 999)
  implicit val booleanInput = Gen.oneOf(true, false)

  "SplitWhen" - {

    "state space verification" in stateSpaceVerification(Pipe[Int].splitWhen(_ < 100, _))

    "Example 1" in {
      Spout(1 to 9)
        .splitWhen(_ % 4 == 0)
        .map(_.map(_.toString).reduce(_ + _))
        .flattenConcat()
        .drainToMkString(100, ",")
        .value
        .get
        .get shouldEqual "123,4567,89"
    }
  }

  "SplitAfter" - {

    "state space verification" in stateSpaceVerification(Pipe[Int].splitAfter(_ < 100, _))

    "Example 1" in {
      Spout(1 to 9)
        .splitAfter(_ % 4 == 0)
        .map(_.map(_.toString).reduce(_ + _))
        .flattenConcat()
        .drainToMkString(100, ",")
        .value
        .get
        .get shouldEqual "1234,5678,9"
    }
  }

  def stateSpaceVerification(pipe: Boolean => Pipe[Int, Spout[Int]]): Unit = check {
    testSetup
      .input[Int]
      .output[Spout[Int]]
      .fixture(fd ⇒ Gen.listOfN(10, fd.output[Int]))
      .param[Boolean]
      .prop
      .from { (in, out, allSubOuts, eagerCancel) ⇒
        import TestFixture.State._

        val iter    = allSubOuts.iterator
        val subOuts = ListBuffer.empty[TestOutput[Int]]
        out.appendElemHandler { sub ⇒
          if (iter.hasNext) {
            val subOut = iter.next()
            subOuts += subOut
            inside(sub.drainTo(subOut.drain).value) {
              case Some(Failure(e)) ⇒ e shouldEqual TestError
              case _                ⇒ // ok here
            }
          } else sub.drainTo(Drain.ignore)
        }

        in.spout.via(pipe(eagerCancel)).drainTo(out.drain) shouldTerminate likeThis {
          case Cancelled ⇒ // input can be in any state

          case Completed if subOuts.nonEmpty ⇒
            forAll(subOuts) {
              _.terminalState should (be(Cancelled) or be(Completed))
            }

          case Completed ⇒ in.scriptedSize shouldBe 0

          case error @ Error(TestError) ⇒
            if (subOuts.nonEmpty) {
              forAll(subOuts.init) {
                _.terminalState should (be(Cancelled) or be(Completed))
              }
            }
            in.terminalState should (be(Cancelled) or be(error))
        }
      }
  }
} 
Example 35
Source File: LazyStartSpoutSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.impl.stages

import org.scalacheck.Gen
import org.scalatest.Inspectors
import swave.core._

final class LazyStartSpoutSpec extends SyncPipeSpec with Inspectors {

  implicit val env    = StreamEnv()
  implicit val config = PropertyCheckConfiguration(minSuccessful = 100)

  implicit val integerInput = Gen.chooseNum(0, 999)

  "Spout.lazy" in check {
    testSetup
      .input[Int]
      .output[String]
      .prop
      .from { (in, out) ⇒
        Spout
          .lazyStart(() ⇒ in.spout)
          .map(_.toString)
          .drainTo(out.drain) shouldTerminate asScripted(in)

        out.received shouldEqual in.produced.take(out.scriptedSize).map(_.toString)
      }
  }
} 
Example 36
Source File: FlattenSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.impl.stages

import org.scalacheck.Gen
import org.scalatest.Inspectors
import swave.core.internal.testkit.{TestError, TestFixture}
import swave.core._

final class FlattenSpec extends SyncPipeSpec with Inspectors {

  implicit val env    = StreamEnv()
  implicit val config = PropertyCheckConfiguration(minSuccessful = 1000)

  implicit val integerInput = Gen.chooseNum(0, 999)

  "FlattenConcat" in check {
    testSetup
      .fixture(fd ⇒ fd.inputFromIterables(Gen.chooseNum(0, 3).flatMap(Gen.listOfN(_, fd.input[Int]))))
      .output[Int]
      .param(Gen.chooseNum(1, 3))
      .prop
      .from { (in, out, parallelism) ⇒
        import TestFixture.State._

        val allInputs          = in :: in.elements.toList
        var expectedResultSize = out.scriptedSize

        in.spout
          .map(_.spout)
          .flattenConcat(parallelism)
          .drainTo(out.drain) shouldTerminate likeThis {
          case Cancelled ⇒ // inputs can be in any state
          case Completed ⇒ forAll(allInputs) { _.terminalState shouldBe Completed }
          case error @ Error(TestError) ⇒
            forAtLeast(1, allInputs) { _.terminalState shouldBe error }
            expectedResultSize = out.size
        }

        out.received shouldEqual in.elements.flatMap(_.produced).take(expectedResultSize)
      }
  }

  "FlattenMerge" in check {
    testSetup
      .fixture(fd ⇒ fd.inputFromIterables(nonOverlappingIntTestInputs(fd, 0, 3)))
      .output[Int]
      .param(Gen.chooseNum(1, 3))
      .prop
      .from { (in, out, parallelism) ⇒
        import TestFixture.State._

        val allInputs          = in :: in.elements.toList
        var expectedResultSize = out.scriptedSize

        in.spout
          .map(_.spout)
          .flattenMerge(parallelism)
          .drainTo(out.drain) shouldTerminate likeThis {
          case Cancelled ⇒ // inputs can be in any state
          case Completed ⇒ forAll(allInputs) { _.terminalState shouldBe Completed }
          case error @ Error(TestError) ⇒
            forAtLeast(1, allInputs) { _.terminalState shouldBe error }
            expectedResultSize = out.size
        }

        // verify that we received the elements in the right order
        val received = out.received
        for (sub ← in.elements) {
          val produced = sub.produced.filter(received.contains).distinct
          received.filter(produced.contains).distinct shouldEqual produced
        }
      }
  }
} 
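FlattenSpec uses forAtLeast(1, allInputs) so that only one upstream needs to observe the error, whereas forAll requires the assertion to hold for every element. The quantified inspectors all share this shape; here is a minimal standalone sketch, independent of swave, with list contents chosen purely for illustration.

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class QuantifiedInspectorsSketch extends AnyWordSpec with Matchers with Inspectors {

  private val xs = List(1, 2, 3, 4, 5)

  "quantified inspectors" should {
    "require the assertion for every element (forAll)" in {
      forAll(xs) { x => x should be > 0 }
    }
    "require it for at least n elements (forAtLeast)" in {
      forAtLeast(1, xs) { x => x % 5 shouldBe 0 } // only 5 qualifies
    }
    "require it for exactly n elements (forExactly)" in {
      forExactly(2, xs) { x => x % 2 shouldBe 0 } // 2 and 4
    }
  }
}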
Example 37
Source File: ConstantTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.primary

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.primary.ConstantTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class ConstantTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A constant time series" should "produce constant values" in {
      forAll (ConstantTimeSeries(42).compute(dates)) { result => result._2 match {
         // A bare Boolean would be silently discarded by forAll, so assert explicitly.
         case Some(x) => x shouldBe 42.0 +- 0.0001
         case _ => fail("no value was produced")
      }}
   }

   "A constant time series" should "produce the same values by batch and individually" in {

      val ts = ConstantTimeSeries(42.0)
      dates.map(d => ts.compute(d)) shouldBe ts.compute(dates).map(_._2)
   }
} 
Example 38
Source File: HttpOriginMatcherSpec.scala    From akka-http-cors   with Apache License 2.0 5 votes vote down vote up
package ch.megard.akka.http.cors.scaladsl.model

import akka.http.scaladsl.model.headers.HttpOrigin
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HttpOriginMatcherSpec extends AnyWordSpec with Matchers with Inspectors {
  "The `*` matcher" should {
    "match any Origin" in {
      val origins = Seq(
        "http://localhost",
        "http://192.168.1.1",
        "http://test.com",
        "http://test.com:8080",
        "https://test.com",
        "https://test.com:4433"
      ).map(HttpOrigin.apply)

      forAll(origins) { o => HttpOriginMatcher.*.matches(o) shouldBe true }
    }

    "be printed as `*`" in {
      HttpOriginMatcher.*.toString shouldBe "*"
    }
  }

  "The strict() method" should {
    "build a strict matcher, comparing exactly the origins" in {
      val positives = Seq(
        "http://localhost",
        "http://test.com",
        "https://test.ch:12345",
        "https://*.test.uk.co"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://localhost:80",
        "https://localhost",
        "http://test.com:8080",
        "https://test.ch",
        "https://abc.test.uk.co"
      ).map(HttpOrigin.apply)

      val matcher = HttpOriginMatcher.strict(positives: _*)

      forAll(positives) { o => matcher.matches(o) shouldBe true }

      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://test.ch:12345"
    }
  }

  "The apply() method" should {
    "build a matcher accepting sub-domains with wildcards" in {
      val matcher = HttpOriginMatcher(
        Seq(
          "http://test.com",
          "https://test.ch:12345",
          "https://*.test.uk.co",
          "http://*.abc.com:8080",
          "http://*abc.com",        // Must start with `*.`
          "http://abc.*.middle.com" // The wildcard can't be in the middle
        ).map(HttpOrigin.apply): _*
      )

      val positives = Seq(
        "http://test.com",
        "https://test.ch:12345",
        "https://sub.test.uk.co",
        "https://sub1.sub2.test.uk.co",
        "http://sub.abc.com:8080"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://test.com:8080",
        "http://sub.test.uk.co", // must compare the scheme
        "http://sub.abc.com",    // must compare the port
        "http://abc.test.com",   // no wildcard
        "http://sub.abc.com",
        "http://subabc.com",
        "http://abc.sub.middle.com",
        "http://abc.middle.com"
      ).map(HttpOrigin.apply)

      forAll(positives) { o => matcher.matches(o) shouldBe true }

      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://*.test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://*.test.ch:12345"
    }
  }
} 
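HttpOriginMatcherSpec checks its positive and negative origin lists with forAll, which stops at the first failing element. When a complete report is more helpful, Inspectors also provides forEvery, which inspects all elements and enumerates every failure in the error message. A small sketch, not part of akka-http-cors:

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ForEverySketch extends AnyWordSpec with Matchers with Inspectors {

  private val origins = Seq("http://a.example", "http://b.example", "http://c.example")

  "forEvery" should {
    "inspect all elements rather than stopping at the first failure" in {
      // Same semantics as forAll when everything passes; on failure the
      // message lists every non-matching element instead of only the first.
      forEvery(origins) { o => o should startWith("http://") }
    }
  }
}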
Example 39
Source File: ScanResourceSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.unicomplex

import java.util.concurrent.TimeUnit
import javax.management.ObjectName

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.Waiters
import org.scalatest.{BeforeAndAfterAll, Inspectors, Matchers, WordSpecLike}
import org.squbs.lifecycle.GracefulStop

import scala.util.Try

object ScanResourceSpec {

  val jmxPrefix = "ScanResourceSpec"

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = scanResourceSpec
       |  ${JMX.prefixConfig} = true
       |}
       |
       |default-listener.bind-port = 0
    """.stripMargin)

  implicit val akkaTimeout: Timeout =
    Try(System.getProperty("test.timeout").toLong) map { millis =>
      akka.util.Timeout(millis, TimeUnit.MILLISECONDS)
    } getOrElse Timeouts.askTimeout

  val boot = UnicomplexBoot(config)
    .createUsing {(name, config) => ActorSystem(name, config)}
    .scanResources()
    .initExtensions.start()
}

class ScanResourceSpec extends TestKit(ScanResourceSpec.boot.actorSystem) with ImplicitSender with WordSpecLike
    with Matchers with Inspectors with BeforeAndAfterAll with Waiters {

  import ScanResourceSpec._
  import system.dispatcher

  "The scanned resource" must {

    "have some actors started" in {
      val w = new Waiter

      system.actorSelection("/user/ScanResourceCube").resolveOne().onComplete { result =>
        w {
          assert(result.isSuccess)
        }
        w.dismiss()
      }
      w.await()
    }

    "expose proper cube state through MXBean" in {
      import org.squbs.unicomplex.JMX._
      val cubeName = "ScanResourceCube"
      val cubesName = new ObjectName(prefix(system) + cubeStateName + cubeName)
      get(cubesName, "Name") should be (cubeName)
      get(cubesName, "CubeState") should be ("Active")
      val wellKnownActors = get(cubesName, "WellKnownActors").asInstanceOf[String]
      println(wellKnownActors)
      wellKnownActors should include ("Actor[akka://scanResourceSpec/user/ScanResourceCube/Prepender#")
      wellKnownActors should include ("Actor[akka://scanResourceSpec/user/ScanResourceCube/Appender#")
    }
  }

  override protected def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }
} 
Example 40
Source File: TaggingAdapterSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.io

import java.time.Instant

import akka.persistence.journal.Tagged
import ch.epfl.bluebrain.nexus.commons.test.EitherValues
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent.AclDeleted
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent.PermissionsDeleted
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent.RealmDeprecated
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.Label
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  private val pd = PermissionsDeleted(2L, Instant.EPOCH, Anonymous)
  private val ad = AclDeleted(Path("/a/b/c").rightValue, 2L, Instant.EPOCH, Anonymous)
  private val rd = RealmDeprecated(Label.unsafe("blah"), 2L, Instant.EPOCH, Anonymous)

  private val data = Map[AnyRef, (String, AnyRef)](
    pd  -> ("permissions-event" -> Tagged(pd, Set("permissions", "event"))),
    ad  -> ("acl-event"         -> Tagged(ad, Set("acl", "event"))),
    rd  -> ("realm-event"       -> Tagged(rd, Set("realm", "event"))),
    "a" -> (""                  -> "a")
  )

  "A TaggingAdapter" should {
    val adapter = new TaggingAdapter
    "return the correct manifests" in {
      forAll(data.toList) {
        case (event, (manifest, _)) => adapter.manifest(event) shouldEqual manifest
      }
    }
    "return the correct transformed event" in {
      forAll(data.toList) {
        case (event, (_, transformed)) => adapter.toJournal(event) shouldEqual transformed
      }
    }
  }

} 
Example 41
Source File: ResourceFSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.{Clock, Instant, ZoneId}

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Vocabulary._
import ch.epfl.bluebrain.nexus.iam.testsyntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

//noinspection TypeAnnotation
class ResourceFSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "A ResourceMetadata" should {
    val user          = User("mysubject", "myrealm")
    val user2         = User("mysubject2", "myrealm")
    implicit val http = HttpConfig("some", 8080, "v1", "http://nexus.example.com")
    val clock: Clock  = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())
    val instant       = clock.instant()
    val id            = url"http://example.com/id"
    val printer       = Printer.spaces2.copy(dropNullValues = true)

    "be converted to Json correctly" when {
      "using multiple types" in {
        val json  = jsonContentOf("/resources/write-response.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList, nxv.Realm), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using a single type" in {
        val json  = jsonContentOf("/resources/write-response-singletype.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using no types" in {
        val json  = jsonContentOf("/resources/write-response-notypes.json")
        val model = ResourceMetadata(id, 1L, Set(), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
    }
  }
} 
Example 42
Source File: GrantTypeSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.commons.test.EitherValues
import ch.epfl.bluebrain.nexus.iam.types.GrantType._
import io.circe.{Decoder, Encoder, Json}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.Inspectors

class GrantTypeSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  "A GrantType" when {
    "using Camel encoders" should {
      import GrantType.Camel._
      val map = Map(
        AuthorizationCode -> "authorizationCode",
        Implicit          -> "implicit",
        Password          -> "password",
        ClientCredentials -> "clientCredentials",
        DeviceCode        -> "deviceCode",
        RefreshToken      -> "refreshToken"
      )
      "be encoded properly" in {
        val encoder = implicitly[Encoder[GrantType]]
        forAll(map.toList) {
          case (gt, expected) =>
            encoder(gt) shouldEqual Json.fromString(expected)
        }
      }
      "be decoded properly" in {
        val decoder = implicitly[Decoder[GrantType]]
        forAll(map.toList) {
          case (expected, gt) =>
            decoder.decodeJson(Json.fromString(gt)).rightValue shouldEqual expected
        }
      }
      "fail to decode for unknown string" in {
        val decoder = implicitly[Decoder[GrantType]]
        decoder.decodeJson(Json.fromString("incorrect")).leftValue
      }
    }
    "using Snake encoders" should {
      import GrantType.Snake._
      val map = Map(
        AuthorizationCode -> "authorization_code",
        Implicit          -> "implicit",
        Password          -> "password",
        ClientCredentials -> "client_credentials",
        DeviceCode        -> "device_code",
        RefreshToken      -> "refresh_token"
      )
      "be encoded properly" in {
        val encoder = implicitly[Encoder[GrantType]]
        forAll(map.toList) {
          case (gt, expected) =>
            encoder(gt) shouldEqual Json.fromString(expected)
        }
      }
      "be decoded properly" in {
        val decoder = implicitly[Decoder[GrantType]]
        forAll(map.toList) {
          case (expected, gtString) =>
            decoder.decodeJson(Json.fromString(gtString)).rightValue shouldEqual expected
        }
      }
      "fail to decode for unknown string" in {
        val decoder = implicitly[Decoder[GrantType]]
        decoder.decodeJson(Json.fromString("incorrect")).leftValue
      }
    }
  }

} 
Example 43
Source File: IdentitySpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.types.Identity.{Anonymous, Authenticated, Group, Subject, User}
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class IdentitySpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "An Identity" should {
    val user          = User("mysubject", "myrealm")
    val group         = Group("mygroup", "myrealm")
    val authenticated = Authenticated("myrealm")

    implicit val http: HttpConfig = HttpConfig("some", 8080, "v1", "http://nexus.example.com")

    "converted to Json" in {
      val userJson          = jsonContentOf("/identities/produce/user.json")
      val groupJson         = jsonContentOf("/identities/produce/group.json")
      val authenticatedJson = jsonContentOf("/identities/produce/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/produce/anonymous.json")

      val cases =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)

      forAll(cases) {
        case (model: Subject, json) =>
          model.asJson shouldEqual json
          (model: Identity).asJson shouldEqual json
        case (model: Identity, json) => model.asJson shouldEqual json
      }
    }
    "convert from Json" in {
      val userJson          = jsonContentOf("/identities/consume/user.json")
      val groupJson         = jsonContentOf("/identities/consume/group.json")
      val authenticatedJson = jsonContentOf("/identities/consume/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/consume/anonymous.json")
      val cases =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)
      forAll(cases) {
        case (model: Subject, json) =>
          json.as[Subject].rightValue shouldEqual model
          json.as[Identity].rightValue shouldEqual (model: Identity)
        case (model: Identity, json) => json.as[Identity].rightValue shouldEqual model

      }
    }
  }
} 
Example 44
Source File: LabelSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Randomness}
import ch.epfl.bluebrain.nexus.rdf.Iri.{Path, Url}
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class LabelSpec extends AnyWordSpecLike with Matchers with Randomness with Inspectors with EitherValues {

  "A Label" should {
    "be constructed correctly from alphanumeric chars" in {
      forAll(1 to 32) { length =>
        val string = genString(length, Vector.range('a', 'z') ++ Vector.range('0', '9'))
        Label.unsafe(string).value shouldEqual string
        Label(string).rightValue.value shouldEqual string
      }
    }
    "fail to construct for illegal formats" in {
      val cases = List("", " ", "a ", " a", "a-", "_")
      forAll(cases) { string =>
        intercept[IllegalArgumentException](Label.unsafe(string))
        Label(string).leftValue shouldEqual s"Label '$string' does not match pattern '${Label.regex.regex}'"
      }
    }
    "return its path representation" in {
      Label.unsafe("abc").toPath shouldEqual Path("/abc").rightValue
    }
    "return an iri representation" in {
      forAll(List("http://localhost", "http://localhost/")) { str =>
        val base  = Url(str).rightValue
        val label = Label.unsafe("abc")
        label.toIri(base) shouldEqual Url("http://localhost/abc").rightValue
      }
    }
  }

} 
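LabelSpec calls forAll(1 to 32) { length => ... }, a reminder that the inspectors are not limited to List: any traversable collection (a Range here, a Map converted with .toList elsewhere in these examples) can be inspected. A minimal sketch, unrelated to nexus-iam:

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class RangeInspectionSketch extends AnyWordSpec with Matchers with Inspectors {

  "forAll" should {
    "inspect a Range just like a List" in {
      forAll(1 to 32) { length =>
        // "a" * length builds a string of the requested length.
        ("a" * length).length shouldEqual length
      }
    }
    "inspect Map entries via toList" in {
      forAll(Map(1 -> "1", 2 -> "2").toList) {
        case (k, v) => v shouldEqual k.toString
      }
    }
  }
}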
Example 45
Source File: AccessControlListSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.acls

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.types.Identity._
import ch.epfl.bluebrain.nexus.iam.types.{Identity, Permission}
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class AccessControlListSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with EitherValues
    with OptionValues
    with Resources {

  "An Access Control List" should {
    val user: Identity  = User("uuid", "realm")
    val group: Identity = Group("mygroup", "myrealm")
    val readWrite       = Set(Permission("acls/read").value, Permission("acls/write").value)
    val manage          = Set(Permission("acls/manage").value)

    implicit val http: HttpConfig = HttpConfig("some", 8080, "v1", "http://nexus.example.com")

    "converted to Json" in {
      val acls = AccessControlList(user -> readWrite, group -> manage)
      val json = jsonContentOf("/acls/acl.json")
      acls.asJson shouldEqual json
    }
    "convert from Json" in {
      val acls = AccessControlList(user -> readWrite, group -> manage)
      val json = jsonContentOf("/acls/acl.json")
      json.as[AccessControlList].rightValue shouldEqual acls
    }

    "remove ACL" in {
      val read  = Permission.unsafe("read")
      val write = Permission.unsafe("write")
      val other = Permission.unsafe("other")
      val acl   = AccessControlList(user -> Set(read, write), group -> Set(other))
      val acl2  = AccessControlList(group -> Set(read))

      acl -- acl2 shouldEqual acl
      acl -- AccessControlList(user -> Set(read), group -> Set(other)) shouldEqual AccessControlList(user -> Set(write))
      acl -- AccessControlList(user -> Set(read)) shouldEqual AccessControlList(user -> Set(write), group -> Set(other))
    }
  }
} 
Example 46
Source File: IdentitySpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.client.types

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.client.config.IamClientConfig
import ch.epfl.bluebrain.nexus.iam.client.types.Identity._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.syntax._
import org.scalatest.{Inspectors, OptionValues}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class IdentitySpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with OptionValues
    with Resources
    with EitherValues {

  "An identity" should {
    implicit val config: IamClientConfig =
      IamClientConfig(url"http://nexus.example.com", url"http://internal.nexus.example.com", "v1")
    val user          = User("mysubject", "myrealm")
    val group         = Group("mygroup", "myrealm")
    val authenticated = Authenticated("myrealm")

    "be created from ids" in {
      val cases = List[(AbsoluteIri, Identity)](
        url"http://nexus.example.com/v1/realms/myrealm/users/mysubject" -> user,
        url"https://random.com/v1/realms/myrealm/users/mysubject"       -> user,
        url"http://nexus.example.com/v1/realms/myrealm/groups/mygroup"  -> group,
        url"https://random.com/v1/realms/myrealm/groups/mygroup"        -> group,
        url"http://nexus.example.com/v1/realms/myrealm/authenticated"   -> authenticated,
        url"https://random.com/v1/realms/myrealm/authenticated"         -> authenticated,
        url"http://nexus.example.com/v1/anonymous"                      -> Anonymous,
        url"https://random.com/v1/anonymous"                            -> Anonymous
      )
      forAll(cases) {
        case (iri, identity) => Identity(iri).value shouldEqual identity
      }
    }

    "converted to Json" in {
      val userJson          = jsonContentOf("/identities/produce/user.json")
      val groupJson         = jsonContentOf("/identities/produce/group.json")
      val authenticatedJson = jsonContentOf("/identities/produce/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/produce/anonymous.json")

      val cases =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)

      forAll(cases) {
        case (model: Subject, json) =>
          model.asJson shouldEqual json
          (model: Identity).asJson shouldEqual json
        case (model: Identity, json) => model.asJson shouldEqual json
      }
    }
    "convert from Json" in {
      val userJson          = jsonContentOf("/identities/consume/user.json")
      val groupJson         = jsonContentOf("/identities/consume/group.json")
      val authenticatedJson = jsonContentOf("/identities/consume/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/consume/anonymous.json")
      val cases =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)
      forAll(cases) {
        case (model: Subject, json) =>
          json.as[Subject].rightValue shouldEqual model
          json.as[Identity].rightValue shouldEqual (model: Identity)
        case (model: Identity, json) => json.as[Identity].rightValue shouldEqual model
      }
    }
  }
} 
Example 47
Source File: DefaultGeneratorTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.generators.missing

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.missing.DefaultGenerator
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._

class DefaultGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |  "name": "default-generator",
        |  "type": "first-of",
        |  "generators": ["daily-generator", "random-generator"]
        |}
      """.stripMargin

   "A Default generator" should "be correctly read from a json document" in {
      val generator = DefaultGenerator(source.parseJson)

      generator.name shouldBe Some("default-generator")
      generator.gens shouldBe Seq(Left("daily-generator"), Left("random-generator"))
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe DefaultGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new DefaultGenerator(
         Some("default-generator"),
         Seq(Left("daily-generator"), Left("random-generator"))
      )
      generator shouldBe DefaultGenerator(generator.toJson)
   }
} 
Example 48
Source File: LimitedGeneratorTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.generators.missing

import be.cetic.tsimulus.config.GeneratorFormat
import be.cetic.tsimulus.generators.missing.LimitedGenerator
import org.joda.time.LocalDateTime
import org.scalatest.{FlatSpec, Inspectors, Matchers}
import spray.json._


class LimitedGeneratorTest extends FlatSpec with Matchers with Inspectors
{
   val source =
      """
        |{
        |  "name": "limited-generator",
        |  "type": "limited",
        |  "generator": "daily-generator",
        |  "from": "2016-01-01 00:00:00.000",
        |  "to": "2016-04-23 01:23:45.678"
        |}
        |
      """.stripMargin

   "A Limited generator" should "be correctly read from a json document" in {
      val generator = LimitedGenerator(source.parseJson)

      generator.name shouldBe Some("limited-generator")
      generator.generator shouldBe Left("daily-generator")
      generator.from shouldBe Some(new LocalDateTime(2016, 1, 1, 0, 0, 0))
      generator.to shouldBe Some(new LocalDateTime(2016, 4, 23, 1, 23, 45, 678))
   }

   it should "be extracted from the global extractor without any error" in {
      noException should be thrownBy GeneratorFormat.read(source.parseJson)
   }

   it should "be correctly extracted from the global extractor" in {
      GeneratorFormat.read(source.parseJson) shouldBe LimitedGenerator(source.parseJson)
   }

   it should "be correctly exported to a json document" in {
      val generator = new LimitedGenerator(
         Some("limited-generator"),
         Left("daily-generator"),
         Some(new LocalDateTime(2016, 1, 1, 0, 0, 0)),
         Some(new LocalDateTime(2016, 4, 23, 1, 23, 45, 678))
      )
      generator shouldBe LimitedGenerator(generator.toJson)
   }
} 
Example 49
Source File: ProjectionProgressSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.sourcing.projections

import java.util.UUID

import akka.persistence.query.{Offset, Sequence, TimeBasedUUID}
import ch.epfl.bluebrain.nexus.sourcing.projections.ProjectionProgress._
import ch.epfl.bluebrain.nexus.sourcing.projections.implicits._
import io.circe.Encoder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{EitherValues, Inspectors}

class ProjectionProgressSpec extends AnyWordSpecLike with Matchers with Inspectors with TestHelpers with EitherValues {

  "A ProjectionProgress" should {
    val mapping = Map(
      OffsetProgress(Sequence(14L), 2, 0, 1)                                                            ->
        jsonContentOf("/indexing/sequence-offset-progress.json"),
      OffsetProgress(TimeBasedUUID(UUID.fromString("ee7e4360-39ca-11e9-9ed5-dbdaa32f8986")), 32, 5, 10) ->
        jsonContentOf("/indexing/timebaseduuid-offset-progress.json"),
      NoProgress                                                                                        ->
        jsonContentOf("/indexing/no-offset-progress.json"),
      OffsetsProgress(Map("noOffset" -> NoProgress, "other" -> OffsetProgress(Sequence(2L), 10L, 2L, 0L))) ->
        jsonContentOf("/indexing/offsets-progress.json")
    )

    "properly encode progress values" in {
      forAll(mapping.toList) {
        case (prog, repr) =>
          Encoder[ProjectionProgress].apply(prog) shouldEqual repr
      }
    }

    "properly decode progress values" in {
      forAll(mapping.toList) {
        case (prog, repr) =>
          repr.as[ProjectionProgress].rightValue shouldEqual prog
      }
    }

    "Add progress" in {
      val progress =
        OffsetsProgress(Map("noOffset" -> NoProgress, "other" -> OffsetProgress(Sequence(2L), 10L, 2L, 0L)))
      progress + ("noOffset", Sequence(1L), ProgressStatus.Failed("some error")) shouldEqual
        OffsetsProgress(
          Map(
            "noOffset" -> OffsetProgress(Sequence(1L), 1L, 0L, 1L),
            "other"    -> OffsetProgress(Sequence(2L), 10L, 2L, 0L)
          )
        )
      progress + ("other", Sequence(3L), ProgressStatus.Discarded) shouldEqual
        OffsetsProgress(Map("noOffset" -> NoProgress, "other" -> OffsetProgress(Sequence(3L), 11L, 3L, 0L)))
    }

    "fetch minimum progress" in {
      val progress = OffsetsProgress(
        Map(
          "one"   -> OffsetProgress(Sequence(1L), 2L, 1L, 0L),
          "other" -> OffsetProgress(Sequence(2L), 10L, 2L, 0L),
          "a"     -> OffsetProgress(Sequence(0L), 0L, 0L, 0L)
        )
      )
      progress.minProgressFilter(_.length > 1) shouldEqual OffsetProgress(Sequence(1L), 2L, 1L, 0L)
      progress.minProgress shouldEqual OffsetProgress(Sequence(0L), 0L, 0L, 0L)
    }

    "test TimeBasedUUIDd ordering" in {
      val time1 =
        TimeBasedUUID(UUID.fromString("49225740-2019-11ea-a752-ffae2393b6e4")) // 2019-12-16T15:32:36.148Z[UTC]
      val time2 =
        TimeBasedUUID(UUID.fromString("91be23d0-2019-11ea-a752-ffae2393b6e4")) // 2019-12-16T15:34:37.965Z[UTC]
      val time3 =
        TimeBasedUUID(UUID.fromString("91f95810-2019-11ea-a752-ffae2393b6e4")) // 2019-12-16T15:34:38.353Z[UTC]
      val offset1: Offset = time1
      val offset2: Offset = time2
      val offset3: Offset = time3
      time1.asInstant.isBefore(time2.asInstant) shouldEqual true
      time2.asInstant.isBefore(time3.asInstant) shouldEqual true
      offset1.gt(offset2) shouldEqual false
      offset3.gt(offset2) shouldEqual true
      List(time2, time1, time3).sorted(offsetOrdering) shouldEqual List(time1, time2, time3)
    }
  }
} 
Example 50
Source File: AccessControlListSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.acls

import ch.epfl.bluebrain.nexus.iam.types.Identity._
import ch.epfl.bluebrain.nexus.iam.types.{Identity, Permission}
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.util.{EitherValues, Resources}
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class AccessControlListSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with EitherValues
    with OptionValues
    with Resources {

  "An Access Control List" should {
    val user: Identity  = User("uuid", "realm")
    val group: Identity = Group("mygroup", "myrealm")
    val readWrite       = Set(Permission("acls/read").value, Permission("acls/write").value)
    val manage          = Set(Permission("acls/manage").value)

    implicit val http: HttpConfig = HttpConfig("some", 8080, "v1", "http://nexus.example.com")

    "converted to Json" in {
      val acls = AccessControlList(user -> readWrite, group -> manage)
      val json = jsonContentOf("/acls/acl.json")
      acls.asJson shouldEqual json
    }
    "convert from Json" in {
      val acls = AccessControlList(user -> readWrite, group -> manage)
      val json = jsonContentOf("/acls/acl.json")
      json.as[AccessControlList].rightValue shouldEqual acls
    }

    "remove ACL" in {
      val read  = Permission.unsafe("read")
      val write = Permission.unsafe("write")
      val other = Permission.unsafe("other")
      val acl   = AccessControlList(user -> Set(read, write), group -> Set(other))
      val acl2  = AccessControlList(group -> Set(read))

      acl -- acl2 shouldEqual acl
      acl -- AccessControlList(user -> Set(read), group -> Set(other)) shouldEqual AccessControlList(user -> Set(write))
      acl -- AccessControlList(user -> Set(read)) shouldEqual AccessControlList(user -> Set(write), group -> Set(other))
    }
  }
} 
Example 51
Source File: ResourceDecoderSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.serializers

import java.time.Instant

import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.{Id, ResourceF, ResourceGraph}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Decoder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class ResourceDecoderSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with EitherValues
    with ScalatestRouteTest
    with OptionValues
    with Resources
    with TestHelper {

  private val json                                     = jsonContentOf("/serialization/resource.json")
  private val projectRef                               = ProjectRef(genUUID)
  private val id                                       = url"http://example.com/prefix/myId"
  private val graph                                    = json.toGraph(id).rightValue
  implicit private val decoder: Decoder[ResourceGraph] = ResourceF.resourceGraphDecoder(projectRef)

  private val model = ResourceF(
    Id(projectRef, url"http://example.com/prefix/myId"),
    1L,
    Set(url"https://example.com/vocab/A", url"https://example.com/vocab/B"),
    deprecated = false,
    Map.empty,
    None,
    Instant.parse("2020-01-17T12:45:01.479676Z"),
    Instant.parse("2020-01-17T13:45:01.479676Z"),
    User("john", "bbp"),
    User("brenda", "bbp"),
    Schemas.unconstrainedRef,
    graph
  )

  "A resource" should {
    "be decoded" in {
      json.as[ResourceGraph].rightValue shouldEqual model
    }
  }

} 
Example 52
Source File: StorageCacheSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import java.nio.file.Paths
import java.time.Clock

import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.DiskStorage
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class StorageCacheSpec
    extends ActorSystemFixture("StorageCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val clock: Clock             = Clock.systemUTC
  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val time   = clock.instant()
  val lastId = url"http://example.com/lastA"
  // initialInstant.minusSeconds(1L + genInt().toLong)

  val tempStorage = DiskStorage(ref1, genIri, 1L, false, true, "alg", Paths.get("/tmp"), read, write, 1024L)

  val lastStorageProj1 = tempStorage.copy(id = lastId)
  val lastStorageProj2 = tempStorage.copy(ref = ref2, id = lastId)

  val storagesProj1: List[DiskStorage] = List.fill(5)(tempStorage.copy(id = genIri)) :+ lastStorageProj1
  val storagesProj2: List[DiskStorage] = List.fill(5)(tempStorage.copy(ref = ref2, id = genIri)) :+ lastStorageProj2

  private val cache = StorageCache[Task]

  "StorageCache" should {

    "index storages" in {
      forAll((storagesProj1 ++ storagesProj2).zipWithIndex) {
        case (storage, index) =>
          implicit val instant = time.plusSeconds(index.toLong)
          cache.put(storage).runToFuture.futureValue
          cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual Some(storage)
      }
    }

    "get latest default storage" in {
      cache.getDefault(ref1).runToFuture.futureValue shouldEqual Some(lastStorageProj1)
      cache.getDefault(ref2).runToFuture.futureValue shouldEqual Some(lastStorageProj2)
      cache.getDefault(ProjectRef(genUUID)).runToFuture.futureValue shouldEqual None
    }

    "list storages" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs storagesProj2
    }

    "deprecate storage" in {
      val storage          = storagesProj1.head
      implicit val instant = time.plusSeconds(30L)
      cache.put(storage.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1.filterNot(_ == storage)
    }
  }
} 
Example 53
Source File: ResolverCacheSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver       = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver   =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
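Note: the spec above shows the core Inspectors idiom that recurs throughout these examples: mix the trait into the suite and call forAll over an ordinary collection (often a Map converted with .toList and destructured with a case pattern), so a failure pinpoints the offending element instead of aborting on an opaque aggregate check. Below is a minimal, self-contained sketch of that idiom using the same ScalaTest 3.x imports as the spec above; the suite name and sample data are invented for illustration.

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ForAllSketchSpec extends AnyWordSpec with Matchers with Inspectors {

  "Inspectors.forAll" should {
    "apply the block to every element and report the failing one" in {
      // toy data for illustration
      forAll(List(2, 4, 6, 8))(n => n % 2 shouldBe 0)
    }

    "destructure key/value pairs when given a Map converted to a List" in {
      forAll(Map("a" -> 1, "bb" -> 2).toList) {
        case (key, value) => key.length shouldBe value
      }
    }
  }
}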
Example 54
Source File: TaggingAdapterSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.persistence

import java.time.{Clock, Instant, ZoneId}

import akka.persistence.journal.Tagged
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas._
import ch.epfl.bluebrain.nexus.kg.persistence.TaggingAdapterSpec.Other
import ch.epfl.bluebrain.nexus.kg.resources.Event._
import ch.epfl.bluebrain.nexus.kg.resources.{Id, OrganizationRef, Ref}
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.Json
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with TestHelper {

  "A TaggingAdapter" should {
    val clock = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())

    def genJson(): Json = Json.obj("key" -> Json.fromString(genString()))

    val adapter = new TaggingAdapter()
    val orgRef  = OrganizationRef(genUUID)
    val id      = Id(ProjectRef(genUUID), nxv.projects.value)

    val mapping = Map(
      Set(
        s"type=${nxv.Schema.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      )                                                                                                   ->
        Created(
          id,
          orgRef,
          Ref(shaclSchemaUri),
          Set(nxv.Schema.value, nxv.Resource.value),
          genJson(),
          clock.instant(),
          Anonymous
        ),
      Set(
        s"type=${nxv.Resolver.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      )                                                                                                   ->
        Updated(id, orgRef, 1L, Set(nxv.Resource.value, nxv.Resolver.value), genJson(), clock.instant(), Anonymous),
      Set(s"type=${nxv.Resource.value.show}", s"project=${id.parent.id}", s"org=${orgRef.show}", "event") ->
        Deprecated(id, orgRef, 1L, Set(nxv.Resource.value), clock.instant(), Anonymous),
      Set(s"project=${id.parent.id}", s"org=${orgRef.show}", "event")                                     ->
        TagAdded(id, orgRef, 2L, 1L, "tag", clock.instant(), Anonymous)
    )

    "set the appropriate tags" in {
      forAll(mapping.toList) {
        case (tags, ev) => adapter.toJournal(ev) shouldEqual Tagged(ev, tags)
      }
    }

    "return an empty manifest" in {
      adapter.manifest(Other(genString())) shouldEqual ""
    }
  }
}

object TaggingAdapterSpec {
  final private[persistence] case class Other(value: String)

} 
Example 55
Source File: StatisticsSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.indexing

import java.util.regex.Pattern.quote

import ch.epfl.bluebrain.nexus.commons.test.Resources
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.indexing.Statistics.{CompositeViewStatistics, ViewStatistics}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.commons.circe.syntax._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class StatisticsSpec
    extends AnyWordSpecLike
    with Matchers
    with OptionValues
    with TestHelper
    with Resources
    with Inspectors {

  "Statistics" should {
    val sourceId              = genIri
    val projectionId          = nxv.defaultElasticSearchIndex.value
    val single: Statistics    = ViewStatistics(10L, 1L, 2L, 12L, None, None, None)
    val singleJson            = jsonContentOf("/view/statistics.json").removeKeys("projectionId")
    val composite: Statistics =
      CompositeViewStatistics(IdentifiedProgress(sourceId, projectionId, single.asInstanceOf[ViewStatistics]))
    val compositeJson         = jsonContentOf("/view/composite_statistics.json", Map(quote("{sourceId}") -> sourceId.asString))
    val printer: Printer      = Printer.noSpaces.copy(dropNullValues = true)

    "be encoded" in {
      forAll(List(single -> singleJson, composite -> compositeJson)) {
        case (model, json) =>
          printer.print(model.asJson.sortKeys(ServiceConfig.orderedKeys)) shouldEqual printer.print(json)
      }
    }
  }

} 
Example 56
Source File: RefSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.resources

import ch.epfl.bluebrain.nexus.kg.resources.Ref._
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.commons.test.EitherValues
import ch.epfl.bluebrain.nexus.rdf.Iri.{AbsoluteIri, Urn}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class RefSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  "A Ref" should {

    "be constructed from an AbsoluteIri" in {
      val list = List[(AbsoluteIri, Ref)](
        url"http://ex.com?rev=1&other=value"             -> Revision(url"http://ex.com?other=value", 1L),
        url"http://ex.com?rev=1"                         -> Revision(url"http://ex.com", 1L),
        url"http://ex.com?tag=this&other=value"          -> Ref.Tag(url"http://ex.com?other=value", "this"),
        url"http://ex.com?rev=1&tag=this&other=value"    -> Revision(url"http://ex.com?other=value", 1L),
        url"http://ex.com?other=value"                   -> Latest(url"http://ex.com?other=value"),
        url"http://ex.com#fragment"                      -> Latest(url"http://ex.com#fragment"),
        Urn("urn:ex:a/b/c").rightValue                   -> Latest(Urn("urn:ex:a/b/c").rightValue),
        Urn("urn:ex:a/b/c?=rev=1").rightValue            -> Revision(Urn("urn:ex:a/b/c").rightValue, 1L),
        Urn("urn:ex:a?=tag=this&other=value").rightValue -> Ref.Tag(Urn("urn:ex:a?=other=value").rightValue, "this")
      )
      forAll(list) {
        case (iri, ref) => Ref(iri) shouldEqual ref
      }
    }

    "print properly" in {
      (Latest(url"http://ex.com#fragment"): Ref).show shouldEqual url"http://ex.com#fragment".show
      (Revision(
        url"http://ex.com?other=value",
        1L
      ): Ref).show shouldEqual url"http://ex.com?other=value".show + s" @ rev: '1'"
      (Ref.Tag(
        url"http://ex.com?other=value",
        "this"
      ): Ref).show shouldEqual url"http://ex.com?other=value".show + s" @ tag: 'this'"
    }
  }

} 
Example 57
Source File: RejectionHandlingSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.commons.http

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Rejection
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.util.EitherValues
import com.typesafe.config.{Config, ConfigFactory}
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._
import io.circe.{Json, Printer}
import io.circe.parser._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class RejectionHandlingSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with ScalatestRouteTest
    with EitherValues {

  class Custom extends Rejection

  override def testConfig: Config       = ConfigFactory.empty()
  implicit private val printer: Printer = Printer.spaces2.copy(dropNullValues = true)

  "A default rejection handler" should {
    val handler =
      RejectionHandling { _: Custom =>
        StatusCodes.InternalServerError -> Json.obj("reason" -> Json.fromString("custom"))
      }.withFallback(RejectionHandling.notFound)

    "handle not found" in {
      val route = handleRejections(handler)(pathEnd(complete("ok")))
      Get("/a") ~> route ~> check {
        val expected =
          s"""{
             |  "@context": "https://bluebrain.github.io/nexus/contexts/error.json",
             |  "@type": "NotFound",
             |  "reason": "The requested resource could not be found."
             |}""".stripMargin
        status shouldEqual StatusCodes.NotFound
        responseAs[Json] shouldEqual parse(expected).rightValue
      }
    }

    "handle missing query param" in {
      val route = handleRejections(handler)(parameter("rev".as[Long])(_ => complete("ok")))
      Get("/a") ~> route ~> check {
        val expected =
          s"""{
             |  "@context": "https://bluebrain.github.io/nexus/contexts/error.json",
             |  "@type": "MissingQueryParam",
             |  "reason": "Request is missing required query parameter 'rev'."
             |}""".stripMargin
        status shouldEqual StatusCodes.BadRequest
        responseAs[Json] shouldEqual parse(expected).rightValue
      }
    }

    "handle custom" in {
      val route = handleRejections(handler)(reject(new Custom))
      Get("/a") ~> route ~> check {
        val expected =
          s"""{
             |  "reason": "custom"
             |}""".stripMargin
        status shouldEqual StatusCodes.InternalServerError
        responseAs[Json] shouldEqual parse(expected).rightValue
      }
    }
  }

} 
Example 58
Source File: PrefixDirectivesSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.commons.http.directives

import akka.http.scaladsl.model.{StatusCodes, Uri}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.http.directives.PrefixDirectives._
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers

class PrefixDirectivesSpec extends AnyWordSpecLike with Matchers with Inspectors with ScalatestRouteTest {

  override def testConfig: Config = ConfigFactory.empty()

  "A PrefixDirective" should {

    "match the prefix uri" in {
      forAll(
        Map(
          ""         -> "",
          "/"        -> "",
          "///"      -> "",
          "/dev"     -> "/dev",
          "/dev/"    -> "/dev",
          "/dev///"  -> "/dev",
          "/dev/sn/" -> "/dev/sn"
        ).toList
      ) {
        case (suffix, prefix) =>
          val uri   = Uri("http://localhost:80" + suffix)
          val route = uriPrefix(uri) {
            path("remainder") {
              get {
                complete(StatusCodes.OK)
              }
            }
          }

          Get(prefix + "/remainder") ~> route ~> check {
            status shouldEqual StatusCodes.OK
          }
      }
    }
  }
} 
Example 59
Source File: StaticResourceIamAdminRoutesSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.commons.http.routes

import java.util.UUID

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.http.RdfMediaTypes
import org.scalatest.Inspectors
import java.util.regex.Pattern.quote

import ch.epfl.bluebrain.nexus.util.Resources
import com.typesafe.config.{Config, ConfigFactory}
import io.circe.parser.parse
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class StaticResourceIamAdminRoutesSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with ScalatestRouteTest
    with Resources {

  val baseUri = "http://nexus.example.com/v1"

  override def testConfig: Config = ConfigFactory.empty()

  val staticRoutes = new StaticResourceRoutes(
    Map(
      "/contexts/context1" -> "/commons/static-routes-test/contexts/context1.json",
      "/contexts/context2" -> "/commons/static-routes-test/contexts/context2.json",
      "/schemas/schema1"   -> "/commons/static-routes-test/schemas/schema1.json",
      "/schemas/schema2"   -> "/commons/static-routes-test/schemas/schema2.json"
    ),
    "test",
    baseUri
  ).routes

  val baseReplacement = Map(
    quote("{{base}}") -> baseUri
  )
  val files           = Map(
    "/v1/test/contexts/context1" -> jsonContentOf(
      "/commons/static-routes-test/contexts/context1.json",
      baseReplacement
    ),
    "/v1/test/contexts/context2" -> jsonContentOf(
      "/commons/static-routes-test/contexts/context2.json",
      baseReplacement
    ),
    "/v1/test/schemas/schema1"   -> jsonContentOf("/commons/static-routes-test/schemas/schema1.json", baseReplacement),
    "/v1/test/schemas/schema2"   -> jsonContentOf("/commons/static-routes-test/schemas/schema2.json", baseReplacement)
  )

  "A StaticResourceRoutes" should {

    "return static resources" in {
      forAll(files.toList) {
        case (path, json) =>
          Get(path) ~> staticRoutes ~> check {
            status shouldEqual StatusCodes.OK
            contentType shouldEqual RdfMediaTypes.`application/ld+json`.toContentType
            parse(responseAs[String]).toOption.get shouldEqual json
          }
      }

    }

    "return 404 when resource doesn't exist" in {
      Get(s"/v1/test/schemas/${UUID.randomUUID().toString}") ~> staticRoutes ~> check {
        rejections shouldEqual Seq()
      }
    }
  }

} 
Example 60
Source File: SourcingSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.sourcing

import org.scalactic.source
import org.scalatest.{Inspectors, OptionValues, TryValues}
import org.scalatest.exceptions.{StackDepthException, TestFailedException}
import org.scalatest.matchers.should.Matchers

trait SourcingSpec
    extends org.scalatest.wordspec.AnyWordSpecLike
    with Matchers
    with Inspectors
    with OptionValues
    with TryValues {

  class EitherValuable[L, R](either: Either[L, R], pos: source.Position) {
    def rightValue: R =
      either match {
        case Right(value) => value
        case Left(_)      =>
          throw new TestFailedException(
            (_: StackDepthException) => Some("The Either value is not a Right(_)"),
            None,
            pos
          )
      }

    def leftValue: L =
      either match {
        case Left(value) => value
        case Right(_)    =>
          throw new TestFailedException(
            (_: StackDepthException) => Some("The Either value is not a Left(_)"),
            None,
            pos
          )
      }
  }

  implicit def convertEitherToValuable[L, R](either: Either[L, R])(implicit p: source.Position): EitherValuable[L, R] =
    new EitherValuable(either, p)

} 
Example 61
Source File: LabelSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.rdf.Iri.{Path, Url}
import ch.epfl.bluebrain.nexus.util.{EitherValues, Randomness}
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class LabelSpec extends AnyWordSpecLike with Matchers with Randomness with Inspectors with EitherValues {

  "A Label" should {
    "be constructed correctly from alphanumeric chars" in {
      forAll(1 to 32) { length =>
        val string = genString(length, Vector.range('a', 'z') ++ Vector.range('0', '9'))
        Label.unsafe(string).value shouldEqual string
        Label(string).rightValue.value shouldEqual string
      }
    }
    "fail to construct for illegal formats" in {
      val cases = List("", " ", "a ", " a", "a-", "_")
      forAll(cases) { string =>
        intercept[IllegalArgumentException](Label.unsafe(string))
        Label(string).leftValue shouldEqual s"Label '$string' does not match pattern '${Label.regex.regex}'"
      }
    }
    "return its path representation" in {
      Label.unsafe("abc").toPath shouldEqual Path("/abc").rightValue
    }
    "return an iri representation" in {
      forAll(List("http://localhost", "http://localhost/")) { str =>
        val base  = Url(str).rightValue
        val label = Label.unsafe("abc")
        label.toIri(base) shouldEqual Url("http://localhost/abc").rightValue
      }
    }
  }

} 
Example 62
Source File: JsonConversionSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.wire

import com.comcast.money.api.{ Note, SpanId, SpanInfo }
import com.comcast.money.core.CoreSpanInfo
import org.scalatest.{ Inspectors, Matchers, WordSpec }

class JsonConversionSpec extends WordSpec with Matchers with Inspectors {

  import JsonConversions._

  import scala.collection.JavaConverters._

  val orig = CoreSpanInfo(
    id = new SpanId("foo", 1L),
    name = "key",
    appName = "app",
    host = "host",
    startTimeMillis = 1L,
    success = true,
    durationMicros = 35L,
    notes = Map[String, Note[_]](
      "what" -> Note.of("what", 1L),
      "when" -> Note.of("when", 2L),
      "bob" -> Note.of("bob", "craig"),
      "none" -> Note.of("none", null),
      "bool" -> Note.of("bool", true),
      "dbl" -> Note.of("dbl", 1.0)).asJava).asInstanceOf[SpanInfo]

  "Json Conversion" should {
    "roundtrip" in {

      val json = orig.convertTo[String]
      val converted = json.convertTo[SpanInfo]

      converted.appName shouldEqual orig.appName
      converted.name shouldEqual orig.name
      converted.durationMicros shouldEqual orig.durationMicros
      converted.host shouldEqual orig.host
      converted.id shouldEqual orig.id
      converted.success shouldEqual orig.success
      converted.startTimeMillis shouldEqual orig.startTimeMillis
      converted.notes shouldEqual orig.notes
    }
  }
} 
Example 63
Source File: AvroConversionSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.wire

import com.comcast.money.api.{ Note, SpanId, SpanInfo }
import com.comcast.money.core.CoreSpanInfo
import org.scalatest.{ Inspectors, Matchers, WordSpec }

class AvroConversionSpec extends WordSpec with Matchers with Inspectors {

  import AvroConversions._

  import scala.collection.JavaConverters._

  "Avro Conversion" should {
    "roundtrip" in {
      val orig = CoreSpanInfo(
        id = new SpanId("foo", 1L),
        name = "key",
        appName = "app",
        host = "host",
        startTimeMillis = 1L,
        success = true,
        durationMicros = 35L,
        notes = Map[String, Note[_]](
          "what" -> Note.of("what", 1L),
          "when" -> Note.of("when", 2L),
          "bob" -> Note.of("bob", "craig"),
          "none" -> Note.of("none", null),
          "bool" -> Note.of("bool", true),
          "dbl" -> Note.of("dbl", 1.0)).asJava).asInstanceOf[SpanInfo]

      val bytes = orig.convertTo[Array[Byte]]
      val roundtrip = bytes.convertTo[SpanInfo]

      roundtrip.appName shouldEqual orig.appName
      roundtrip.name shouldEqual orig.name
      roundtrip.durationMicros shouldEqual orig.durationMicros
      roundtrip.host shouldEqual orig.host
      roundtrip.id shouldEqual orig.id
      roundtrip.success shouldEqual orig.success
      roundtrip.startTimeMillis shouldEqual orig.startTimeMillis
      roundtrip.notes shouldEqual orig.notes
    }
  }
} 
Example 64
Source File: CollectingInstancesProperties.scala    From kontextfrei   with Apache License 2.0 5 votes vote down vote up
package com.danielwestheide.kontextfrei.scalatest

import org.apache.spark.rdd.RDD
import org.scalatest.enablers.Collecting
import org.scalatest.{Inspectors, PropSpec, PropSpecLike}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

trait CollectingInstancesProperties[DColl[_]]
    extends PropSpecLike
    with GeneratorDrivenPropertyChecks
    with KontextfreiSpec[DColl]
    with CollectingInstances {

  property("There is a Collecting instance for DCollection") {
    forAll { (xs: List[String]) =>
      val dcoll = ops.unit(xs)
      Inspectors.forAll(dcoll) { x =>
        assert(xs.contains(x))
      }
    }
  }

  property(
    "Collecting nature of DCollection returns the original size of the input sequence") {
    forAll { (xs: List[String]) =>
      val dcoll = ops.unit(xs)
      assert(
        implicitly[Collecting[String, DColl[String]]]
          .sizeOf(dcoll) === xs.size)
    }
  }

  property(
    "Collecting nature of DCollection returns the Some loneElement if input sequence has exactly one element") {
    forAll { (x: String) =>
      val dcoll = ops.unit(List(x))
      assert(
        implicitly[Collecting[String, DColl[String]]]
          .loneElementOf(dcoll) === Some(x))
    }
  }

  property(
    "Collecting nature of DCollection returns the None as loneElement if input sequence as more than one element") {
    forAll { (xs: List[String]) =>
      whenever(xs.size > 1) {
        val dcoll = ops.unit(xs)
        assert(
          implicitly[Collecting[String, DColl[String]]]
            .loneElementOf(dcoll)
            .isEmpty)
      }
    }
  }

  property(
    "Collecting nature of DCollection returns the None as loneElement if input sequence is empty") {
    val dcoll = ops.unit(List.empty[String])
    assert(
      implicitly[Collecting[String, DColl[String]]]
        .loneElementOf(dcoll)
        .isEmpty)
  }

}

class CollectionInstancesStreamSpec
    extends CollectingInstancesProperties[Stream]
    with StreamSpec

class CollectionInstancesRDDSpec
    extends CollectingInstancesProperties[RDD]
    with RDDSpec 
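As a side note on the spec above: the Inspectors methods are not tied to List or Vector; they resolve an implicit org.scalatest.enablers.Collecting for the inspected type, which is how kontextfrei can make forAll work on an RDD, and calling them through the Inspectors companion object keeps them distinct from the forAll of GeneratorDrivenPropertyChecks. A rough sketch of the companion-object style on types that already ship with Collecting instances (the object name and data below are made up):

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers._

object CollectingSketch extends App {
  // The same forAll works on Arrays, Strings and Java collections,
  // because ScalaTest provides Collecting instances for all of them.
  Inspectors.forAll(Array(1, 2, 3))(n => n should be > 0)
  Inspectors.forAll("abc")(c => c.isLetter shouldBe true)
  Inspectors.forAll(java.util.Arrays.asList(1, 2, 3))(n => n should be > 0)
}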
Example 65
Source File: ScapegoatQualityProfileSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala.scapegoat

import com.mwz.sonar.scala.metadata.scapegoat.ScapegoatRules
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, LoneElement}
import org.sonar.api.server.profile.BuiltInQualityProfilesDefinition.Context


class ScapegoatQualityProfileSpec extends AnyFlatSpec with Inspectors with LoneElement with Matchers {
  trait Ctx {
    val context = new Context()
    new ScapegoatQualityProfile().define(context)
    val qualityProfile = context.profilesByLanguageAndName.loneElement.value.loneElement.value
    val rules = qualityProfile.rules
  }

  "ScapegoatQualityProfile" should "define only one quality profile" in new Ctx {
    context.profilesByLanguageAndName should have size 1 // by language
    context.profilesByLanguageAndName.loneElement.value should have size 1 // by language and name
  }

  it should "properly define the properties of the quality profile" in new Ctx {
    qualityProfile.name shouldBe "Scapegoat"
    qualityProfile.language shouldBe "scala"
  }

  it should "not be the default quality profile" in new Ctx {
    qualityProfile.isDefault shouldBe false
  }

  it should "activate one rule for each scapegoat inspection" in new Ctx {
    qualityProfile.rules should have size ScapegoatRules.rules.length
  }

  it should "have all rules come from the Scapegaot rules repository" in new Ctx {
    forEvery(rules)(rule => rule.repoKey shouldBe "sonar-scala-scapegoat")
  }

  it should "not have overridden any of the default params" in new Ctx {
    forEvery(rules)(rule => rule.overriddenParams shouldBe empty)
  }
} 
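The sonar-scala specs above use forEvery rather than forAll. Both come from Inspectors and take the same arguments; the difference is that forAll stops at the first element that fails, while forEvery keeps inspecting and reports every failing element in one message, which is usually what you want for "every rule must satisfy X" checks. A small illustrative sketch (suite name and data invented):

import org.scalatest.Inspectors
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ForEverySketchSpec extends AnyFlatSpec with Matchers with Inspectors {

  // toy data for illustration
  private val repoKeys = List("sonar-scala-scapegoat", "sonar-scala-scapegoat")

  // With forAll a failure would name only the first bad element;
  // forEvery lists all offending elements in a single failure message.
  "forEvery" should "check every element and collect all failures" in {
    forEvery(repoKeys)(key => key shouldBe "sonar-scala-scapegoat")
  }
}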
Example 66
Source File: ScapegoatRulesRepositorySpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala.scapegoat

import com.mwz.sonar.scala.metadata.scapegoat.ScapegoatRules
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, LoneElement}
import org.sonar.api.rule.{RuleStatus, Severity}
import org.sonar.api.rules.RuleType
import org.sonar.api.server.rule.RulesDefinition.Context


class ScapegoatRulesRepositorySpec extends AnyFlatSpec with Inspectors with LoneElement with Matchers {
  trait Ctx {
    val context = new Context()
    new ScapegoatRulesRepository().define(context)
    val repository = context.repositories.loneElement
    val rules = repository.rules
  }

  "ScapegoatRulesRepository" should "define only one repository" in new Ctx {
    context.repositories should have size 1
  }

  it should "properly define the properties of the repository" in new Ctx {
    repository.key shouldBe "sonar-scala-scapegoat"
    repository.name shouldBe "Scapegoat"
    repository.language shouldBe "scala"
  }

  it should "define one rule for each scapegoat inspection" in new Ctx {
    rules should have size ScapegoatRules.rules.length
  }

  it should "properly define the properties of the ArrayEquals rule" in new Ctx {
    val arrayEqualsRule = repository.rule("com.sksamuel.scapegoat.inspections.collections.ArrayEquals")

    arrayEqualsRule.internalKey shouldBe "com.sksamuel.scapegoat.inspections.collections.ArrayEquals"
    arrayEqualsRule.name shouldBe "Array equals"
    arrayEqualsRule.markdownDescription shouldBe "*Checks for comparison of arrays using == which will always return false.*\n\n======= Array equals is not an equality check. Use a.deep == b.deep or convert to another collection type."
    arrayEqualsRule.activatedByDefault shouldBe true
    arrayEqualsRule.status shouldBe RuleStatus.READY
    arrayEqualsRule.severity shouldBe Severity.INFO
    arrayEqualsRule.`type` shouldBe RuleType.CODE_SMELL
  }

  "All Scapegoat Rules" should "have a valid internal key" in new Ctx {
    forEvery(rules)(rule => rule.internalKey should startWith("com.sksamuel.scapegoat.inspections"))
  }

  it should "have a non-empty name" in new Ctx {
    forEvery(rules)(rule => rule.name should not be empty)
  }

  it should "have a non-empty description" in new Ctx {
    forEvery(rules)(rule => rule.markdownDescription should not be empty)
  }

  it should "be activated by default" in new Ctx {
    forEvery(rules)(rule => rule.activatedByDefault shouldBe true)
  }

  it should "have a READY status" in new Ctx {
    forEvery(rules)(rule => rule.status shouldBe RuleStatus.READY)
  }

  it should "have a valid severity" in new Ctx {
    forEvery(rules) { rule =>
      val ruleSeverity = rule.severity
      forExactly(1, Severity.ALL)(severity => ruleSeverity shouldBe severity)
    }
  }

  it should "be a CODE_SMELL" in new Ctx {
    forEvery(rules)(rule => rule.`type` shouldBe RuleType.CODE_SMELL)
  }
} 
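The severity check above leans on forExactly: the test passes only if exactly one element of Severity.ALL matches the rule's severity. A stripped-down sketch of the same quantifier on plain strings (the object name and values are placeholders):

import org.scalatest.Inspectors._
import org.scalatest.matchers.should.Matchers._

object ForExactlySketch extends App {
  val severities   = List("INFO", "MINOR", "MAJOR", "CRITICAL", "BLOCKER")
  val ruleSeverity = "INFO"

  // Succeeds because exactly one element equals ruleSeverity;
  // zero matches or several matches would both fail the inspection.
  forExactly(1, severities)(severity => ruleSeverity shouldBe severity)
}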
Example 67
Source File: ScalastyleScapegoatQualityProfileSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala.qualityprofiles

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, LoneElement}
import org.sonar.api.server.profile.BuiltInQualityProfilesDefinition.{BuiltInQualityProfile, Context}


class ScalastyleScapegoatQualityProfileSpec
    extends AnyFlatSpec
    with Inspectors
    with LoneElement
    with Matchers {
  trait Ctx {
    val context = new Context()
    new ScalastyleScapegoatQualityProfile().define(context)
    val qualityProfile: BuiltInQualityProfile =
      context.profilesByLanguageAndName.loneElement.value.loneElement.value
    val rules = qualityProfile.rules
  }

  "Scalastyle+ScapegoatQualityProfile" should "define only one quality profile" in new Ctx {
    context.profilesByLanguageAndName should have size 1 // by language
    context.profilesByLanguageAndName.loneElement.value should have size 1 // by language and name
  }

  it should "properly define the properties of the quality profile" in new Ctx {
    qualityProfile.name shouldBe "Scalastyle+Scapegoat"
    qualityProfile.language shouldBe "scala"
  }

  it should "not be the default quality profile" in new Ctx {
    qualityProfile.isDefault shouldBe false
  }

  it should "define all Scalastyle + Scapegoat rules" in new Ctx {
    qualityProfile.rules should have size 187 // 69 from Scalastyle + 118 from Scapegoat
  }

  it should "have all rules come from either the Scalastyle or the Scapegaot rules repositories" in new Ctx {
    forEvery(rules) { rule =>
      rule.repoKey should (be("sonar-scala-scalastyle") or be("sonar-scala-scapegoat"))
    }
  }

  it should "not have overridden any of the default params" in new Ctx {
    forEvery(rules)(rule => rule.overriddenParams shouldBe empty)
  }
} 
Example 68
Source File: ScalastyleRulesRepositorySpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package scalastyle

import scala.jdk.CollectionConverters._

import com.mwz.sonar.scala.metadata.scalastyle.ScalastyleRules
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, LoneElement}
import org.sonar.api.rule.RuleStatus
import org.sonar.api.rules.RuleType
import org.sonar.api.server.rule.RulesDefinition.{Context, Repository, Rule}
import org.sonar.api.server.rule.{RuleParamType, RulesDefinition}

class ScalastyleRulesRepositorySpec extends AnyFlatSpec with Matchers with Inspectors with LoneElement {
  trait Ctx {
    val context = new Context()
    new ScalastyleRulesRepository().define(context)
    val repository: Repository = context.repositories.loneElement
  }

  "ScalastyleRulesRepository" should "define rules repository" in new Ctx {
    context.repositories should have size 1
  }

  it should "correctly define repository properties" in new Ctx {
    repository.key shouldBe "sonar-scala-scalastyle"
    repository.name shouldBe "Scalastyle"
    repository.language shouldBe "scala"
  }

  it should "include all Scalastyle inspections" in new Ctx {
    ScalastyleRules.rules.length shouldBe 73 // 31 templates + 42 default rules
    ScalastyleRules.rules.map(r => r.key -> r).iterator.toMap.size shouldBe ScalastyleRules.rules.length
    repository.rules should have size 101 // 31 templates + 42 default rules + 28 template instances
  }

  it should "have all rules with non-empty properties" in new Ctx {
    forEvery(repository.rules) { rule =>
      rule.key should not be empty
      rule.internalKey should not be empty
      rule.name should not be empty
      rule.markdownDescription should not be empty
      rule.severity should not be empty
    }
  }

  it should "have all rules' keys start with org.scalastyle" in new Ctx {
    forEvery(repository.rules)(rule => rule.key should startWith("org.scalastyle"))
  }

  it should "have all rules activated by default" in new Ctx {
    forEvery(repository.rules)(rule => rule.activatedByDefault shouldBe true)
  }

  it should "have all rules with READY status" in new Ctx {
    forEvery(repository.rules)(rule => rule.status shouldBe RuleStatus.READY)
  }

  it should "have all rules marked as CODE_SMELL" in new Ctx {
    forEvery(repository.rules)(rule => rule.`type` shouldBe RuleType.CODE_SMELL)
  }

  it should "have rules with parameters" in new Ctx {
    forAtLeast(1, repository.rules)(rule => rule.params should not be empty)
  }

  it should "not have rules with empty parameters" in new Ctx {
    val params: Seq[RulesDefinition.Param] =
      repository.rules.asScala
        .filter(r => !r.params.isEmpty)
        .flatMap(_.params.asScala)
        .toSeq

    forAll(params) { param =>
      param.name should not be empty
      param.description should not be empty
    }
  }

  it should "have all rules contain ruleClass parameter" in new Ctx {
    val rules: Seq[Rule] = repository.rules.asScala.filter(r => !r.params.isEmpty).toSeq
    forEvery(rules) { rule =>
      rule.params.asScala.exists(p => p.key === "ruleClass" && p.defaultValue.startsWith("org.scalastyle")) shouldBe true
    }
  }

  it should "create rules with correct parameters" in new Ctx {
    val rule: Rule = repository.rule("org.scalastyle.file.FileLineLengthChecker")
    val params: Seq[(String, RuleParamType, String)] =
      rule.params().asScala.map(p => (p.name, p.`type`, p.defaultValue)).toSeq
    val expected = Seq(
      ("maxLineLength", RuleParamType.INTEGER, "160"),
      ("tabSize", RuleParamType.INTEGER, "4"),
      ("ignoreImports", RuleParamType.BOOLEAN, "false"),
      ("ruleClass", RuleParamType.STRING, "org.scalastyle.file.FileLineLengthChecker")
    )

    params should contain theSameElementsAs expected
  }
} 
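Besides forEvery, the repository spec above uses forAtLeast(1, ...) to require that at least one rule declares parameters; Inspectors also provides forAtMost and forBetween with the same shape. A minimal sketch of these bounded inspectors (the case class and data are invented):

import org.scalatest.Inspectors._
import org.scalatest.matchers.should.Matchers._

object BoundedInspectorsSketch extends App {
  final case class Rule(key: String, params: List[String])

  // toy data for illustration
  val rules = List(
    Rule("org.scalastyle.file.FileLengthChecker", Nil),
    Rule("org.scalastyle.file.FileLineLengthChecker", List("maxLineLength", "tabSize"))
  )

  // At least one rule must declare parameters, and at most one may be parameterless.
  forAtLeast(1, rules)(rule => rule.params should not be empty)
  forAtMost(1, rules)(rule => rule.params shouldBe empty)
}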
Example 69
Source File: ScalastyleQualityProfileSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package scalastyle

import scala.jdk.CollectionConverters._

import com.mwz.sonar.scala.metadata.scalastyle.ScalastyleRules
import com.mwz.sonar.scala.metadata.scalastyle.ScalastyleRulesRepository.SkipTemplateInstances
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, LoneElement}
import org.sonar.api.server.profile.BuiltInQualityProfilesDefinition.{
  BuiltInActiveRule,
  BuiltInQualityProfile,
  Context
}

class ScalastyleQualityProfileSpec extends AnyFlatSpec with Matchers with LoneElement with Inspectors {
  trait Ctx {
    val context = new Context()
    new ScalastyleQualityProfile().define(context)
    val qualityProfile: BuiltInQualityProfile =
      context.profilesByLanguageAndName.loneElement.value.loneElement.value
    val rules: Seq[BuiltInActiveRule] = qualityProfile.rules.asScala.toSeq
  }

  "ScalastyleQualityProfile" should "define a quality profile" in new Ctx {
    qualityProfile.language shouldBe "scala"
    qualityProfile.name shouldBe "Scalastyle"
  }

  it should "not be the default profile" in new Ctx {
    qualityProfile.isDefault shouldBe false
  }

  it should "activate all default (non-template) rules" in new Ctx {
    val activated =
      ScalastyleRules.rules
        .filter(i => i.params.isEmpty && !ScalastyleQualityProfile.BlacklistRules.contains(i.key))
        .map(_.key)
        .toList

    rules.map(_.ruleKey) should contain allElementsOf activated
  }

  it should "have 69 rules" in new Ctx {
    rules should have size 69 // 41 default rules + 28 template instances
  }

  it should "not activate templates" in new Ctx {
    val templates =
      ScalastyleRules.rules
        .filter(_.params.nonEmpty)
        .map(i => s"${i.key}-template")
        .toList

    rules.map(_.ruleKey) should contain noElementsOf templates
  }

  it should "activate not excluded template rules" in new Ctx {
    val templateInstances =
      ScalastyleRules.rules
        .filter(i => i.params.nonEmpty && !SkipTemplateInstances.contains(i.key))
        .map(_.key)
        .toList

    rules.map(_.ruleKey) should contain allElementsOf templateInstances

    val excluded =
      ScalastyleRules.rules
        .filter(i => SkipTemplateInstances.contains(i.key))
        .map(_.key)
        .toList

    rules.map(_.ruleKey) should contain noElementsOf excluded
  }

  it should "have all rules come from the Scalastyle rules repository" in new Ctx {
    forEvery(rules)(rule => rule.repoKey shouldBe "sonar-scala-scalastyle")
  }

  it should "not have overridden any of the default params" in new Ctx {
    forEvery(rules)(rule => rule.overriddenParams shouldBe empty)
  }
} 
Example 70
Source File: GlobalIssuesSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala.pr

import com.mwz.sonar.scala.pr.Generators._
import org.scalacheck.ScalacheckShapeless._
import org.scalatest.Inspectors
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.sonar.api.batch.fs.internal.TestInputFileBuilder
import org.sonar.api.batch.rule.Severity
import org.sonar.api.rule.RuleKey

class GlobalIssuesSpec extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks {
  it should "add a new issue" in {
    val issues = new GlobalIssues
    val file = TestInputFileBuilder
      .create("", "test.scala")
      .build
    val issue = Issue(
      key = RuleKey.of("repo", "rule"),
      file = file,
      line = 10,
      severity = Severity.MAJOR,
      message = "test"
    )

    issues.add(issue)
    issues.allIssues shouldBe Map(file -> List(issue))
  }

  it should "return all issues" in {
    forAll { (issues: List[Issue]) =>
      whenever(issues.nonEmpty) {
        val globalIssues = new GlobalIssues
        val expected = issues.groupBy(_.file)

        issues.foreach(globalIssues.add)

        Inspectors.forAll(globalIssues.allIssues) {
          case (file, issues) =>
            issues should contain theSameElementsAs expected(file)
        }
      }
    }
  }
} 
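GlobalIssuesSpec mixes ScalaCheck's property-based forAll (from ScalaCheckDrivenPropertyChecks) with the collection inspector, which is why the inner call is written fully qualified as Inspectors.forAll. A trimmed-down sketch of that nesting over an unrelated toy property (the suite name and property are invented):

import org.scalatest.Inspectors
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class NestedForAllSketchSpec extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks {

  "groupBy" should "only place elements under the key they map to" in {
    // Outer forAll: ScalaCheck, runs the block against many generated lists.
    forAll { (xs: List[Int]) =>
      val buckets = xs.groupBy(_ % 2)
      // Inner forAll: the Inspectors one, fully qualified to avoid the name clash.
      Inspectors.forAll(buckets.toList) {
        case (parity, bucket) => Inspectors.forAll(bucket)(x => x % 2 shouldBe parity)
      }
    }
  }
}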
Example 71
Source File: ArrayUtilSpec.scala    From hacktoberfest-scala-algorithms   with GNU General Public License v3.0 5 votes vote down vote up
package io.github.sentenza.hacktoberfest.util

import org.scalatest.{Assertion, Matchers, WordSpec, Inspectors}

class ArrayUtilSpec extends WordSpec with Matchers {
  "The ArrayUtil" should {
    "return an array of n elements" in {
      val arrayUtil = new ArrayUtil()
      val length    = scala.util.Random.nextInt(1000)
      arrayUtil.buildRandomArray(length).length shouldBe length
    }

    "return an array whose values are capped by the length" in {
      val arrayUtil = new ArrayUtil()
      val length    = scala.util.Random.nextInt(1000)
      import Inspectors._
      val maximum = scala.util.Random.nextInt(900)
      forAll(arrayUtil.buildRandomArray(length, maximum)) { elem =>
        elem should be <= maximum
      }
    }
  }
} 
Example 72
Source File: StatsImplSpec.scala    From opencensus-scala   with Apache License 2.0 5 votes vote down vote up
package io.opencensus.scala.stats

import io.opencensus.implcore.internal.SimpleEventQueue
import io.opencensus.implcore.stats.StatsComponentImplBase
import io.opencensus.stats.AggregationData
import io.opencensus.stats.AggregationData.{SumDataDouble, SumDataLong}
import io.opencensus.tags.{Tags => JavaTags}
import io.opencensus.testing.common.TestClock
import org.scalatest.Inspectors

import scala.jdk.CollectionConverters._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class StatsImplSpec extends StatsSpecs {

  def measureLong(name: String) =
    Measure.long(name, "desc", "unit").get
  def measureDouble(name: String) =
    Measure.double(name, "desc", "unit").get

  val measurementsLong =
    List(
      Measurement.long(measureLong("name"), 4L)   -> SumDataLong.create(4L),
      Measurement.long(measureLong("name2"), 12L) -> SumDataLong.create(12L)
    )

  val measurementsDouble = List(
    Measurement.double(measureDouble("name"), 4.0) -> SumDataDouble.create(4.0),
    Measurement.double(measureDouble("name2"), 12.0) -> SumDataDouble.create(
      12.0
    )
  )

  "record single measure long" should behave like recordingSpecs(
    measurementsLong.take(1)
  )

  "record single measure double" should behave like recordingSpecs(
    measurementsDouble.take(1)
  )

  "record different long measures in batch" should behave like recordingSpecs(
    measurementsLong
  )

  "record different double measures in batch" should behave like recordingSpecs(
    measurementsDouble
  )
}

trait StatsSpecs extends AnyFlatSpec with Matchers with Inspectors {

  def recordingSpecs(
      measurements: List[(Measurement, AggregationData)]
  ): Unit = {
    def view(measure: Measure, name: String) =
      View(name, "viewdesc", measure, List("col1"), Sum).get

    it should "record measurements" in {
      val (statsComponent, stats) = createStats()

      val views = measurements.zipWithIndex.map {
        case ((measurement, result), i) =>
          val testView = view(measurement.measure, i.toString)
          stats.registerView(testView)

          (testView, result)
      }

      stats.record(measurements.map(_._1): _*)

      forAll(views) {
        case (view, result) =>
          val jView =
            statsComponent.getViewManager.getView(view.javaView.getName)
          val values = jView.getAggregationMap.asScala.values
          values.head shouldBe result
      }
    }
  }

  private def createStats() = {
    val statsComponent =
      new StatsComponentImplBase(new SimpleEventQueue, TestClock.create())

    (
      statsComponent,
      new StatsImpl(
        statsComponent.getViewManager,
        statsComponent.getStatsRecorder,
        JavaTags.getTagger
      )
    )

  }

} 
Example 73
Source File: DayOfMonthTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.DayOfMonthTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class DayOfMonthTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A DayOfMonth time series" should "produce the day of month of the underlying timestamp" in
      {
         new DayOfMonthTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(27)
      }
} 
Example 74
Source File: SinusTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.primary

import be.cetic.tsimulus.Utils
import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.primary.SinusTimeSeries
import org.joda.time.LocalDateTime
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class SinusSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A sinus series" should "provide a value close to 0 for its origin" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time).get

      value shouldBe (0.0 +- 0.0001)
   }

   it should "provide a value close to 0 for its origin + 1 period" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time.plusMillis(1000)).get

      value shouldBe (0.0 +- 0.0001)
   }

   it should "provide a value close to 0 for its origin - 1 period" in
   {
     val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
     val ts = SinusTimeSeries(time, 1000)
     val value = ts.compute(time.plusMillis(-1000)).get

     value shouldBe (0.0 +- 0.0001)
   }

   it should "provide a value close to 0 for its origin + half its period" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time.plusMillis(500)).get

      value shouldBe (0.0 +- 0.0001)
   }

   it should "provide a value close to 1 for its origin + 1/4 its period" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time.plusMillis(250)).get

      value shouldBe (1.0 +- 0.0001)
   }

   it should "provide a value close to 0 for its origin - half its period" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time.plusMillis(-500)).get

      value shouldBe (0.0 +- 0.0001)
   }

   it should "provide a value close to -1 for its origin - 1/4 its period" in
   {
      val time = new LocalDateTime(2020, 6, 7, 1, 2, 3)
      val ts = SinusTimeSeries(time, 1000)
      val value = ts.compute(time.plusMillis(-250)).get

      value shouldBe (-1.0 +- 0.0001)
   }
} 
Example 75
Source File: MonthTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.{MonthTimeSeries, YearTimeSeries}
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class MonthTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Month time series" should "produce the month of the underlying timestamp" in
      {
         new MonthTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(3)
      }
} 
Example 76
Source File: DateTimeDifferenceTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.DateTimeDifferenceTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.joda.time.Duration
import org.scalatest.{FlatSpec, Inspectors, Matchers}


class DateTimeDifferenceTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A DateTimeDifference time series" should "produce a duration of 0 if both base time series are equals" in
      {
         new DateTimeDifferenceTimeSeries(NowTimeSeries(), NowTimeSeries()).compute(fixedDate) shouldBe Some(Duration.ZERO)
      }
} 
Example 77
Source File: MinuteTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.MinuteTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class MinuteTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Minute time series" should "produce the minute of the underlying timestamp" in
      {
         new MinuteTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(33)
      }
} 
Example 78
Source File: MillisecondTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.MillisecondTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class MillisecondTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Millisecond time series" should "produce the milliseconds of the underlying timestamp" in
      {
         new MillisecondTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(0)
      }
} 
Example 79
Source File: YearTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.YearTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class YearTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Year time series" should "produce the year of the underlying timestamp" in
      {
         new YearTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(2019)
      }
} 
Example 80
Source File: WeekTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.WeekTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class WeekTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Week time series" should "produce the week of the underlying timestamp" in
      {
         new WeekTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(13)
      }
} 
Example 81
Source File: HourTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.HourTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class HourTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Hour time series" should "produce the hour of the underlying timestamp" in
      {
         new HourTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(10)
      }
} 
Example 82
Source File: DayOfWeekTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.DayOfWeekTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class DayOfWeekTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A DayOfWeek time series" should "produce the day of week of the underlying timestamp" in
      {
         new DayOfWeekTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(3)
      }
} 
Example 83
Source File: SecondTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.{HourTimeSeries, SecondTimeSeries}
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class SecondTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A Second time series" should "produce the second of the underlying timestamp" in
      {
         new SecondTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(17)
      }
} 
Example 84
Source File: DayOfYearTimeSeriesTest.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.DayOfYearTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class DayOfYearTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest
{
   "A DayOfYear time series" should "produce the day of week of the underlying timestamp" in
      {
         // 2019-03-27, the fixed reference date used across these dt tests, is the 86th day of the year
         new DayOfYearTimeSeries(NowTimeSeries()).compute(fixedDate) shouldBe Some(86)
      }
} 
Example 85
Source File: LedgerEntriesSpec.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.stores.ledger.inmemory

import akka.stream.ThrottleMode
import akka.stream.scaladsl.{Flow, Keep, Sink, Source}
import com.daml.ledger.participant.state.v1.Offset
import com.daml.ledger.api.testing.utils.AkkaBeforeAndAfterAll
import org.scalatest.{AsyncWordSpec, Inspectors, Matchers}

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Random

class LedgerEntriesSpec
    extends AsyncWordSpec
    with Matchers
    with AkkaBeforeAndAfterAll
    with Inspectors {

  case class Error(msg: String)

  case class Transaction(content: String)

  val NO_OF_MESSAGES = 10000
  val NO_OF_SUBSCRIBERS = 50

  private def genTransactions() = (1 to NO_OF_MESSAGES).map { i =>
    if (Random.nextBoolean())
      Right(Transaction(i.toString))
    else
      Left(Error(i.toString))
  }

  "LedgerEntries" should {

    "store new blocks and a late subscriber can read them" in {
      val ledger = new LedgerEntries[Either[Error, Transaction]](_.toString)
      val transactions = genTransactions()

      transactions.foreach(t => ledger.publish(t))

      val sink =
        Flow[(Offset, Either[Error, Transaction])]
          .take(NO_OF_MESSAGES.toLong)
          .toMat(Sink.seq)(Keep.right)

      val blocksF = ledger.getSource(None, None).runWith(sink)

      blocksF.map { blocks =>
        val readTransactions = blocks.collect { case (_, transaction) => transaction }
        readTransactions shouldEqual transactions
      }
    }

    "store new blocks while multiple subscribers are reading them with different pace" in {
      val transactions = genTransactions()

      val ledger = new LedgerEntries[Either[Error, Transaction]](_.toString)

      val publishRate = NO_OF_MESSAGES / 10

      val blocksInStream =
        Source(transactions)
          .throttle(publishRate, 100.milliseconds, publishRate, ThrottleMode.shaping)
          .to(Sink.foreach { t =>
            ledger.publish(t)
            ()
          })

      def subscribe() = {
        val subscribeRate = NO_OF_MESSAGES / (Random.nextInt(100) + 1)
        ledger
          .getSource(None, None)
          .runWith(
            Flow[(Offset, Either[Error, Transaction])]
              .throttle(subscribeRate, 100.milliseconds, subscribeRate, ThrottleMode.shaping)
              .take(NO_OF_MESSAGES.toLong)
              .toMat(Sink.seq)(Keep.right)
          )
      }

      val readBlocksF = Future.sequence((1 to NO_OF_SUBSCRIBERS).map(_ => subscribe()))
      blocksInStream.run()

      readBlocksF.map { readBlocksForAll =>
        forAll(readBlocksForAll) { readBlocks =>
          val readTransactions = readBlocks.collect { case (_, transaction) => transaction }
          readTransactions shouldEqual transactions
        }
      }
    }
  }
} 
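LedgerEntriesSpec runs its inspection inside a Future.map, with the forAll call as the last expression, so the mapped Future becomes the Future[Assertion] that the async style expects. A stripped-down sketch of that shape using ScalaTest 3.1+ imports (suite name and data invented):

import scala.concurrent.Future

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

class AsyncInspectorsSketchSpec extends AsyncWordSpec with Matchers with Inspectors {

  "an asynchronous result" should {
    "contain only even numbers" in {
      val eventualNumbers: Future[Seq[Int]] = Future.successful(Seq(2, 4, 6))
      // The inspection is the last expression of the map, so the test body
      // evaluates to the Future[Assertion] that AsyncWordSpec expects.
      eventualNumbers.map(numbers => forAll(numbers)(n => n % 2 shouldBe 0))
    }
  }
}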
Example 86
Source File: AggregationTests.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.it

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Inspectors, Matchers}
import play.api.libs.json.{JsValue, _}

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._

class AggregationTests extends AsyncFunSpec with Matchers with Inspectors with Helpers with LazyLogging {

  describe("Agg API should") {
    val agg = scala.io.Source.fromURL(this.getClass.getResource("/agg/aggnames_293846.nq"))
    val ingestAgg = {
      Http.post(_in, agg.mkString, Some("text/nquads;charset=UTF-8"), List("format" -> "nquads"), tokenHeader)
    }.map { res =>
        withClue(res) {
          res.status should be(200)
          jsonSuccessPruner(Json.parse(res.payload)) shouldEqual jsonSuccess
        }
    }

    agg.close()


    val path = cmw / "test.agg.org" / "Test201903_05_1501_11" / "testStatsApiTerms"

    val aggForIntField = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field::$http://qa.test.rfnt.com/v1.1/testns/num$,size:3")))
      { r =>
        (Json.parse(r.payload) \ "AggregationResponse" \\ "buckets": @unchecked) match {
          case n: collection.IndexedSeq[JsValue] => (r.status == 200) && n.forall(jsonval => jsonval.as[JsArray].value.size == 3)
        }
      }.map { res =>
        withClue(res) {
          res.status should be(200)
          val total = (Json.parse(res.payload) \ "AggregationResponse" \\ "buckets").map(jsonval => jsonval.as[JsArray].value.size)
          total should equal (ArrayBuffer(3))
        }
      }
    }


    val aggForExactTextField = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field::$http://qa.test.rfnt.com/v1.1/testns/Test_Data$,size:2")))
      { r =>
        (Json.parse(r.payload) \ "AggregationResponse" \\ "buckets": @unchecked) match {
          case n: collection.IndexedSeq[JsValue] => (r.status == 200) && n.forall(jsonval => jsonval.as[JsArray].value.size == 2)
        }
      }.map { res =>
        withClue(res) {
          res.status should be(200)
          val total = (Json.parse(res.payload) \ "AggregationResponse" \\ "buckets").map(jsonval => jsonval.as[JsArray].value.size)
          total should equal (ArrayBuffer(2))
        }
      }
    }


    val badQueryNonExactTextMatch = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field:$http://qa.test.rfnt.com/v1.1/testns/Test_Data$,size:2")))
      { r =>
        Json.parse(r.payload).toString()
          .contains("Stats API does not support non-exact value operator for text fields. Please use :: instead of :") && r.status == 400
      }.map { res =>
        withClue(res) {
          res.status should be(400)
          val result = (Json.parse(res.payload) \ "error").as[String]
          result should include ("Stats API does not support non-exact value operator for text fields. Please use :: instead of :")
        }
      }
    }



    it("ingest aggnames data successfully")(ingestAgg)
    it("get stats for int field")(aggForIntField)
    it("get exact stats for string field")(aggForExactTextField)
    it("get stats for non exact string field should be bad response")(badQueryNonExactTextMatch)

  }

} 
Example 87
Source File: DatastreamHandlerUnitSpec.scala    From play-auditing   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.audit.handler

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.scalatest.Inspectors
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import play.api.inject.DefaultApplicationLifecycle
import play.api.libs.json.{JsString, JsValue}
import uk.gov.hmrc.audit.HandlerResult

import scala.concurrent.duration.DurationInt
import scala.concurrent.{ExecutionContext, Future}
import ExecutionContext.Implicits.global

class DatastreamHandlerUnitSpec extends AnyWordSpecLike with Inspectors with Matchers with ScalaFutures {

  val datastreamHandler = new DatastreamHandler(
    scheme         = "http",
    host           = "localhost",
    port           = 1234,
    path           = "/some/path",
    connectTimeout = 2000.millis,
    requestTimeout = 2000.millis,
    userAgent      = "the-micro-service-name",
    materializer   = ActorMaterializer()(ActorSystem()),
    lifecycle      = new DefaultApplicationLifecycle()
    ) {
    override def sendHttpRequest(event: JsValue)(implicit ec: ExecutionContext): Future[HttpResult] =
      Future.successful(HttpResult.Response(event.as[String].toInt))
  }

  "Any Datastream response" should {
    "Return Success for any response code of 204" in {
      val result = datastreamHandler.sendEvent(JsString("204")).futureValue
      result shouldBe HandlerResult.Success
    }

    "Return Failure for any response code of 3XX or 401-412 or 414-499 or 5XX" in {
      forAll((300 to 399) ++ (401 to 412) ++ (414 to 499) ++ (500 to 599)) { code =>
        val result = datastreamHandler.sendEvent(JsString(code.toString)).futureValue
        result shouldBe HandlerResult.Failure
      }
    }

    "Return Rejected for any response code of 400 or 413" in {
      forAll(Seq(400, 413)) { code =>
        val result = datastreamHandler.sendEvent(JsString(code.toString)).futureValue
        result shouldBe HandlerResult.Rejected
      }
    }
  }
} 
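DatastreamHandlerUnitSpec applies forAll directly to concatenated integer ranges, so one assertion body covers hundreds of status codes while a failure still names the exact code that broke it. Here is a stripped-down sketch of the same pattern, assuming ScalaTest 3.1+; the classify function is a made-up stand-in for the handler's mapping.

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class StatusCodeSketch extends AnyWordSpecLike with Matchers with Inspectors {

  // Hypothetical classification of HTTP status codes.
  private def classify(status: Int): String =
    status match {
      case 204       => "Success"
      case 400 | 413 => "Rejected"
      case _         => "Failure"
    }

  "classify" should {
    "return Failure for every 3XX, 401-412, 414-499 and 5XX code" in {
      forAll((300 to 399) ++ (401 to 412) ++ (414 to 499) ++ (500 to 599)) { code =>
        classify(code) shouldBe "Failure"
      }
    }
  }
}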
Example 88
Source File: EnumTest.scala    From pureconfig   with Mozilla Public License 2.0 5 votes vote down vote up
package pureconfig.module.enum

import _root_.enum.Enum
import com.typesafe.config.ConfigFactory
import org.scalatest.Inspectors
import pureconfig.BaseSuite
import pureconfig.error.CannotConvert
import pureconfig.generic.auto._
import pureconfig.syntax._

sealed trait Greeting

object Greeting {
  case object Hello extends Greeting
  case object GoodBye extends Greeting
  case object ShoutGoodBye extends Greeting

  final implicit val EnumInstance: Enum[Greeting] = Enum.derived[Greeting]
}

class EnumTest extends BaseSuite {

  "enum config convert" should "parse an enum" in Inspectors.forAll(Greeting.EnumInstance.values) { greeting =>
    val conf = ConfigFactory.parseString(s"""{greeting:"$greeting"}""")
    case class Conf(greeting: Greeting)
    conf.to[Conf].right.value shouldEqual Conf(greeting)
  }

  it should "politely refuse an invalid member" in {
    val conf = ConfigFactory.parseString(s"""{greeting:"Psych"}""")
    case class Conf(greeting: Greeting)
    conf.to[Conf] should failWithType[CannotConvert]
  }
} 
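Unlike most of the other examples, EnumTest does not mix the Inspectors trait into the suite: it calls forAll on the Inspectors companion object and uses the call itself as the body of the test registration. A minimal sketch of that style, assuming ScalaTest 3.1+ (the weekday list is illustrative):

import org.scalatest.Inspectors
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class WeekdaySketch extends AnyFlatSpec with Matchers {

  private val weekdays = List("Mon", "Tue", "Wed", "Thu", "Fri")

  // The inspector is invoked on the Inspectors object, so the suite only needs
  // Matchers; the whole forAll call is the test body.
  "every weekday label" should "be three characters long" in Inspectors.forAll(weekdays) { day =>
    day should have length 3
  }
}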
Example 89
Source File: TarFlowSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage

import java.io.ByteArrayInputStream
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl.{FileIO, Source}
import akka.testkit.TestKit
import akka.util.ByteString
import ch.epfl.bluebrain.nexus.storage.utils.{EitherValues, IOEitherValues, Randomness}
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
import org.apache.commons.io.FileUtils
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{BeforeAndAfterAll, Inspectors, OptionValues}

import scala.annotation.tailrec

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with IOEitherValues
    with Randomness
    with EitherValues
    with OptionValues
    with Inspectors
    with BeforeAndAfterAll {

  val basePath = Files.createTempDirectory("tarflow")
  val dir1     = basePath.resolve("one")
  val dir2     = basePath.resolve("two")

  override def afterAll(): Unit = {
    super.afterAll()
    FileUtils.cleanDirectory(basePath.toFile)
    ()
  }

  type PathAndContent = (Path, String)

  "A TarFlow" should {

    Files.createDirectories(dir1)
    Files.createDirectories(dir2)

    def relativize(path: Path): String = basePath.getParent().relativize(path).toString

    "generate the byteString for a tar file correctly" in {
      val file1        = dir1.resolve("file1.txt")
      val file1Content = genString()
      val file2        = dir1.resolve("file3.txt")
      val file2Content = genString()
      val file3        = dir2.resolve("file3.txt")
      val file3Content = genString()
      val files        = List(file1 -> file1Content, file2 -> file2Content, file3 -> file3Content)
      forAll(files) {
        case (file, content) => Source.single(ByteString(content)).runWith(FileIO.toPath(file)).futureValue
      }
      val byteString   = Directory.walk(basePath).via(TarFlow.writer(basePath)).runReduce(_ ++ _).futureValue
      val bytes        = new ByteArrayInputStream(byteString.toArray)
      val tar          = new TarArchiveInputStream(bytes)

      @tailrec def readEntries(
          tar: TarArchiveInputStream,
          entries: List[PathAndContent] = Nil
      ): List[PathAndContent] = {
        val entry = tar.getNextTarEntry
        if (entry == null) entries
        else {
          val data = Array.ofDim[Byte](entry.getSize.toInt)
          tar.read(data)
          readEntries(tar, (Paths.get(entry.getName) -> ByteString(data).utf8String) :: entries)
        }
      }
      val directories = List(relativize(basePath) -> "", relativize(dir1) -> "", relativize(dir2) -> "")
      val untarred    = readEntries(tar).map { case (path, content) => path.toString -> content }
      val expected    = files.map { case (path, content) => relativize(path) -> content } ++ directories
      untarred should contain theSameElementsAs expected
    }
  }

} 
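TarFlowSpec also uses forAll for setup rather than for a final assertion: writing each fixture file inside the inspector means a failed write is reported against the specific (file, content) pair. A simplified, akka-free sketch of that setup-through-forAll pattern, assuming ScalaTest 3.1+ (the file names and contents are arbitrary):

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class FixtureSetupSketch extends AnyWordSpecLike with Matchers with Inspectors {

  private val baseDir: Path = Files.createTempDirectory("fixture-sketch")

  "fixture creation" should {
    "write every file with its expected content" in {
      val files = List("a.txt" -> "alpha", "b.txt" -> "beta", "c.txt" -> "gamma")

      // Using forAll for the setup itself: if one write misbehaves, the failure
      // report names the exact (file, content) pair instead of a bare exception.
      forAll(files) {
        case (name, content) =>
          val target = baseDir.resolve(name)
          Files.write(target, content.getBytes(StandardCharsets.UTF_8))
          new String(Files.readAllBytes(target), StandardCharsets.UTF_8) shouldEqual content
      }
    }
  }
}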
Example 90
Source File: StorageDirectivesSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage.routes

import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.{StatusCodes, Uri}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport._
import ch.epfl.bluebrain.nexus.storage.routes.Routes.exceptionHandler
import ch.epfl.bluebrain.nexus.storage.routes.StorageDirectives._
import ch.epfl.bluebrain.nexus.storage.utils.Resources
import io.circe.Json
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class StorageDirectivesSpec
    extends AnyWordSpecLike
    with Matchers
    with ScalatestRouteTest
    with Inspectors
    with Resources {

  "the storage directives" when {

    def pathInvalidJson(path: Uri.Path): Json =
      jsonContentOf(
        "/error.json",
        Map(
          quote("{type}") -> "PathInvalid",
          quote(
            "{reason}"
          )               -> s"The provided location inside the bucket 'name' with the relative path '$path' is invalid."
        )
      )

    "dealing with file path extraction" should {
      val route = handleExceptions(exceptionHandler) {
        (extractRelativeFilePath("name") & get) { path =>
          complete(s"$path")
        }
      }

      "reject when path contains 2 slashes" in {
        Get("///") ~> route ~> check {
          status shouldEqual StatusCodes.BadRequest
          responseAs[Json] shouldEqual pathInvalidJson(Uri.Path.Empty)
        }
      }

      "reject when path does not end with a segment" in {
        Get("/some/path/") ~> route ~> check {
          status shouldEqual StatusCodes.BadRequest
          responseAs[Json] shouldEqual pathInvalidJson(Uri.Path("some/path/"))
        }
      }

      "return path" in {
        Get("/some/path/file.txt") ~> route ~> check {
          responseAs[String] shouldEqual "some/path/file.txt"
        }
      }
    }

    "dealing with path validation" should {
      def route(path: Uri.Path) =
        handleExceptions(exceptionHandler) {
          (validatePath("name", path) & get) {
            complete(s"$path")
          }
        }

      "reject when some of the segments is . or .." in {
        val paths = List(Uri.Path("/./other/file.txt"), Uri.Path("/some/../file.txt"))
        forAll(paths) { path =>
          Get(path.toString()) ~> route(path) ~> check {
            status shouldEqual StatusCodes.BadRequest
            responseAs[Json] shouldEqual pathInvalidJson(path)
          }
        }
      }

      "pass" in {
        Get("/some/path") ~> route(Uri.Path("/some/path")) ~> check {
          handled shouldEqual true
        }
      }
    }
  }
} 
Example 91
Source File: InfluxPointSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.clients

import java.time.Instant
import java.util.regex.Pattern.quote

import cats.effect.IO
import cats.implicits._
import ch.epfl.bluebrain.nexus.cli.config.influx.TypeConfig
import ch.epfl.bluebrain.nexus.cli.utils.{Resources, TimeTransformation}
import fs2._
import fs2.text._
import org.http4s.EntityEncoder
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class InfluxPointSpec extends AnyWordSpecLike with Matchers with Resources with Inspectors with TimeTransformation {

  private def writeToString[A](a: A)(implicit W: EntityEncoder[IO, A]): String =
    Stream
      .emit(W.toEntity(a))
      .covary[IO]
      .flatMap(_.body)
      .through(utf8Decode)
      .foldMonoid
      .compile
      .last
      .map(_.getOrElse(""))
      .unsafeRunSync

  "An InfluxPoint" should {

    val created = Instant.now()
    val updated = created.plusSeconds(5)

    "be created from SparqlResults" in {

      val sparqlResults = jsonContentOf(
        "/templates/sparql-results-influx.json",
        Map(
          quote("{created}") -> created.toString,
          quote("{updated}") -> updated.toString,
          quote("{bytes}")   -> 1234.toString,
          quote("{project}") -> "myorg/myproject"
        )
      ).as[SparqlResults].getOrElse(throw new IllegalArgumentException)

      val typeConfig = TypeConfig("https://neuroshapes.org/Subject", "", "datastats", Set("bytes"), "updated")

      val expected = InfluxPoint(
        "datastats",
        Map("created" -> created.toString, "project" -> "myorg/myproject", "deprecated" -> "false"),
        Map("bytes"   -> "1234"),
        Some(updated)
      )

      InfluxPoint.fromSparqlResults(sparqlResults, typeConfig) shouldEqual
        List(expected)

    }

    "converted to string" in {
      val point      = InfluxPoint(
        "m1",
        Map("created" -> created.toString, "project" -> "org/proj", "deprecated" -> "false"),
        Map("bytes"   -> "1234"),
        Some(updated)
      )
      val pointNoTag = InfluxPoint(
        "m2",
        Map.empty,
        Map("bytes" -> "2345"),
        Some(updated)
      )

      val list = List(
        point      -> s"m1,created=${created.toString},project=org/proj,deprecated=false bytes=1234 ${toNano(updated)}",
        pointNoTag -> s"m2 bytes=2345 ${toNano(updated)}"
      )

      forAll(list) {
        case (point, str) => writeToString(point) shouldEqual str
      }
    }
  }

} 
Example 92
Source File: TaggingAdapterSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.io

import java.time.Instant

import akka.persistence.journal.Tagged
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent.AclDeleted
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent.PermissionsDeleted
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent.RealmDeprecated
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.Label
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.util.EitherValues
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  private val pd = PermissionsDeleted(2L, Instant.EPOCH, Anonymous)
  private val ad = AclDeleted(Path("/a/b/c").rightValue, 2L, Instant.EPOCH, Anonymous)
  private val rd = RealmDeprecated(Label.unsafe("blah"), 2L, Instant.EPOCH, Anonymous)

  private val data = Map[AnyRef, (String, AnyRef)](
    pd  -> ("permissions-event" -> Tagged(pd, Set("permissions", "event"))),
    ad  -> ("acl-event"         -> Tagged(ad, Set("acl", "event"))),
    rd  -> ("realm-event"       -> Tagged(rd, Set("realm", "event"))),
    "a" -> (""                  -> "a")
  )

  "A TaggingAdapter" should {
    val adapter = new TaggingAdapter
    "return the correct manifests" in {
      forAll(data.toList) {
        case (event, (manifest, _)) => adapter.manifest(event) shouldEqual manifest
      }
    }
    "return the correct transformed event" in {
      forAll(data.toList) {
        case (event, (_, transformed)) => adapter.toJournal(event) shouldEqual transformed
      }
    }
  }

} 
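TaggingAdapterSpec keeps its expectations in a Map from input to expected output and lets forAll walk map.toList, destructuring each pair with a pattern-matching case. A small self-contained sketch of the same table-driven shape, assuming ScalaTest 3.1+ (parseBool is a made-up function under test):

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TableDrivenSketch extends AnyWordSpecLike with Matchers with Inspectors {

  // Hypothetical function under test.
  private def parseBool(s: String): Option[Boolean] =
    s.trim.toLowerCase match {
      case "true" | "yes" => Some(true)
      case "false" | "no" => Some(false)
      case _              => None
    }

  private val expectations = Map(
    "true"  -> Some(true),
    "YES "  -> Some(true),
    "no"    -> Some(false),
    "maybe" -> None
  )

  "parseBool" should {
    "map every documented input to the expected result" in {
      forAll(expectations.toList) {
        case (input, expected) => parseBool(input) shouldEqual expected
      }
    }
  }
}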
Example 93
Source File: ResourceFSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.{Clock, Instant, ZoneId}

import ch.epfl.bluebrain.nexus.util.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.testsyntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

//noinspection TypeAnnotation
class ResourceFSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "A ResourceMetadata" should {
    val user          = User("mysubject", "myrealm")
    val user2         = User("mysubject2", "myrealm")
    implicit val http = HttpConfig("some", 8080, "v1", "http://nexus.example.com")
    val clock: Clock  = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())
    val instant       = clock.instant()
    val id            = url"http://example.com/id"
    val printer       = Printer.spaces2.copy(dropNullValues = true)

    "be converted to Json correctly" when {
      "using multiple types" in {
        val json  = jsonContentOf("/resources/write-response.json")
        val model =
          ResourceMetadata(id, 1L, Set(nxv.AccessControlList.value, nxv.Realm.value), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using a single type" in {
        val json  = jsonContentOf("/resources/write-response-singletype.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList.value), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using no types" in {
        val json  = jsonContentOf("/resources/write-response-notypes.json")
        val model = ResourceMetadata(id, 1L, Set(), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
    }
  }
} 
Example 94
Source File: GrantTypeSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.util.EitherValues
import ch.epfl.bluebrain.nexus.iam.types.GrantType._
import io.circe.{Decoder, Encoder, Json}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.Inspectors

class GrantTypeSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  "A GrantType" when {
    "using Camel encoders" should {
      import GrantType.Camel._
      val map = Map(
        AuthorizationCode -> "authorizationCode",
        Implicit          -> "implicit",
        Password          -> "password",
        ClientCredentials -> "clientCredentials",
        DeviceCode        -> "deviceCode",
        RefreshToken      -> "refreshToken"
      )
      "be encoded properly" in {
        val encoder = implicitly[Encoder[GrantType]]
        forAll(map.toList) {
          case (gt, expected) =>
            encoder(gt) shouldEqual Json.fromString(expected)
        }
      }
      "be decoded properly" in {
        val decoder = implicitly[Decoder[GrantType]]
        forAll(map.toList) {
          case (expected, gt) =>
            decoder.decodeJson(Json.fromString(gt)).rightValue shouldEqual expected
        }
      }
      "fail to decode for unknown string" in {
        val decoder = implicitly[Decoder[GrantType]]
        decoder.decodeJson(Json.fromString("incorrect")).leftValue
      }
    }
    "using Snake encoders" should {
      import GrantType.Snake._
      val map = Map(
        AuthorizationCode -> "authorization_code",
        Implicit          -> "implicit",
        Password          -> "password",
        ClientCredentials -> "client_credentials",
        DeviceCode        -> "device_code",
        RefreshToken      -> "refresh_token"
      )
      "be encoded properly" in {
        val encoder = implicitly[Encoder[GrantType]]
        forAll(map.toList) {
          case (gt, expected) =>
            encoder(gt) shouldEqual Json.fromString(expected)
        }
      }
      "be decoded properly" in {
        val decoder = implicitly[Decoder[GrantType]]
        forAll(map.toList) {
          case (expected, gtString) =>
            decoder.decodeJson(Json.fromString(gtString)).rightValue shouldEqual expected
        }
      }
      "fail to decode for unknown string" in {
        val decoder = implicitly[Decoder[GrantType]]
        decoder.decodeJson(Json.fromString("incorrect")).leftValue
      }
    }
  }

} 
Example 95
Source File: IdentitySpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import ch.epfl.bluebrain.nexus.util.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.types.Identity.{Anonymous, Authenticated, Group, Subject, User}
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class IdentitySpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "An Identity" should {
    val user          = User("mysubject", "myrealm")
    val group         = Group("mygroup", "myrealm")
    val authenticated = Authenticated("myrealm")

    implicit val http: HttpConfig = HttpConfig("some", 8080, "v1", "http://nexus.example.com")

    "converted to Json" in {
      val userJson          = jsonContentOf("/identities/produce/user.json")
      val groupJson         = jsonContentOf("/identities/produce/group.json")
      val authenticatedJson = jsonContentOf("/identities/produce/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/produce/anonymous.json")

      val cases =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)

      forAll(cases) {
        case (model: Subject, json)  =>
          model.asJson shouldEqual json
          (model: Identity).asJson shouldEqual json
        case (model: Identity, json) => model.asJson shouldEqual json
      }
    }
    "convert from Json" in {
      val userJson          = jsonContentOf("/identities/consume/user.json")
      val groupJson         = jsonContentOf("/identities/consume/group.json")
      val authenticatedJson = jsonContentOf("/identities/consume/authenticated.json")
      val anonymousJson     = jsonContentOf("/identities/consume/anonymous.json")
      val cases             =
        List(user -> userJson, group -> groupJson, Anonymous -> anonymousJson, authenticated -> authenticatedJson)
      forAll(cases) {
        case (model: Subject, json)  =>
          json.as[Subject].rightValue shouldEqual model
          json.as[Identity].rightValue shouldEqual (model: Identity)
        case (model: Identity, json) => json.as[Identity].rightValue shouldEqual model

      }
    }
  }
}
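All of the examples above rely on forAll, but org.scalatest.Inspectors also provides forEvery, forExactly, forAtLeast, forAtMost and forBetween for other quantifications over a collection; forEvery keeps inspecting after a failure and reports every offending element, whereas forAll stops at the first one. A brief sketch of these combinators on a plain list of numbers, assuming ScalaTest 3.1+:

import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class OtherInspectorsSketch extends AnyWordSpecLike with Matchers with Inspectors {

  private val xs = List(1, 2, 3, 4, 5)

  "the other inspectors" should {
    "express different quantifications over the same collection" in {
      forEvery(xs)(x => x should be > 0)           // like forAll, but reports all failures
      forExactly(1, xs)(x => x shouldBe 3)         // exactly one element equals 3
      forAtLeast(2, xs)(x => x should be >= 4)     // at least two elements are >= 4
      forAtMost(2, xs)(x => x should be < 3)       // no more than two elements are < 3
      forBetween(1, 3, xs)(x => x % 2 shouldBe 0)  // between one and three elements are even
    }
  }
}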