org.scalatest.wordspec.AnyWordSpec Scala Examples

The following examples show how to use org.scalatest.wordspec.AnyWordSpec. Each example is taken from an open-source project; the source file, project, and license are noted above each listing.
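For orientation, here is a minimal sketch of the AnyWordSpec style before the real-world examples below. It is a hypothetical SetSpec, not taken from any of these projects: the subject string is combined with should (or when/can), and each test is a string followed by in.

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

// Hypothetical minimal example of the AnyWordSpec + Matchers style.
class SetSpec extends AnyWordSpec with Matchers {
  "An empty Set" should {
    "have size 0" in {
      Set.empty.size shouldBe 0
    }
    "throw NoSuchElementException when head is invoked" in {
      an[NoSuchElementException] should be thrownBy Set.empty.head
    }
  }
}
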
Example 1
Source File: PulsarSinkTaskTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.sink

import java.util

import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarConfigConstants
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarSinkTaskTest extends AnyWordSpec with Matchers with MockitoSugar {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should start a Sink" in {
    val props = Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava


    val assignment: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
    val partition: TopicPartition = new TopicPartition("kafka_topic", 1)
    // Set topic assignments
    assignment.add(partition)
    val context = mock[SinkTaskContext]
    when(context.assignment()).thenReturn(assignment)
    when(context.configs()).thenReturn(props)
    val task = new PulsarSinkTask()
    task.initialize(context)
    task.start(props)
  }
} 
Example 2
Source File: GrpcExceptionHandlerSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.scaladsl

import akka.actor.ActorSystem
import akka.grpc.GrpcServiceException
import akka.grpc.internal.{ GrpcProtocolNative, GrpcResponseHelpers, Identity }
import akka.grpc.scaladsl.GrpcExceptionHandler.defaultMapper
import akka.http.scaladsl.model.HttpEntity._
import akka.http.scaladsl.model.HttpResponse
import akka.stream.ActorMaterializer
import io.grpc.Status
import org.scalatest._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.{ ExecutionException, Future }

class GrpcExceptionHandlerSpec extends AnyWordSpec with Matchers with ScalaFutures with BeforeAndAfterAll {
  implicit val system = ActorSystem("Test")
  implicit val materializer = ActorMaterializer()
  implicit override val patienceConfig =
    PatienceConfig(timeout = scaled(Span(2, Seconds)), interval = scaled(Span(5, Millis)))
  implicit val writer = GrpcProtocolNative.newWriter(Identity)

  val expected: Function[Throwable, Status] = {
    case e: ExecutionException =>
      if (e.getCause == null) Status.INTERNAL
      else expected(e.getCause)
    case grpcException: GrpcServiceException => grpcException.status
    case _: NotImplementedError              => Status.UNIMPLEMENTED
    case _: UnsupportedOperationException    => Status.UNIMPLEMENTED
    case _                                   => Status.INTERNAL
  }

  val otherTypes: Seq[Throwable] = Seq(
    new GrpcServiceException(status = Status.DEADLINE_EXCEEDED),
    new NotImplementedError,
    new UnsupportedOperationException,
    new NullPointerException,
    new RuntimeException)

  val executionExceptions: Seq[Throwable] =
    otherTypes.map(new ExecutionException(_)) :+ new ExecutionException("doh", null)

  "defaultMapper" should {
    (otherTypes ++ executionExceptions).foreach { e =>
      val exp = expected(e)
      s"Map $e to $exp" in {
        defaultMapper(system)(e).status shouldBe exp
      }
    }
  }

  "default(defaultMapper)" should {
    (otherTypes ++ executionExceptions).foreach { e =>
      s"Correctly map $e" in {
        val exp = GrpcResponseHelpers.status(defaultMapper(system)(e))
        val expChunks = getChunks(exp)
        val act = GrpcExceptionHandler.from(defaultMapper(system))(system, writer)(e).futureValue
        val actChunks = getChunks(act)
        // Compare status and chunk contents separately because the responses themselves aren't equal
        act.status shouldBe exp.status
        actChunks.toString shouldEqual expChunks.toString
      }
    }
  }

  def getChunks(resp: HttpResponse): Seq[ChunkStreamPart] =
    (resp.entity match {
      case Chunked(_, chunks) =>
        chunks.runFold(Seq.empty[ChunkStreamPart]) { case (seq, chunk) => seq :+ chunk }
      case _ => Future.successful(Seq.empty[ChunkStreamPart])
    }).futureValue

  override def afterAll(): Unit = {
    super.afterAll()
    system.terminate()
  }
} 
Example 3
Source File: MetadataBuilderSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.scaladsl

import akka.util.ByteString
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MetadataBuilderSpec extends AnyWordSpec with Matchers {
  import akka.grpc.internal.MetadataImplSpec._

  "MetadataBuilder" should {
    "return empty metadata" in {
      MetadataBuilder.empty.asList shouldBe empty
      MetadataBuilder.empty.asMap shouldBe empty
    }
    "handle distinct text entries" in {
      val b = new MetadataBuilder
      TEXT_ENTRIES.foreach {
        case (k, v) => b.addText(k, v)
      }
      val m = b.build()

      TEXT_ENTRIES.foreach {
        case (k, v) => m.getText(k) shouldBe Some(v)
      }
    }

    "handle repeated text entries" in {
      val b = new MetadataBuilder
      DUPE_TEXT_VALUES.foreach { v => b.addText(DUPE_TEXT_KEY, v) }
      val m = b.build()

      m.getText(DUPE_TEXT_KEY) shouldBe Some(DUPE_TEXT_VALUES.last)

      val dupeEntries = DUPE_TEXT_VALUES.map(StringEntry)
      m.asMap(DUPE_TEXT_KEY) shouldBe dupeEntries
      m.asList.collect {
        case (k, e) if k == DUPE_TEXT_KEY => e
      } shouldBe dupeEntries
    }

    "throw exception for '-bin' suffix on text key" in {
      an[IllegalArgumentException] should be thrownBy (new MetadataBuilder).addText("foo-bin", "x")
    }

    "throw exception for missing '-bin' suffix on binary key" in {
      an[IllegalArgumentException] should be thrownBy (new MetadataBuilder).addBinary("foo", ByteString.empty)
    }

    "handle distinct binary entries" in {
      val b = new MetadataBuilder
      BINARY_ENTRIES.foreach {
        case (k, v) => b.addBinary(k, v)
      }
      val m = b.build()

      BINARY_ENTRIES.foreach {
        case (k, v) => m.getBinary(k) shouldBe Some(v)
      }
    }

    "handle repeated binary entries" in {
      val b = new MetadataBuilder
      DUPE_BINARY_VALUES.foreach { v => b.addBinary(DUPE_BINARY_KEY, v) }
      val m = b.build()

      m.getBinary(DUPE_BINARY_KEY) shouldBe Some(DUPE_BINARY_VALUES.last)

      val dupeEntries = DUPE_BINARY_VALUES.map(BytesEntry)
      m.asMap(DUPE_BINARY_KEY) shouldBe dupeEntries
      m.asList.collect {
        case (k, e) if k == DUPE_BINARY_KEY => e
      } shouldBe dupeEntries
    }
  }

} 
Example 4
Source File: NettyClientUtilsSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.internal

import com.typesafe.config.ConfigFactory
import akka.actor.ActorSystem
import org.scalatest._
import org.scalatest.concurrent._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class NettyClientUtilsSpec extends AnyWordSpec with Matchers with ScalaFutures with BeforeAndAfterAll {
  implicit val system = ActorSystem(
    "test",
    ConfigFactory
      .parseString("""
      akka.discovery.method = alwaystimingout

      akka.discovery.alwaystimingout.class = akka.grpc.internal.AlwaysTimingOutDiscovery
      """)
      .withFallback(ConfigFactory.load()))

  "The Netty client-utilities" should {
//    The channel can now retry service discovery as needed itself,
//    I guess we should test that instead?
//    "fail to create a channel when service discovery times out" in {
//      val settings = GrpcClientSettings.usingServiceDiscovery("testService")
//
//      val channel = NettyClientUtils.createChannel(settings)
//    }
  }

  override def afterAll(): Unit = {
    super.afterAll()
    system.terminate()
  }
} 
Example 5
Source File: ServerReflectionImplSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.internal

import grpc.reflection.v1alpha.reflection.ServerReflection

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServerReflectionImplSpec extends AnyWordSpec with Matchers with ScalaFutures {
  import ServerReflectionImpl._
  "The Server Reflection implementation utilities" should {
    "split strings up until the next dot" in {
      splitNext("foo.bar") should be(("foo", "bar"))
      splitNext("foo.bar.baz") should be(("foo", "bar.baz"))
    }
    "find a symbol" in {
      containsSymbol("grpc.reflection.v1alpha.ServerReflection", ServerReflection.descriptor) should be(true)
      containsSymbol("grpc.reflection.v1alpha.Foo", ServerReflection.descriptor) should be(false)
      containsSymbol("foo.Foo", ServerReflection.descriptor) should be(false)
    }
  }
} 
Example 6
Source File: ClientStateSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.internal

import scala.concurrent.duration._
import scala.concurrent.Promise

import io.grpc.ConnectivityState._

import akka.Done
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer

import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ClientStateSpec extends AnyWordSpec with Matchers with ScalaFutures with Eventually with BeforeAndAfterAll {
  implicit val sys = ActorSystem()
  implicit val mat = ActorMaterializer()
  implicit val ec = sys.dispatcher
  implicit val patience = PatienceConfig(timeout = 10.seconds, interval = 150.milliseconds)

  private def clientState(channelCompletion: Promise[Done] = Promise[Done]()) = {
    val channel =
      new InternalChannel(new ChannelUtilsSpec.FakeChannel(Stream(IDLE, CONNECTING, READY)), channelCompletion.future)
    new ClientState(channel)
  }

  "Client State" should {
    "successfully provide a channel" in {
      // given a state
      val state = clientState()
      // it provides a channel when needed
      state.internalChannel should not be null
    }
    "reuse a valid channel" in {
      // given a state
      val state = clientState()
      // it provides a channel when needed
      val c1 = state.internalChannel.managedChannel
      val c2 = state.internalChannel.managedChannel
      c1 should be(c2)
    }
  }

  override def afterAll(): Unit = {
    super.afterAll()
    sys.terminate()
  }
} 
Example 7
Source File: GrpcMarshallingSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.scaladsl

import akka.actor.ActorSystem
import akka.grpc.internal.{ AbstractGrpcProtocol, GrpcProtocolNative, Gzip }
import akka.grpc.scaladsl.headers.`Message-Encoding`
import akka.http.scaladsl.model.{ HttpEntity, HttpRequest }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import io.grpc.{ Status, StatusException }
import io.grpc.testing.integration.messages.{ BoolValue, SimpleRequest }
import io.grpc.testing.integration.test.TestService
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.immutable
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

class GrpcMarshallingSpec extends AnyWordSpec with Matchers {
  "The scaladsl GrpcMarshalling" should {
    val message = SimpleRequest(responseCompressed = Some(BoolValue(true)))
    implicit val serializer = TestService.Serializers.SimpleRequestSerializer
    implicit val system = ActorSystem()
    implicit val mat = ActorMaterializer()
    val awaitTimeout = 10.seconds
    val zippedBytes =
      AbstractGrpcProtocol.encodeFrameData(
        AbstractGrpcProtocol.fieldType(Gzip),
        Gzip.compress(serializer.serialize(message)))

    "correctly unmarshal a zipped object" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("gzip")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      val marshalled = Await.result(GrpcMarshalling.unmarshal(request), 10.seconds)
      marshalled.responseCompressed should be(Some(BoolValue(true)))
    }

    "correctly unmarshal a zipped stream" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("gzip")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes ++ zippedBytes))

      val stream = Await.result(GrpcMarshalling.unmarshalStream(request), 10.seconds)
      val items = Await.result(stream.runWith(Sink.seq), 10.seconds)
      items(0).responseCompressed should be(Some(BoolValue(true)))
      items(1).responseCompressed should be(Some(BoolValue(true)))
    }

    // https://github.com/grpc/grpc/blob/master/doc/compression.md#compression-method-asymmetry-between-peers
    // test case 6
    "fail with INTERNAL when the compressed bit is on but the encoding is identity" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("identity")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      assertFailure(GrpcMarshalling.unmarshal(request), Status.Code.INTERNAL, "encoding")
    }

    // https://github.com/grpc/grpc/blob/master/doc/compression.md#compression-method-asymmetry-between-peers
    // test case 6
    "fail with INTERNAL when the compressed bit is on but the encoding is missing" in {
      val request = HttpRequest(entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      assertFailure(GrpcMarshalling.unmarshal(request), Status.Code.INTERNAL, "encoding")
    }

    def assertFailure(failure: Future[_], expectedStatusCode: Status.Code, expectedMessageFragment: String): Unit = {
      val e = Await.result(failure.failed, awaitTimeout).asInstanceOf[StatusException]
      e.getStatus.getCode should be(expectedStatusCode)
      e.getStatus.getDescription should include(expectedMessageFragment)
    }
  }
} 
Example 8
Source File: ServiceSpec.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.gen.javadsl

import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.should.Matchers

class ServiceSpec extends AnyWordSpec with Matchers {
  "The Service model" should {
    "correctly camelcase strings" in {
      Service.toCamelCase("foo_bar") should be("FooBar")
      Service.toCamelCase("grpc-example") should be("GrpcExample")
      Service.toCamelCase("grpc02example") should be("Grpc02Example")
    }
    "correctly determine basenames" in {
      Service.basename("helloworld.proto") should be("helloworld")
      Service.basename("grpc/testing/metrics.proto") should be("metrics")
    }
  }
} 
Example 9
Source File: JacksonTest.scala    From pulsar4s   with Apache License 2.0
package com.sksamuel.pulsar4s.jacksontest

import org.apache.pulsar.client.api.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

case class Place(id: Int, name: String)
case class Cafe(name: String, place: Place)

class JacksonTest extends AnyWordSpec with Matchers {

  import com.sksamuel.pulsar4s.jackson._

  "A jackson schema instance" should {
    "create bytes from a class" in {
      val cafe = Cafe("le table", Place(1, "Paris"))
      val bytes = implicitly[Schema[Cafe]].encode(cafe)
      bytes shouldBe """{"name":"le table","place":{"id":1,"name":"Paris"}}""".getBytes("UTF-8")
    }
    "read a class from bytes" in {
      val bytes = """{"name":"le table","place":{"id":1,"name":"Paris"}}""".getBytes("UTF-8")
      implicitly[Schema[Cafe]].decode(bytes) shouldBe Cafe("le table", Place(1, "Paris"))
    }
  }
} 
Example 10
Source File: CirceMarshallerTest.scala    From pulsar4s   with Apache License 2.0
package com.sksamuel.pulsar4s.circe

import org.apache.pulsar.client.api.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CirceMarshallerTest extends AnyWordSpec with Matchers {

  import io.circe.generic.auto._

  "A circe schema instance" should {
    "create bytes from a class" in {
      val cafe = Cafe("le table", Place(1, "Paris"))
      val bytes = implicitly[Schema[Cafe]].encode(cafe)
      bytes shouldBe """{"name":"le table","place":{"id":1,"name":"Paris"}}""".getBytes("UTF-8")
    }
    "read a class from bytes" in {
      val bytes = """{"name":"le table","place":{"id":1,"name":"Paris"}}""".getBytes("UTF-8")
      implicitly[Schema[Cafe]].decode(bytes) shouldBe Cafe("le table", Place(1, "Paris"))
    }
  }
} 
Example 11
Source File: CodecDerivationTest.scala    From pulsar4s   with Apache License 2.0
package com.sksamuel.pulsar4s.circe

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

case class Place(id: Int, name: String)
case class Cafe(name: String, place: Place)

class CodecDerivationTest extends AnyWordSpec with Matchers {

  "A derived Schema instance" should {

    "be implicitly found if circe.generic.auto is in imported" in {
      """
        import io.circe.generic.auto._
        implicitly[org.apache.pulsar.client.api.Schema[Cafe]]
      """ should compile
    }

    "not compile if no decoder is in scope" in {
      """
        implicitly[org.apache.pulsar.client.api.Schema[Cafe]]
      """ shouldNot compile
    }
  }

} 
Example 12
Source File: CirisDecoderSpec.scala    From fs2-aws   with MIT License
package fs2.aws.ciris

import java.util.Date

import cats.effect.{ ContextShift, IO }
import ciris.{ ConfigException, ConfigValue }
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import software.amazon.kinesis.common.InitialPositionInStream

import scala.concurrent.ExecutionContext.Implicits.global

class CirisDecoderSpec extends AnyWordSpec with Matchers {
  implicit val cs: ContextShift[IO] = IO.contextShift(global)

  "InitialPositionDecoderSpec" should {

    "when decoding Either[InitialPositionInStream, Date]" can {

      // same package, so `import fs2.aws.ciris._` not necessary here
      def decode(testStr: String): Either[InitialPositionInStream, Date] =
        ConfigValue
          .default(testStr)
          .as[Either[InitialPositionInStream, Date]]
          .load[IO]
          .unsafeRunSync()

      def expectDecodeFailure(testString: String): Assertion =
        intercept[ConfigException] {
          decode(testString)
        }.getMessage should include(
          s"Unable to convert value $testString to InitialPositionInStream"
        )

      "decode supported strings as initial offsets" in {

        decode("LATEST")           should equal(Left(InitialPositionInStream.LATEST))
        decode("TRIM_HORIZON")     should equal(Left(InitialPositionInStream.TRIM_HORIZON))
        decode("TS_1592404273000") should equal(Right(new Date(1592404273000L)))

      }

      "fail to decode valid strings" in {

        expectDecodeFailure("FOOBAR")
        expectDecodeFailure("TS_FOO")
        expectDecodeFailure("TS_")
        expectDecodeFailure("_1592404273000")

      }
    }

  }

} 
Example 13
Source File: DynamoEventParserSpec.scala    From fs2-aws   with MIT License
package fs2.aws.dynamodb.parsers

import java.util

import cats.effect.IO
import com.amazonaws.services.dynamodbv2.model.{
  AttributeValue,
  OperationType,
  Record,
  StreamRecord,
  StreamViewType
}
import com.amazonaws.services.dynamodbv2.streamsadapter.model.RecordAdapter
import io.circe.Json
import org.scalatest.wordspec.AnyWordSpec
import io.github.howardjohn.scanamo.CirceDynamoFormat._
import org.scalatest.matchers.should.Matchers

class DynamoEventParserSpec extends AnyWordSpec with Matchers {
  "Dynamo Event Parser" should {
    "parse insert event type" in {
      val sr = new StreamRecord()
      sr.setStreamViewType(StreamViewType.NEW_IMAGE)
      val newImage = new util.HashMap[String, AttributeValue]()
      newImage.put("name", new AttributeValue().withS("Barry"))
      sr.setNewImage(newImage)
      val r = new Record()
      r.setEventName(OperationType.INSERT)
      r.withDynamodb(sr)
      parseDynamoEvent[IO, Json](new RecordAdapter(r)).unsafeRunSync() should be(
        Insert(Json.obj("name" -> Json.fromString("Barry")))
      )
    }

    "parse modify event type" in {
      val sr = new StreamRecord()
      sr.setStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES)
      val oldImage = new util.HashMap[String, AttributeValue]()
      oldImage.put("name", new AttributeValue().withS("Dmytro"))
      sr.setOldImage(oldImage)
      val newImage = new util.HashMap[String, AttributeValue]()
      newImage.put("name", new AttributeValue().withS("Barry"))
      sr.setNewImage(newImage)
      val r = new Record()
      r.setEventName(OperationType.MODIFY)
      r.withDynamodb(sr)
      parseDynamoEvent[IO, Json](new RecordAdapter(r)).unsafeRunSync() should be(
        Update(
          Json.obj("name" -> Json.fromString("Dmytro")),
          Json.obj("name" -> Json.fromString("Barry"))
        )
      )
    }

    "parse delete event type" in {
      val sr = new StreamRecord()
      sr.setStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES)
      val oldImage = new util.HashMap[String, AttributeValue]()
      oldImage.put("name", new AttributeValue().withS("Dmytro"))
      sr.setOldImage(oldImage)
      val r = new Record()
      r.setEventName(OperationType.REMOVE)
      r.withDynamodb(sr)
      parseDynamoEvent[IO, Json](new RecordAdapter(r)).unsafeRunSync() should be(
        Delete(
          Json.obj("name" -> Json.fromString("Dmytro"))
        )
      )
    }
    "parse modify event type with NewImage view only as Insert" in {
      val sr = new StreamRecord()
      sr.setStreamViewType(StreamViewType.NEW_IMAGE)
      val newImage = new util.HashMap[String, AttributeValue]()
      newImage.put("name", new AttributeValue().withS("Barry"))
      sr.setNewImage(newImage)
      val r = new Record()
      r.setEventName(OperationType.MODIFY)
      r.withDynamodb(sr)
      parseDynamoEvent[IO, Json](new RecordAdapter(r)).unsafeRunSync() should be(
        Insert(Json.obj("name" -> Json.fromString("Barry")))
      )
    }

    "do not support NEW_IMAGE view type with REMOVE operation type" in {
      val sr = new StreamRecord()
      sr.setStreamViewType(StreamViewType.NEW_IMAGE)
      val newImage = new util.HashMap[String, AttributeValue]()
      newImage.put("name", new AttributeValue().withS("Barry"))
      sr.setNewImage(newImage)
      val r = new Record()
      r.setEventName(OperationType.REMOVE)
      r.withDynamodb(sr)
      parseDynamoEvent[IO, Json](new RecordAdapter(r)).unsafeRunSync() should be(
        Unsupported("NEW_IMAGE is not supported with REMOVE")
      )
    }
  }
} 
Example 14
Source File: HttpOriginMatcherSpec.scala    From akka-http-cors   with Apache License 2.0
package ch.megard.akka.http.cors.scaladsl.model

import akka.http.scaladsl.model.headers.HttpOrigin
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HttpOriginMatcherSpec extends AnyWordSpec with Matchers with Inspectors {
  "The `*` matcher" should {
    "match any Origin" in {
      val origins = Seq(
        "http://localhost",
        "http://192.168.1.1",
        "http://test.com",
        "http://test.com:8080",
        "https://test.com",
        "https://test.com:4433"
      ).map(HttpOrigin.apply)

      forAll(origins) { o => HttpOriginMatcher.*.matches(o) shouldBe true }
    }

    "be printed as `*`" in {
      HttpOriginMatcher.*.toString shouldBe "*"
    }
  }

  "The strict() method" should {
    "build a strict matcher, comparing exactly the origins" in {
      val positives = Seq(
        "http://localhost",
        "http://test.com",
        "https://test.ch:12345",
        "https://*.test.uk.co"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://localhost:80",
        "https://localhost",
        "http://test.com:8080",
        "https://test.ch",
        "https://abc.test.uk.co"
      ).map(HttpOrigin.apply)

      val matcher = HttpOriginMatcher.strict(positives: _*)

      forAll(positives) { o => matcher.matches(o) shouldBe true }

      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://test.ch:12345"
    }
  }

  "The apply() method" should {
    "build a matcher accepting sub-domains with wildcards" in {
      val matcher = HttpOriginMatcher(
        Seq(
          "http://test.com",
          "https://test.ch:12345",
          "https://*.test.uk.co",
          "http://*.abc.com:8080",
          "http://*abc.com",        // Must start with `*.`
          "http://abc.*.middle.com" // The wildcard can't be in the middle
        ).map(HttpOrigin.apply): _*
      )

      val positives = Seq(
        "http://test.com",
        "https://test.ch:12345",
        "https://sub.test.uk.co",
        "https://sub1.sub2.test.uk.co",
        "http://sub.abc.com:8080"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://test.com:8080",
        "http://sub.test.uk.co", // must compare the scheme
        "http://sub.abc.com",    // must compare the port
        "http://abc.test.com",   // no wildcard
        "http://sub.abc.com",
        "http://subabc.com",
        "http://abc.sub.middle.com",
        "http://abc.middle.com"
      ).map(HttpOrigin.apply)

      forAll(positives) { o => matcher.matches(o) shouldBe true }

      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://*.test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://*.test.ch:12345"
    }
  }
} 
Example 15
Source File: InformationSpec.scala    From perfolation   with MIT License
package spec

import org.scalatest.matchers.should.Matchers
import perfolation.unit._
import org.scalatest.wordspec.AnyWordSpec

class InformationSpec extends AnyWordSpec with Matchers {
  "Information" when {
    "using binary" should {
      "validate bytes" in {
        Information.useBinary()
        5.b.bytes should be(5L)
      }
      "validate kilobytes" in {
        5.kb.bytes should be(5120L)
      }
      "validate megabytes" in {
        5.mb.bytes should be(5242880L)
      }
      "validate gigabytes" in {
        5.gb.bytes should be(5368709120L)
      }
      "validate terabytes" in {
        5.tb.bytes should be(5497558138880L)
      }
      "validate petabytes" in {
        5.pb.bytes should be(5629499534213120L)
      }
      "validate exabytes" in {
        5.eb.bytes should be(5764607523034234880L)
      }
      "validate zettabytes" in {
        5.zb.bytes should be(BigInt("5902958103587056517120"))
      }
      "validate yottabytes" in {
        5.yb.bytes should be(BigInt("6044629098073145873530880"))
      }
      "format kilobytes" in {
        5.kb.toString should be("5.00 KiB")
      }
    }
    "using decimal" should {
      "validate bytes" in {
        Information.useDecimal()
        5.b.bytes should be(5L)
      }
      "validate kilobytes" in {
        5.kb.bytes should be(5000L)
      }
      "validate megabytes" in {
        5.mb.bytes should be(5000000L)
      }
      "validate gigabytes" in {
        5.gb.bytes should be(5000000000L)
      }
      "validate terabytes" in {
        5.tb.bytes should be(5000000000000L)
      }
      "validate petabytes" in {
        5.pb.bytes should be(5000000000000000L)
      }
      "validate exabytes" in {
        5.eb.bytes should be(5000000000000000000L)
      }
      "validate zettabytes" in {
        5.zb.bytes should be(BigInt("5000000000000000000000"))
      }
      "validate yottabytes" in {
        5.yb.bytes should be(BigInt("5000000000000000000000000"))
      }
      "format kilobytes" in {
        5.kb.toString should be("5.00 kb")
      }
    }
  }
} 
Example 16
Source File: DateFormatSpec.scala    From perfolation   with MIT License
package tests

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class DateFormatSpec extends AnyWordSpec with Matchers {
  "Date Formatting" should {
    import perfolation._

    val date1: Long = 1524606965775L
    val hour = (21 - date1.t.timeZoneOffsetHH) % 24

    "retrieve millisecond info" in {
      date1.t.milliseconds should be(date1)
      date1.t.milliOfSecond should be(775)
      date1.t.Q should be(date1.toString)
      date1.t.L should be("775")
    }
    "retrieve seconds info" in {
      date1.t.secondOfMinute should be(5)
      date1.t.secondsOfEpoch should be(date1 / 1000L)
      date1.t.S should be("05")
      date1.t.s.toInt should be(date1 / 1000L)
    }
    "retrieve minutes info" in {
      date1.t.minuteOfHour should be(56)
      date1.t.M should be("56")
    }
    "retrieve hours info" in {
      date1.t.hour24 should be(hour)
      date1.t.hour12 should be(hour % 12)
      date1.t.isAM should be(false)
      date1.t.isPM should be(true)
      date1.t.H should be(hour.toString)
      date1.t.I should be((hour % 12).f(2))
      date1.t.k should be(hour.toString)
      date1.t.l should be((hour % 12).toString)
      date1.t.p should be("pm")
      date1.t.P should be("PM")
    }
    "retrieve days info" in {
      date1.t.dayOfWeek should be(3)
      date1.t.dayOfMonth should be(24)
      date1.t.dayOfYear should be(113)
      date1.t.A should be("Tuesday")
      date1.t.a should be("Tues")
      date1.t.j should be("114")
      date1.t.d should be("24")
      date1.t.e should be("24")
    }
    "retrieve week info" in {
    }
    "retrieve month info" in {
      date1.t.month should be(3)
      date1.t.B should be("April")
      date1.t.b should be("Apr")
      date1.t.h should be("Apr")
      date1.t.m should be("04")
    }
    "retrieve years info" in {
      date1.t.year should be(2018)
      date1.t.C should be("20")
      date1.t.Y should be("2018")
      date1.t.y should be("18")
    }
    
  }
} 
Example 17
Source File: NumberFormatSpec.scala    From perfolation   with MIT License
package tests

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import perfolation.numeric.{FastNumber, Grouping, RoundingMode}

class NumberFormatSpec extends AnyWordSpec with Matchers {
  "Number Formatting" should {
    import perfolation._

    "use a FastNumber properly" in {
      val fn = new FastNumber
      fn.set(12.345)
      fn.toString should be("12.345")
      fn.setMinimumIntegerDigits(3)
      fn.toString should be("012.345")
      fn.set(1234.567)
      fn.setMaximumIntegerDigits(2)
      fn.toString should be("34.567")
      fn.setMinimumFractionDigits(5)
      fn.toString should be("34.56700")
      fn.setMaximumFractionDigits(2, RoundingMode.HalfUp)
      fn.toString should be("34.57")
      fn.set(123456789.0)
      fn.setMaximumFractionDigits(0, RoundingMode.HalfUp)
      fn.toString should be("123456789")
      fn.group(Grouping.US)
      fn.toString should be("123,456,789")
    }
    "format an integer to two digits" in {
      4.f(2) should be("04")
      40.f(2) should be("40")
      400.f(2) should be("400")
    }
    "format an integer to two fraction digits" in {
      4.f(f = 2) should be("4.00")
      40.f(f = 2) should be("40.00")
      400.f(f = 2) should be("400.00")
    }
    "format a double to two digits" in {
      4.0.f(2) should be("04.00")
      40.0.f(2) should be("40.00")
      400.0.f(2) should be("400.00")
      4.1.f(2) should be("04.10")
      40.1.f(2) should be("40.10")
      400.1.f(2) should be("400.10")
      4.12.f(2) should be("04.12")
      40.12.f(2) should be("40.12")
      400.12.f(2) should be("400.12")
      4.123.f(2) should be("04.12")
      40.123.f(2) should be("40.12")
      400.123.f(2) should be("400.12")
      4.126.f(2) should be("04.13")
    }
    "format a BigDecimal to 0 digits" in {
      BigDecimal(6481415348.78).f(f = 0) should be("6481415349")
      BigDecimal(9999999999.99).f(f = 0) should be("10000000000")
    }
    "format a negative number properly" in {
      (-100.0).f(2) should be("-100.00")
      (-0.5).f(2) should be("-0.50")
      (-0.5).f(2, g = Grouping.US) should be("-0.50")
      (-444).f(g = Grouping.US) should be("-444")
    }
  }
} 
Example 18
Source File: StarWarsMutationSpec.scala    From sangria-relay   with Apache License 2.0
package sangria.relay.starWars

import sangria.execution.Executor
import sangria.parser.QueryParser
import sangria.relay.starWars.StarWarsData.ShipRepo
import sangria.relay.util.AwaitSupport
import sangria.marshalling.InputUnmarshaller.mapVars

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StarWarsMutationSpec extends AnyWordSpec with Matchers with AwaitSupport {
  "Mutation" should {
    "Correctly mutates the data set" in {
      val Success(doc) = QueryParser.parse(
        """
          mutation AddBWingQuery($input: IntroduceShipInput!) {
            introduceShip(input: $input) {
              ship {
                id
                name
              }
              faction {
                name
              }
              clientMutationId
            }
          }
        """)

      val vars = mapVars(
        "input" -> Map(
          "shipName" -> "B-Wing",
          "factionId" -> "RmFjdGlvbjox",
          "clientMutationId" -> "abcde"
        )
      )
      
      Executor.execute(StarWarsSchema.schema, doc, variables = vars, userContext = new ShipRepo).await should be(
        Map(
          "data" -> Map(
            "introduceShip" -> Map(
              "ship" -> Map(
                "id" -> "U2hpcDo5",
                "name" -> "B-Wing"
              ),
              "faction" -> Map(
                "name" -> "Alliance to Restore the Republic"
              ),
              "clientMutationId" -> "abcde"
            ))))
    }
  }
} 
Example 19
Source File: Base64Spec.scala    From sangria-relay   with Apache License 2.0
package sangria.relay.util

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class Base64Spec extends AnyWordSpec with Matchers {
  val TestText = "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
  val TestBase64 = "TG9yZW0gSXBzdW0gaXMgc2ltcGx5IGR1bW15IHRleHQgb2YgdGhlIHByaW50aW5nIGFuZCB0eXBlc2V0dGluZyBpbmR1c3RyeS4gTG9yZW0gSXBzdW0gaGFzIGJlZW4gdGhlIGluZHVzdHJ5J3Mgc3RhbmRhcmQgZHVtbXkgdGV4dCBldmVyIHNpbmNlIHRoZSAxNTAwcywgd2hlbiBhbiB1bmtub3duIHByaW50ZXIgdG9vayBhIGdhbGxleSBvZiB0eXBlIGFuZCBzY3JhbWJsZWQgaXQgdG8gbWFrZSBhIHR5cGUgc3BlY2ltZW4gYm9vay4gSXQgaGFzIHN1cnZpdmVkIG5vdCBvbmx5IGZpdmUgY2VudHVyaWVzLCBidXQgYWxzbyB0aGUgbGVhcCBpbnRvIGVsZWN0cm9uaWMgdHlwZXNldHRpbmcsIHJlbWFpbmluZyBlc3NlbnRpYWxseSB1bmNoYW5nZWQuIEl0IHdhcyBwb3B1bGFyaXNlZCBpbiB0aGUgMTk2MHMgd2l0aCB0aGUgcmVsZWFzZSBvZiBMZXRyYXNldCBzaGVldHMgY29udGFpbmluZyBMb3JlbSBJcHN1bSBwYXNzYWdlcywgYW5kIG1vcmUgcmVjZW50bHkgd2l0aCBkZXNrdG9wIHB1Ymxpc2hpbmcgc29mdHdhcmUgbGlrZSBBbGR1cyBQYWdlTWFrZXIgaW5jbHVkaW5nIHZlcnNpb25zIG9mIExvcmVtIElwc3VtLg=="

  val TestUtf8Text = "Lorem Ipsum ist ein einfacher Demo-Text für die Print- und Schriftindustrie. Lorem Ipsum ist in der Industrie bereits der Standard Demo-Text seit 1500, als ein unbekannter Schriftsteller eine Hand voll Wörter nahm und diese durcheinander warf um ein Musterbuch zu erstellen. Es hat nicht nur 5 Jahrhunderte überlebt, sondern auch in Spruch in die elektronische Schriftbearbeitung geschafft (bemerke, nahezu unverändert). Bekannt wurde es 1960, mit dem erscheinen von \"Letraset\", welches Passagen von Lorem Ipsum enhielt, so wie Desktop Software wie \"Aldus PageMaker\" - ebenfalls mit Lorem Ipsum."
  val TestUtf8Base64 = "TG9yZW0gSXBzdW0gaXN0IGVpbiBlaW5mYWNoZXIgRGVtby1UZXh0IGbDvHIgZGllIFByaW50LSB1bmQgU2NocmlmdGluZHVzdHJpZS4gTG9yZW0gSXBzdW0gaXN0IGluIGRlciBJbmR1c3RyaWUgYmVyZWl0cyBkZXIgU3RhbmRhcmQgRGVtby1UZXh0IHNlaXQgMTUwMCwgYWxzIGVpbiB1bmJla2FubnRlciBTY2hyaWZ0c3RlbGxlciBlaW5lIEhhbmQgdm9sbCBXw7ZydGVyIG5haG0gdW5kIGRpZXNlIGR1cmNoZWluYW5kZXIgd2FyZiB1bSBlaW4gTXVzdGVyYnVjaCB6dSBlcnN0ZWxsZW4uIEVzIGhhdCBuaWNodCBudXIgNSBKYWhyaHVuZGVydGUgw7xiZXJsZWJ0LCBzb25kZXJuIGF1Y2ggaW4gU3BydWNoIGluIGRpZSBlbGVrdHJvbmlzY2hlIFNjaHJpZnRiZWFyYmVpdHVuZyBnZXNjaGFmZnQgKGJlbWVya2UsIG5haGV6dSB1bnZlcsOkbmRlcnQpLiBCZWthbm50IHd1cmRlIGVzIDE5NjAsIG1pdCBkZW0gZXJzY2hlaW5lbiB2b24gIkxldHJhc2V0Iiwgd2VsY2hlcyBQYXNzYWdlbiB2b24gTG9yZW0gSXBzdW0gZW5oaWVsdCwgc28gd2llIERlc2t0b3AgU29mdHdhcmUgd2llICJBbGR1cyBQYWdlTWFrZXIiIC0gZWJlbmZhbGxzIG1pdCBMb3JlbSBJcHN1bS4="

  "Base64" should {
    "encode string" in {
      Base64.encode(TestText) should be (TestBase64)
    }

    "encode bytes" in {
      Base64.encode(TestUtf8Text.getBytes("UTF-8")) should be (TestUtf8Base64)
    }

    "encode UTF-8 string" in {
      Base64.encode(TestUtf8Text) should be (TestUtf8Base64)
    }

    "decode base64 string" in {
      Base64.decode(TestBase64) should be (Some(TestText))
    }

    "decode UTF-8 base64 string" in {
      Base64.decode(TestBase64) should be (Some(TestText))
    }

    "return an empty string for an empty string" in {
      Base64.decode("") should be (Some(""))
    }

    "return None for base64 strings with to little valid bits" in {
      Base64.decode("a3222==") should be (None)
    }

    "return None for base64 strings with invalid characters" in {
      Base64.decode("foobär23") should be (None)
    }

    "return None for base64 strings with wrong 4-byte ending unit" in {
      Base64.decode("TQ=") should be (None)
    }
  }
} 
Example 20
Source File: MassSettingsTest.scala    From fusion-data   with Apache License 2.0
package mass

import helloscala.common.Configuration
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MassSettingsTest extends AnyWordSpec with Matchers {
  "MassSettings" should {
    val config = Configuration.generateConfig()
    val settings = MassSettings(config)

    "compiles" in {
      settings.compiles.scala212Home should not be empty
      println(settings.compiles.scala212Home)
    }

    "test key" in {
      config.getString("test.key") shouldBe "test.key"
    }
  }
} 
Example 21
Source File: TransactionTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.blockchain.data

import com.datamountaineer.streamreactor.connect.blockchain.json.JacksonJson
import com.datamountaineer.streamreactor.connect.blockchain.{GetResourcesFromDirectoryFn, Using}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class TransactionTest extends AnyWordSpec with Matchers with Using {
  "Transaction" should {
    "be initialized from json" in {
      GetResourcesFromDirectoryFn("/transactions").foreach { file =>
        val json = scala.io.Source.fromFile(file).mkString
        val message = JacksonJson.fromJson[BlockchainMessage](json)
        message.x.isDefined shouldBe true
        val sr = message.x.get.toSourceRecord("test", 0, None)
      }
    }
    "be return from a list of json objects" in {
      scala.io.Source.fromFile(getClass.getResource("/transactions_bundle.txt").toURI.getPath)
        .mkString
        .split(';')
        .foreach { json =>
          val msg = JacksonJson.fromJson[BlockchainMessage](json)
          msg.x.isDefined shouldBe true
          msg.x.get.toSourceRecord("test", 0, None)
        }

    }
  }

} 
Example 22
Source File: BlockchainSourceTaskTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.blockchain.source

import java.util

import com.datamountaineer.streamreactor.connect.blockchain.config.BlockchainConfigConstants
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BlockchainSourceTaskTest extends AnyWordSpec with Matchers {
  "BlockchainSourceTask" should {
    "start and stop on request" ignore {
      val task = new BlockchainSourceTask()
      val map = new util.HashMap[String, String]
      map.put(BlockchainConfigConstants.KAFKA_TOPIC, "sometopic")
      task.start(map)

      //Thread.sleep(50000)
      //val records = task.poll()
      task.stop()
    }
  }
} 
Example 23
Source File: SinkRecordToDocumentTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.azure.documentdb.sink

import com.datamountaineer.streamreactor.connect.azure.documentdb.Json
import com.datamountaineer.streamreactor.connect.azure.documentdb.config.DocumentDbSinkSettings
import com.datamountaineer.streamreactor.connect.errors.NoopErrorPolicy
import com.datamountaineer.streamreactor.connect.schemas.ConverterUtil
import com.microsoft.azure.documentdb.{ConsistencyLevel, Document}
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SinkRecordToDocumentTest extends AnyWordSpec with Matchers with ConverterUtil {
  private val connection = "https://accountName.documents.azure.com:443/"

  "SinkRecordToDocument" should {
    "convert Kafka Struct to a Azure Document Db Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
        val tx = Json.fromJson[Transaction](json)

        val record = new SinkRecord("topic1", 0, null, null, Transaction.ConnectSchema, tx.toStruct(), 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)
        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        // compare string representations; the schema gives us more specific types
        document.toString shouldBe expected.toString
      }
    }

    "convert String Schema + Json payload to a Azure Document DB Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString

        val record = new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, json, 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)

        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        // compare string representations; the schema gives us more specific types
        document.toString() shouldBe expected.toString
      }
    }

    "convert Schemaless + Json payload to a Azure Document DB Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString


        val record = new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, json, 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)

        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        // compare string representations; the schema gives us more specific types
        document.toString() shouldBe expected.toString
      }
    }
  }
} 
Example 24
Source File: KeysExtractorTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.azure.documentdb.sink

import java.util

import com.datamountaineer.streamreactor.connect.azure.documentdb.Json
import com.sksamuel.avro4s.RecordFormat
import io.confluent.connect.avro.AvroData
import org.apache.kafka.common.config.ConfigException
import org.apache.kafka.connect.data.Struct
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class KeysExtractorTest extends AnyWordSpec with Matchers {
  private val avroData = new AvroData(4)

  case class WithNested(id: Int, nested: SomeTest)

  case class SomeTest(name: String, value: Double, flags: Seq[Int], map: Map[String, String])

  "KeysExtractor" should {
    "extract keys from JSON" in {
      val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction1.json").toURI.getPath).mkString
      val jvalue = Json.parseJson(json)

      val actual = KeysExtractor.fromJson(jvalue, Set("lock_time", "rbf"))
      actual shouldBe List("lock_time" -> 9223372036854775807L, "rbf" -> true)
    }

    "throw exception when extracting the keys from JSON" in {
      val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction1.json").toURI.getPath).mkString
      val jvalue = Json.parseJson(json)
      intercept[ConfigException] {
        val actual = KeysExtractor.fromJson(jvalue, Set("inputs"))
      }
    }


    "extract keys from a Map" in {
      val actual = KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> "tripple").asJava, Set("key1", "key3"))
      actual shouldBe Set("key1" -> 12, "key3" -> "tripple")
    }

    "extract keys from a Map should throw an exception if the key is another map" in {
      intercept[ConfigException] {
        KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> Map.empty[String, String]).asJava, Set("key1", "key3"))
      }
    }

    "extract keys from a Map should throw an exception if the key is an array" in {
      intercept[ConfigException] {
        KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> new util.ArrayList[String]).asJava, Set("key1", "key3"))
      }
    }

    "extract from a struct" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      val struct = avroData.toConnectData(avro.getSchema, avro)
      KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("name")) shouldBe
        Set("name" -> "abc")
    }

    "extract from a struct should throw an exception if a key is an array" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("flags"))
      }
    }

    "extract from a struct should throw an exception if a key is a map" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("map"))
      }
    }

    "extract from a struct should throw an exception if a key is a struct" in {
      val format = RecordFormat[WithNested]
      val avro = format.to(WithNested(1, SomeTest("abc", 12.5, Seq.empty, Map.empty)))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("nested"))
      }
    }
  }
} 
Example 25
Source File: MongoSinkConnectorTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.mongodb.sink

import com.datamountaineer.streamreactor.connect.mongodb.config.MongoConfigConstants
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class MongoSinkConnectorTest extends AnyWordSpec with Matchers with MockitoSugar {
  "MongoSinkConnector" should {


    "return one task when multiple routes are provided but maxTasks is 1" in {
      val map = Map(
        "topics" -> "topic1, topicA",
        MongoConfigConstants.DATABASE_CONFIG -> "database1",
        MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
        MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1; INSERT INTO coll2 SELECT * FROM topicA"
      ).asJava

      val connector = new MongoSinkConnector()
      connector.start(map)
      connector.taskConfigs(1).size() shouldBe 1
    }
  }
} 
Example 26
Source File: TestCassandraSinkConnector.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.cassandra.sink

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class TestCassandraSinkConnector extends AnyWordSpec with BeforeAndAfter with Matchers with TestConfig {
  "Should start a Cassandra Sink Connector" in {
    val props = Map(
      "topics" -> s"$TOPIC1, $TOPIC2",
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val connector = new CassandraSinkConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.asScala.head.get(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
    taskConfigs.asScala.head.get(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfigs.asScala.head.get(CassandraConfigConstants.KEY_SPACE) shouldBe TOPIC1
    taskConfigs.size() shouldBe 1
    connector.taskClass() shouldBe classOf[CassandraSinkTask]
    //connector.version() shouldBe ""
    connector.stop()
  }
} 
Example 27
Source File: TestCassandraConnectionSecure.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.cassandra

import com.datamountaineer.streamreactor.connect.cassandra.config.{CassandraConfigConstants, CassandraConfigSink}
import org.scalatest.DoNotDiscover
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


@DoNotDiscover
class TestCassandraConnectionSecure extends AnyWordSpec with Matchers with TestConfig {

  "should return a secured session" in {
    createKeySpace("connection", secure = true, ssl = false)
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> "localhost",
      CassandraConfigConstants.KEY_SPACE -> "connection",
      CassandraConfigConstants.USERNAME -> "cassandra",
      CassandraConfigConstants.PASSWD -> "cassandra",
      CassandraConfigConstants.KCQL -> "INSERT INTO TABLE SELECT * FROM TOPIC"
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    val conn = CassandraConnection(taskConfig)
    val session = conn.session
    session should not be null
    session.getCluster.getConfiguration.getProtocolOptions.getAuthProvider should not be null

    val cluster = session.getCluster
    session.close()
    cluster.close()
  }
} 
Example 28
Source File: TestCassandraSourceConnector.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.cassandra.source

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
import org.scalatest.DoNotDiscover
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


@DoNotDiscover
class TestCassandraSourceConnector extends AnyWordSpec with Matchers with TestConfig {
  "Should start a Cassandra Source Connector" in {
    val props =  Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SOURCE_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> IMPORT_QUERY_ALL,
      CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    ).asJava

    val connector = new CassandraSourceConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.asScala.head.get(CassandraConfigConstants.KCQL) shouldBe IMPORT_QUERY_ALL
    taskConfigs.asScala.head.get(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfigs.asScala.head.get(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SOURCE_KEYSPACE
    taskConfigs.asScala.head.get(CassandraConfigConstants.ASSIGNED_TABLES) shouldBe ASSIGNED_TABLES
    taskConfigs.size() shouldBe 1
    connector.taskClass() shouldBe classOf[CassandraSourceTask]
    connector.stop()
  }
} 
Example 29
Source File: TestCassandraSinkConfig.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.cassandra.config

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Matchers with TestConfig {

  "A CassandraConfig should return configured for username and password" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
    taskConfig.getString(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG) shouldBe CassandraConfigConstants.CONSISTENCY_LEVEL_DEFAULT
  }

  "A CassandraConfig should return configured for SSL" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.SSL_ENABLED -> "true",
      CassandraConfigConstants.TRUST_STORE_PATH -> TRUST_STORE_PATH,
      CassandraConfigConstants.TRUST_STORE_PASSWD -> TRUST_STORE_PASSWORD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getBoolean(CassandraConfigConstants.SSL_ENABLED) shouldBe true
    taskConfig.getString(CassandraConfigConstants.TRUST_STORE_PATH) shouldBe TRUST_STORE_PATH
    taskConfig.getPassword(CassandraConfigConstants.TRUST_STORE_PASSWD).value shouldBe TRUST_STORE_PASSWORD
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
  }

  "A CassandraConfig should return configured for SSL without client certficate authentication" in {
    val props =     Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.SSL_ENABLED -> "true",
      CassandraConfigConstants.TRUST_STORE_PATH -> TRUST_STORE_PATH,
      CassandraConfigConstants.TRUST_STORE_PASSWD -> TRUST_STORE_PASSWORD,
      CassandraConfigConstants.USE_CLIENT_AUTH -> "false",
      CassandraConfigConstants.KEY_STORE_PATH -> KEYSTORE_PATH,
      CassandraConfigConstants.KEY_STORE_PASSWD -> KEYSTORE_PASSWORD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getBoolean(CassandraConfigConstants.SSL_ENABLED) shouldBe true
    taskConfig.getString(CassandraConfigConstants.KEY_STORE_PATH) shouldBe KEYSTORE_PATH
    taskConfig.getPassword(CassandraConfigConstants.KEY_STORE_PASSWD).value shouldBe KEYSTORE_PASSWORD
    taskConfig.getBoolean(CassandraConfigConstants.USE_CLIENT_AUTH) shouldBe false
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
  }
} 
Example 30
Source File: TestCassandraSourceSettings.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.cassandra.config

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class TestCassandraSourceSettings extends AnyWordSpec with Matchers with TestConfig {
  "CassandraSettings should return setting for a source" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SOURCE_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> IMPORT_QUERY_ALL,
      CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    ).asJava

    val taskConfig = CassandraConfigSource(props)
    val settings = CassandraSettings.configureSource(taskConfig).toList
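    // configureSource yields one settings entry per KCQL statement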
    settings.size shouldBe 2
    settings.head.kcql.getSource shouldBe TABLE1
    settings.head.kcql.getTarget shouldBe TABLE1 // no table mapping was provided, so the target defaults to the table name
    settings.head.timestampColType shouldBe TimestampType.NONE
    settings(1).kcql.getSource shouldBe TABLE2
    settings(1).kcql.getTarget shouldBe TOPIC2
    settings(1).timestampColType shouldBe TimestampType.NONE
  }

  "CassandraSettings should return setting for a source with one table" in {
    val map = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> "INSERT INTO cassandra-source SELECT * FROM orders PK created",
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    )
    val taskConfig = CassandraConfigSource(map.asJava)
    val settings = CassandraSettings.configureSource(taskConfig).toList
    settings.size shouldBe 1
  }
} 
Example 31
Source File: MapMessageConverterTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.MapMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.MapMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class MapMessageConverterTest extends AnyWordSpec with Matchers with Using with BeforeAndAfterAll with TestBase {
  val converter = new MapMessageConverter()

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString
  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "MapMessageConverter" should {
    "create a JMS MapMessage" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[MapMessage]

          Option(msg).isDefined shouldBe true

          msg.getBoolean("boolean") shouldBe true
          msg.getByte("int8") shouldBe 12.toByte
          msg.getShort("int16") shouldBe 12.toShort
          msg.getInt("int32") shouldBe 12
          msg.getLong("int64") shouldBe 12L
          msg.getFloat("float32") shouldBe 12.2f
          msg.getDouble("float64") shouldBe 12.2
          msg.getString("string") shouldBe "foo"
          msg.getBytes("bytes") shouldBe "foo".getBytes()
          val arr = msg.getObject("array")
          arr.asInstanceOf[java.util.List[String]].asScala.toArray shouldBe Array("a", "b", "c")

          val map1 = msg.getObject("map").asInstanceOf[java.util.Map[String, Int]].asScala.toMap
          map1 shouldBe Map("field" -> 1)

          val map2 = msg.getObject("mapNonStringKeys").asInstanceOf[java.util.Map[Int, Int]].asScala.toMap
          map2 shouldBe Map(1 -> 1)

        }
      }
    }
  }
} 
Example 32
Source File: ObjectMessageConverterTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.ObjectMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.ObjectMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class ObjectMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {
  val converter = new ObjectMessageConverter()
  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString
  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "ObjectMessageConverter" should {
    "create an instance of jms ObjectMessage" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[ObjectMessage]

          Option(msg).isDefined shouldBe true

          msg.getBooleanProperty("boolean") shouldBe true
          msg.getByteProperty("int8") shouldBe 12.toByte
          msg.getShortProperty("int16") shouldBe 12.toShort
          msg.getIntProperty("int32") shouldBe 12
          msg.getLongProperty("int64") shouldBe 12L
          msg.getFloatProperty("float32") shouldBe 12.2f
          msg.getDoubleProperty("float64") shouldBe 12.2
          msg.getStringProperty("string") shouldBe "foo"
          msg.getObjectProperty("bytes").asInstanceOf[java.util.List[Byte]].toArray shouldBe "foo".getBytes()
          val arr = msg.getObjectProperty("array")
          arr.asInstanceOf[java.util.List[String]].asScala.toArray shouldBe Array("a", "b", "c")

          val map1 = msg.getObjectProperty("map").asInstanceOf[java.util.Map[String, Int]].asScala.toMap
          map1 shouldBe Map("field" -> 1)

          val map2 = msg.getObjectProperty("mapNonStringKeys").asInstanceOf[java.util.Map[Int, Int]].asScala.toMap
          map2 shouldBe Map(1 -> 1)
        }
      }
    }
  }
} 
Example 33
Source File: AvroMessageConverterTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink.converters

import java.nio.ByteBuffer
import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.AvroMessageConverter
import com.datamountaineer.streamreactor.connect.sink.AvroDeserializer
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import io.confluent.connect.avro.AvroData
import javax.jms.BytesMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.avro.generic.GenericData
import org.apache.avro.util.Utf8
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class AvroMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {
  val converter = new AvroMessageConverter()
  private lazy val avroData = new AvroData(128)
  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString

  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")

  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "AvroMessageConverter" should {
    "create a BytesMessage with avro payload" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>

          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[BytesMessage]

          Option(msg).isDefined shouldBe true

          msg.reset() // switch the BytesMessage from write mode to read mode before reading the body back

          val size = msg.getBodyLength

          size > 0 shouldBe true
          val data = new Array[Byte](size.toInt)
          msg.readBytes(data)

          val avroRecord = AvroDeserializer(data, avroData.fromConnectSchema(record.valueSchema()))
          avroRecord.get("int8") shouldBe 12.toByte
          avroRecord.get("int16") shouldBe 12.toShort
          avroRecord.get("int32") shouldBe 12
          avroRecord.get("int64") shouldBe 12L
          avroRecord.get("float32") shouldBe 12.2f
          avroRecord.get("float64") shouldBe 12.2
          avroRecord.get("boolean") shouldBe true
          avroRecord.get("string").toString shouldBe "foo"
          avroRecord.get("bytes").asInstanceOf[ByteBuffer].array() shouldBe "foo".getBytes()
          val array = avroRecord.get("array").asInstanceOf[GenericData.Array[Utf8]]
          val iter = array.iterator()
          new Iterator[String] {
            override def hasNext: Boolean = iter.hasNext

            override def next(): String = iter.next().toString
          }.toSeq shouldBe Seq("a", "b", "c")
          val map = avroRecord.get("map").asInstanceOf[java.util.Map[Utf8, Int]].asScala
          map.size shouldBe 1
          map.keys.head.toString shouldBe "field"
          map.get(map.keys.head) shouldBe Some(1)

          val iterRecord = avroRecord.get("mapNonStringKeys").asInstanceOf[GenericData.Array[GenericData.Record]].iterator()
          iterRecord.hasNext shouldBe true
          val r = iterRecord.next()
          r.get("key") shouldBe 1
          r.get("value") shouldBe 1
        }
      }
    }
  }
} 
Example 34
Source File: JsonMessageConverterTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.JsonMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.TextMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path


class JsonMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {

  val converter = new JsonMessageConverter()

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val queueName = UUID.randomUUID().toString
  val kcql = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(kcql, JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "JsonMessageConverter" should {
    "create a TextMessage with Json payload" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val schema = getSchema
          val struct = getStruct(schema)

          val record = new SinkRecord(kafkaTopic1, 0, null, null, schema, struct, 1)
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[TextMessage]
          Option(msg).isDefined shouldBe true

          val json = msg.getText
          json shouldBe
            """{"int8":12,"int16":12,"int32":12,"int64":12,"float32":12.2,"float64":12.2,"boolean":true,"string":"foo","bytes":"Zm9v","array":["a","b","c"],"map":{"field":1},"mapNonStringKeys":[[1,1]]}""".stripMargin

        }
      }
    }
  }
} 
Example 35
Source File: AvroRecordFieldExtractorMapFnTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase.avro

import java.nio.file.Paths

import org.apache.avro.Schema
import org.apache.hadoop.hbase.util.Bytes
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AvroRecordFieldExtractorMapFnTest extends AnyWordSpec with Matchers {

  val schema: Schema = new Schema.Parser().parse(Paths.get(getClass.getResource("/person.avsc").toURI).toFile)

  "AvroRecordFieldExtractorMapFn" should {
    "raise an exception if the given field does not exist in the schema" in {
      intercept[IllegalArgumentException] {
        AvroRecordFieldExtractorMapFn(schema, Seq("wrongField"))
      }
    }

    "raise an exception if the given field is not a primitive" in {
      intercept[IllegalArgumentException] {
        AvroRecordFieldExtractorMapFn(schema, Seq("address"))
      }
    }

    "create the mappings for all the given fields" in {
      val mappings = AvroRecordFieldExtractorMapFn(schema, Seq("firstName", "age"))

      val fnFirstName = mappings("firstName")
      val firstName = "Beaky"
      fnFirstName(firstName) shouldBe Bytes.toBytes(firstName)

      val fnAge = mappings("age")
      val age = 31
      fnAge(age) shouldBe Bytes.toBytes(age)
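      // a Double where an Int is expected should fail fast with a ClassCastException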
      intercept[ClassCastException] {
        fnAge(12.4)
      }
    }
  }
} 
Example 36
Source File: AvroSchemaFieldsExistFnTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase.avro

import com.datamountaineer.streamreactor.connect.hbase.PersonAvroSchema
import org.apache.avro.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class AvroSchemaFieldsExistFnTest extends AnyWordSpec with Matchers {
  val schema: Schema = new Schema.Parser().parse(PersonAvroSchema.schema)

  "AvroSchemaFieldsExistFn" should {
    "raise an exception if the field is not present" in {
      intercept[IllegalArgumentException] {
        AvroSchemaFieldsExistFn(schema, Seq("notpresent"))
      }

      intercept[IllegalArgumentException] {
        AvroSchemaFieldsExistFn(schema, Seq(" lastName"))
      }
    }

    "not raise an exception if the fields are present" in {
      AvroSchemaFieldsExistFn(schema, Seq("lastName", "age", "address"))
    }
  }
} 
Example 37
Source File: GenericRowKeyBuilderTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class GenericRowKeyBuilderTest extends AnyWordSpec with Matchers {
  "GenericRowKeyBuilder" should {
    "use the topic, partition and offset to make the key" in {

      val topic = "sometopic"
      val partition = 2
      val offset = 1243L
      val sinkRecord = new SinkRecord(topic, partition, Schema.INT32_SCHEMA, 345, Schema.STRING_SCHEMA, "", offset)

      val keyBuilder = new GenericRowKeyBuilderBytes()
      val expected = Bytes.add(Array(topic.fromString(), keyBuilder.delimiterBytes, partition.fromString(),
        keyBuilder.delimiterBytes, offset.fromString()))
      keyBuilder.build(sinkRecord, Nil) shouldBe expected
    }
  }
} 
Example 38
Source File: StructFieldsRowKeyBuilderTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StructFieldsRowKeyBuilderTest extends AnyWordSpec with Matchers {
  "StructFieldsRowKeyBuilder" should {
    "raise an exception if the field is not present in the struct" in {
      intercept[IllegalArgumentException] {
        val schema = SchemaBuilder.struct().name("com.example.Person")
          .field("firstName", Schema.STRING_SCHEMA)
          .field("age", Schema.INT32_SCHEMA)
          .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

        val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

        val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)

        StructFieldsRowKeyBuilderBytes(List("threshold")).build(sinkRecord, null)
      }
    }

    "create the row key based on one single field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StructFieldsRowKeyBuilderBytes(List("firstName")).build(sinkRecord, null) shouldBe "Alex".fromString
    }

    "create the row key based on more thant one field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StructFieldsRowKeyBuilderBytes(List("firstName", "age")).build(sinkRecord, null) shouldBe
        Bytes.add("Alex".fromString(), "\n".fromString(), 30.fromInt())
    }
  }
} 
Example 39
Source File: AvroRecordRowKeyBuilderTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import com.datamountaineer.streamreactor.connect.hbase.avro.AvroRecordFieldExtractorMapFn
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AvroRecordRowKeyBuilderTest extends AnyWordSpec with Matchers with MockitoSugar {
  val schema: Schema = new Schema.Parser().parse(PersonAvroSchema.schema)

  "AvroRecordRowKeyBuilder" should {
    "extract the values from the avro record and create the key" in {
      val keys = Seq("firstName", "lastName", "age")
      val rowKeyBuilder = new AvroRecordRowKeyBuilderBytes(AvroRecordFieldExtractorMapFn(schema, keys), keys)

      val sinkRecord = mock[SinkRecord]
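      // the sink record is a bare mock; the row key here is built entirely from the avro record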
      val firstName = "Jack"
      val lastName = "Smith"
      val age = 29

      val record = new GenericRecord {

        val values: Map[String, AnyRef] = Map("firstName" -> firstName, "lastName" -> lastName, "age" -> Int.box(age))

        override def get(key: String): AnyRef = values(key)

        override def put(key: String, v: scala.Any): Unit = sys.error("not supported")

        override def get(i: Int): AnyRef = sys.error("not supported")

        override def put(i: Int, v: scala.Any): Unit = sys.error("not supported")

        override def getSchema: Schema = sys.error("not supported")
      }

      val expectedValue = Bytes.add(
        Array(
          firstName.fromString(),
          rowKeyBuilder.delimBytes,
          lastName.fromString(),
          rowKeyBuilder.delimBytes,
          age.fromInt()))
      rowKeyBuilder.build(sinkRecord, record) shouldBe expectedValue
    }
  }
} 
Example 40
Source File: SinkRecordKeyRowKeyBuilderTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SinkRecordKeyRowKeyBuilderTest extends AnyWordSpec with Matchers with MockitoSugar {
  val keyRowKeyBuilder = new SinkRecordKeyRowKeyBuilderBytes()

  "SinkRecordKeyRowKeyBuilder" should {
    "create the right key from the Schema key value - Byte" in {
      val b = 123.toByte
      val sinkRecord = new SinkRecord("", 1, Schema.INT8_SCHEMA, b, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord, "Should not matter") shouldBe Array(b)

    }
    "create the right key from the Schema key value - String" in {
      val s = "somekey"
      val sinkRecord = new SinkRecord("", 1, Schema.STRING_SCHEMA, s, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe s.fromString()
    }

    "create the right key from the Schema key value - Bytes" in {
      val bArray = Array(23.toByte, 24.toByte, 242.toByte)
      val sinkRecord = new SinkRecord("", 1, Schema.BYTES_SCHEMA, bArray, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe bArray
    }
    "create the right key from the Schema key value - Boolean" in {
      val bool = true
      val sinkRecord = new SinkRecord("", 1, Schema.BOOLEAN_SCHEMA, bool, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe bool.fromBoolean()

    }

  }
} 
Example 41
Source File: StageManagerTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartition, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StageManagerTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  val dir = new Path("stageman")
  fs.mkdirs(dir)

  val manager = new StageManager(DefaultFilenamePolicy)

  "StageManager" should {

    "stage file as hidden" in {
      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      stagePath.getName.startsWith(".") shouldBe true
    }

    "delete existing file" in {

      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.exists(stagePath) shouldBe false
    }
    "commit file using offset" in {

      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))
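      // committing renames the hidden staging file to its final offset-based name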
      val finalPath = manager.commit(stagePath, tpo)
      finalPath.getName shouldBe "streamreactor_mytopic_1_100"
    }
  }
} 
Example 42
Source File: DefaultCommitPolicyTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, LocalFileSystem, Path}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.duration._

class DefaultCommitPolicyTest extends AnyWordSpec with Matchers {

  val schema: Schema = SchemaBuilder.struct()
    .field("name", SchemaBuilder.string().required().build())
    .build()

  val struct = new Struct(schema)

  implicit val conf: Configuration = new Configuration()
  implicit val fs: LocalFileSystem = FileSystem.getLocal(conf)
  val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))

  private def shouldFlush(policy: CommitPolicy, path: Path, count: Long) = {
    val status = fs.getFileStatus(path)
    policy.shouldFlush(CommitContext(tpo, path, count, status.getLen, status.getModificationTime))
  }

  "DefaultCommitPolicy" should {
    "roll over after interval" in {

      val policy = DefaultCommitPolicy(None, Option(2.seconds), None)
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 10) shouldBe false
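      // once the 2 second interval has elapsed the policy should request a flush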
      Thread.sleep(2000)
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }
    "roll over after file count" in {
      val policy = DefaultCommitPolicy(None, None, Some(9))
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 7) shouldBe false
      shouldFlush(policy, path, 8) shouldBe false
      shouldFlush(policy, path, 9) shouldBe true
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }
    "roll over after file size" in {
      val policy = DefaultCommitPolicy(Some(10), None, None)
      val path = new Path("foo")
      val out = fs.create(path)
      shouldFlush(policy, path, 7) shouldBe false
      out.writeBytes("wibble wobble wabble wubble")
      out.close()
      shouldFlush(policy, path, 9) shouldBe true
      fs.delete(path, false)
    }
  }
} 
Example 43
Source File: HiveSourceConfigTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive.source

import com.landoop.streamreactor.connect.hive.source.config.{HiveSourceConfig, ProjectionField}
import com.landoop.streamreactor.connect.hive.{TableName, Topic}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HiveSourceConfigTest extends AnyWordSpec with Matchers {

  "HiveSource" should {
    "populate required table properties from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.topic shouldBe Topic("mytopic")
      tableConfig.tableName shouldBe TableName("mytable")
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "a"), ProjectionField("b", "b"))
    }
    "populate aliases from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a as x,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "x"), ProjectionField("b", "b"))
    }
    "set projection to None for *" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select * from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection shouldBe None
    }
    "set table limit" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a from mytable limit 200"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.limit shouldBe 200
    }
  }
} 
Example 44
Source File: ParquetWriterTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive.parquet

import com.landoop.streamreactor.connect.hive.StructUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.{SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ParquetWriterTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  "ParquetWriter" should {
    "write parquet files" in {

      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()

      val users = List(
        new Struct(schema).put("name", "sam").put("title", "mr").put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
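      // read rows back until the reader returns null, which signals end of file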
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }
    "support writing nulls" in {

      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()

      val users = List(
        new Struct(schema).put("name", "sam").put("title", null).put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }
  }
} 
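The two tests above run an identical write-then-read cycle and differ only in the rows they feed in. As a sketch of how the duplication could be factored out, assuming only the parquetWriter/parquetReader helpers and StructUtils already shown in this example (the roundTrip name is illustrative, not part of the source):

  private def roundTrip(schema: org.apache.kafka.connect.data.Schema, rows: List[Struct]) = {
    val path = new Path("roundtrip.parquet")
    // write every struct, then read rows back until the reader signals end of file with null
    val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
    rows.foreach(writer.write)
    writer.close()
    val reader = parquetReader(path)
    val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
    reader.close()
    fs.delete(path, false)
    actual.map(StructUtils.extractValues)
  }

Each test body would then reduce to roundTrip(schema, users) shouldBe users.map(StructUtils.extractValues).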
Example 45
Source File: HiveSchemaTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import com.landoop.streamreactor.connect.hive.{DatabaseName, TableName}
import org.apache.kafka.connect.data.Schema
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.Random

class HiveSchemaTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  case class Foo(s: String, l: Long, b: Boolean, d: Double)
  def foo = Foo("string", Random.nextLong, Random.nextBoolean, Random.nextDouble)

  "Hive" should {
    "create correct schema for table" in {

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(foo), 2000)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          rs.next()
          rs.getLong(1) should be > 0L
        }
      }

      // check that the schema is correct
      val schema = com.landoop.streamreactor.connect.hive.schema(DatabaseName("default"), TableName(topic))
      schema.fields().asScala.map(_.name).toSet shouldBe Set("s", "b", "l", "d")
      schema.field("s").schema().`type`() shouldBe Schema.Type.STRING
      schema.field("l").schema().`type`() shouldBe Schema.Type.INT64
      schema.field("d").schema().`type`() shouldBe Schema.Type.FLOAT64
      schema.field("b").schema().`type`() shouldBe Schema.Type.BOOLEAN

      stopTask(topic)
    }
  }
} 
Example 46
Source File: HiveParquetWithPartitionTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.apache.hadoop.fs.Path
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetWithPartitionTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "write partitioned records" in {

      val count = 100000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_with_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          if (rs.next()) {
            val current = rs.getLong(1)
            println(s"Current count for $topic is $current")
            current should be > 100L
          } else {
            fail()
          }
        }
      }

      // we should see every partition created
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select distinct state from $topic")
          var count = 0
          while (rs.next()) {
            count = count + 1
          }
          println(s"State count is $count")
          count shouldBe states.length
        }
      }

      // check for the presence of each partition directory
      val table = metastore.getTable("default", topic)
      for (state <- states) {
        fs.exists(new Path(table.getSd.getLocation, s"state=$state")) shouldBe true
      }

      stopTask(topic)
    }
  }
} 
Example 47
Source File: HiveSourceTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import java.util.Collections
import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveSourceTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "read non partitioned table" in {
      val count = 2000L

      val inputTopic = createTopic()
      val sinkTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", inputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", inputTopic)
      postTask(sinkTaskDef)

      val producer = stringStringProducer()
      writeRecords(producer, inputTopic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 2000 records in hive which we can test via jdbc
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $inputTopic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(inputTopic)

      // now we can read them back in
      val outputTopic = createTopic()

      val sourceTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_source_task.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", outputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", outputTopic)
      postTask(sourceTaskDef)

      // we should have 2000 records on the outputTopic
      var records = 0L
      val consumer = stringStringConsumer("earliest")
      consumer.subscribe(Collections.singleton(outputTopic))
      eventually {
        records = records + readRecords(consumer, outputTopic, 2, TimeUnit.SECONDS).size
        records shouldBe count
      }

      stopTask(outputTopic)
    }
  }
} 
Example 48
Source File: HiveParquetTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(30000, Millis), Span(2000, Millis))

  "Hive" should {
    "write records" in {

      val count = 10000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 10000 records in hive which we can test via jdbc
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $topic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(topic)
    }
  }
} 
Example 49
Source File: SubscriptionInfoExtractFnTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.bloomberg

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SubscriptionInfoExtractFnTest extends AnyWordSpec with Matchers {
  "SubscriptionInfoExtractFn" should {
    "handle empty settings" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("") shouldBe Seq.empty
      }
    }

    "handle one ticker subscription" in {
      SubscriptionInfoExtractFn("ticker1?fields= field1, field2, field3") shouldBe Seq(
        SubscriptionInfo("ticker1", List("FIELD1", "FIELD2", "FIELD3"), "ticker1?fields= field1, field2, field3")
      )
    }
    "handle multiple tickers subscription" in {
      SubscriptionInfoExtractFn("ticker1?fields= field1, field2, field3; ticker2?fields=field1;ticker3?fields=fieldA") shouldBe List(
        SubscriptionInfo("ticker1", List("FIELD1", "FIELD2", "FIELD3"), "ticker1?fields= field1, field2, field3"),
        SubscriptionInfo("ticker2", List("FIELD1"), "ticker2?fields=field1"),
        SubscriptionInfo("ticker3", List("FIELDA"), "ticker3?fields=fieldA")
      )
    }
    "handle missing ? between ticker and fields" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker field1, field2, field3")
      }
    }

    "handle missing fields for a ticker subscription" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker1?fields=f1,f2;ticker2?fields=")
      }
    }

    "handle missing fields= for a ticker subscription" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker1?fields=f1,f2;ticker2?f3")
      }
    }
  }
} 
Example 50
Source File: CorrelationIdsExtractorFnTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.bloomberg

import com.bloomberglp.blpapi.{CorrelationID, Subscription, SubscriptionList}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CorrelationIdsExtractorFnTest extends AnyWordSpec with Matchers {
  "CorrelationIdsExtractorFn" should {
    "handle null parameter" in {
      CorrelationIdsExtractorFn(null) shouldBe ""
    }
    "list all the correlation ids" in {
      val subscriptions = new SubscriptionList()
      subscriptions.add(new Subscription("someticker1", new CorrelationID(11)))
      subscriptions.add(new Subscription("someticker2", new CorrelationID(31)))
      CorrelationIdsExtractorFn(subscriptions) shouldBe "11,31"
    }
  }
} 
Example 51
Source File: BloombergSubscriptionManagerTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.bloomberg

import com.bloomberglp.blpapi.Event.EventType
import com.bloomberglp.blpapi._
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class BloombergSubscriptionManagerTest extends AnyWordSpec with Matchers with MockitoSugar {
  "BloombergSubscriptionManager" should {
    "return null if there are no items in the manager buffer" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))
      manager.getData shouldBe None
    }

    "ignore non SUBSCRIPTION_DATA events" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))
      val events = Seq(EventType.ADMIN,
        EventType.AUTHORIZATION_STATUS,
        EventType.PARTIAL_RESPONSE,
        EventType.REQUEST,
        EventType.REQUEST_STATUS,
        EventType.RESOLUTION_STATUS,
        EventType.RESPONSE,
        EventType.SERVICE_STATUS,
        EventType.SESSION_STATUS,
        EventType.SUBSCRIPTION_STATUS,
        EventType.TIMEOUT,
        EventType.TOPIC_STATUS,
        EventType.TOKEN_STATUS)
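      // every non SUBSCRIPTION_DATA event type should be dropped without buffering any data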

      events.map { et =>
        val ev = mock[Event]
        when(ev.eventType()).thenReturn(et)
        when(ev.iterator()).thenReturn(Seq.empty[Message].iterator.asJava)
        ev
      }.foreach(manager.processEvent(_, null))
      manager.getData shouldBe None
    }

    "return all items in the buffer" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))

      val correlationId = new CorrelationID(1)

      val msg1 = mock[Message]
      val elem1 = MockElementFn(Seq(MockElementFn(3.15D, "FIELD1")))

      when(msg1.correlationID()).thenReturn(correlationId)
      when(msg1.asElement()).thenReturn(elem1)


      val msg2 = mock[Message]
      val elem2 = MockElementFn(Seq(MockElementFn(value = true, "FIELD2")))

      when(msg2.numElements()).thenReturn(1)
      when(msg2.correlationID()).thenReturn(correlationId)
      when(msg2.asElement()).thenReturn(elem2)

      val ev = mock[Event]
      when(ev.eventType()).thenReturn(Event.EventType.SUBSCRIPTION_DATA)
      when(ev.iterator()).thenReturn(Seq(msg1, msg2).iterator.asJava)

      manager.processEvent(ev, null)

      val data = manager.getData.get
      data.size() shouldBe 2

      data.get(0).data.size() shouldBe 2 // contains the subscription ticker key as well as the field
      data.get(0).data.containsKey(BloombergData.SubscriptionFieldKey) shouldBe true
      data.get(0).data.containsKey("FIELD1") shouldBe true
      data.get(0).data.get("FIELD1") shouldBe 3.15D

      data.get(1).data.size() shouldBe 2
      data.get(1).data.containsKey(BloombergData.SubscriptionFieldKey) shouldBe true
      data.get(1).data.containsKey("FIELD2") shouldBe true
      data.get(1).data.get("FIELD2") shouldBe true

      manager.getData shouldBe None
    }
  }
} 
Example 52
Source File: CreateIndexTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.elastic6

import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.connect.elastic6.indexname.CreateIndex
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CreateIndexTest extends AnyWordSpec with Matchers {
  "CreateIndex" should {
    "create an index name without suffix when suffix not set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA")
      CreateIndex.getIndexName(kcql) shouldBe "index_name"
    }

    "create an index name with suffix when suffix is set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA WITHINDEXSUFFIX=_suffix_{YYYY-MM-dd}")
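      // getIndexName resolves the {YYYY-MM-dd} token in the suffix against the current UTC date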

      val formattedDateTime = new DateTime(DateTimeZone.UTC).toString("YYYY-MM-dd")
      CreateIndex.getIndexName(kcql) shouldBe s"index_name_suffix_$formattedDateTime"
    }
  }
} 
Example 53
Source File: ConsumerConfigFactoryTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.pulsar

import com.datamountaineer.streamreactor.connect.pulsar.config._
import org.apache.pulsar.client.api.SubscriptionType
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should create a config with batch settings" in {

    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSourceSettings(config, 1)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
  }

  "should create a config with Failover mode" in {

    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = failOver",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSourceSettings(config, 2)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Failover
  }

  "should create a config with exclusive mode" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = Exclusive",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSourceSettings(config, 1)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Exclusive

  }

  "should create a config with shared mode" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = shared",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSourceSettings(config, 2)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Shared
  }

} 
Example 54
Source File: ProducerConfigFactoryTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.pulsar

import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSinkConfig, PulsarSinkSettings}
import org.apache.pulsar.client.api.CompressionType
import org.apache.pulsar.client.api.ProducerConfiguration.MessageRoutingMode
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class ProducerConfigFactoryTest extends AnyWordSpec with Matchers {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should create a SinglePartition with batching" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe true
    producerConfig(pulsarTopic).getBatchingMaxMessages shouldBe 10
    producerConfig(pulsarTopic).getBatchingMaxPublishDelayMs shouldBe 1000

    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.ZLIB
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.SinglePartition
  }

  "should create a CustomPartition with no batching and no compression" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic WITHPARTITIONER = CustomPartition"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe false
    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.NONE
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.CustomPartition
  }

  "should create a roundrobin with batching and no compression no delay" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH  = 10 WITHPARTITIONER = ROUNDROBINPARTITION"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe true
    producerConfig(pulsarTopic).getBatchingMaxMessages shouldBe 10
    producerConfig(pulsarTopic).getBatchingMaxPublishDelayMs shouldBe 10

    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.NONE
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.RoundRobinPartition
  }
} 
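
For reference, the clauses exercised in the first test map onto a hand-built configuration roughly as follows. This is a sketch assuming the Pulsar 1.x ProducerConfiguration setters that mirror the getters asserted above; it is not the factory's actual implementation:

import java.util.concurrent.TimeUnit
import org.apache.pulsar.client.api.{CompressionType, ProducerConfiguration}
import org.apache.pulsar.client.api.ProducerConfiguration.MessageRoutingMode

val conf = new ProducerConfiguration()
conf.setBatchingEnabled(true)                                   // BATCH = 10 implies batching
conf.setBatchingMaxMessages(10)                                 // BATCH = 10
conf.setBatchingMaxPublishDelay(1000, TimeUnit.MILLISECONDS)    // WITHDELAY = 1000
conf.setCompressionType(CompressionType.ZLIB)                   // WITHCOMPRESSION = ZLIB
conf.setMessageRoutingMode(MessageRoutingMode.SinglePartition)  // WITHPARTITIONER = SinglePartition
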
Example 55
Source File: PulsarWriterTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.sink

import com.datamountaineer.streamreactor.connect.pulsar.ProducerConfigFactory
import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSinkConfig, PulsarSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.apache.pulsar.client.api.{Message, MessageId, Producer, PulsarClient}
import org.mockito.ArgumentMatchers.any
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarWriterTest extends AnyWordSpec with MockitoSugar with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  def getSchema: Schema = {
    SchemaBuilder.struct
      .field("int8", SchemaBuilder.int8().defaultValue(2.toByte).doc("int8 field").build())
      .field("int16", Schema.INT16_SCHEMA)
      .field("int32", Schema.INT32_SCHEMA)
      .field("int64", Schema.INT64_SCHEMA)
      .field("float32", Schema.FLOAT32_SCHEMA)
      .field("float64", Schema.FLOAT64_SCHEMA)
      .field("boolean", Schema.BOOLEAN_SCHEMA)
      .field("string", Schema.STRING_SCHEMA)
      .build()
  }


  def getStruct(schema: Schema): Struct = {
    new Struct(schema)
      .put("int8", 12.toByte)
      .put("int16", 12.toShort)
      .put("int32", 12)
      .put("int64", 12L)
      .put("float32", 12.2f)
      .put("float64", 12.2)
      .put("boolean", true)
      .put("string", "foo")
  }


  "should write messages" in {

    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava)

    val schema = getSchema
    val struct = getStruct(schema)
    val record1 = new SinkRecord("kafka_topic", 0, null, null, schema, struct, 1)

    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)

    val client = mock[PulsarClient]
    val producer = mock[Producer]
    val messageId = mock[MessageId]

    when(client.createProducer(pulsarTopic, producerConfig(pulsarTopic))).thenReturn(producer)
    when(producer.send(any[Message])).thenReturn(messageId)

    val writer = PulsarWriter(client, "test", settings)
    writer.write(List(record1))
  }
} 
Example 56
Source File: PulsarSinkConnectorTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.sink

import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarConfigConstants
import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarSinkConnectorTest extends AnyWordSpec with Matchers {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should start a Connector and split correctly" in {
    val props = Map(
      "topics" -> "kafka_topic",
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava

    val connector = new PulsarSinkConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.size shouldBe 1
    connector.taskClass() shouldBe classOf[PulsarSinkTask]
    connector.stop()
  }

  "should throw as topic doesn't match kcql" in {
    val props = Map(
      "topics" -> "bad",
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER =  SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava

    val connector = new PulsarSinkConnector()

    intercept[ConfigException] {
      connector.start(props)
    }
  }
} 
Example 57
Source File: PulsarSourceConnectorTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.source

import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarConfigConstants
import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarSourceConnectorTest extends AnyWordSpec with Matchers {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"
  val pulsarTopic1 = "persistent://landoop/standalone/connect/kafka-topic1"

  "should start a connector with shared consumer" in {
    val kcql = s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = SHARED"
    val kcql1 = s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic1 BATCH = 10 WITHSUBSCRIPTION = FAILOVER"

    val props =  Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"$kcql;$kcql1",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava

    val connector = new PulsarSourceConnector()
    connector.start(props)
    val configs = connector.taskConfigs(2)
    configs.size() shouldBe 2
    connector.taskClass() shouldBe classOf[PulsarSourceTask]
  }

  "should fail to start a connector with exclusive consumer and more than one task" in {
    val props =  Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = exclusive;INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = FAILOVER",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava

    val connector = new PulsarSourceConnector()
    connector.start(props)
    intercept[ConfigException] {
      connector.taskConfigs(2)
    }
  }
} 
Example 58
Source File: PulsarMessageConverterTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.source

import java.util

import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSourceConfig, PulsarSourceSettings}
import com.datamountaineer.streamreactor.connect.schemas.ConverterUtil
import org.apache.kafka.connect.source.SourceRecord
import org.apache.pulsar.client.api.MessageBuilder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarMessageConverterTest extends AnyWordSpec with Matchers with ConverterUtil {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"
  val jsonMessage = "{\"int8\":12,\"int16\":12,\"int32\":12,\"int64\":12,\"float32\":12.2,\"float64\":12.2,\"boolean\":true,\"string\":\"foo\"}"

  "should convert messages" in {
    val props =  Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava

    val config = PulsarSourceConfig(props)
    val settings = PulsarSourceSettings(config, 1)

    // exercise part of the task here as well
    val task = new PulsarSourceTask()
    val convertersMap = task.buildConvertersMap(props, settings)

    val converter = PulsarMessageConverter(convertersMap, settings.kcql, false, 100, 100)

    val message = MessageBuilder
      .create
      .setContent(jsonMessage.getBytes)
      .setKey("landoop")
      .setSequenceId(1)
      .build()


    // pulsar message
    converter.convertMessages(message, pulsarTopic)

    val list = new util.ArrayList[SourceRecord]()
    converter.getRecords(list)
    list.size shouldBe 1
    val record = list.get(0)
    record.key().toString shouldBe "landoop"
    record.value().asInstanceOf[Array[Byte]].map(_.toChar).mkString shouldBe jsonMessage
  }
} 
Example 59
Source File: PulsarSinkSettingsTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.config

import com.datamountaineer.kcql.CompressionType
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {

  val topic = "persistent://landoop/standalone/connect/kafka-topic"

  "should produce a valid config" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    settings.kcql.head.getTarget shouldBe topic
  }

  "should have messagemode SinglePartititon" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = singlepartition",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    settings.kcql.head.getTarget shouldBe topic
    settings.kcql.head.getWithPartitioner shouldBe "singlepartition"
  }

  "should have messagemode RoundRobinPartition" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = RoundRobinPartition",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    settings.kcql.head.getTarget shouldBe topic
    settings.kcql.head.getWithPartitioner shouldBe "RoundRobinPartition"
  }

  "should have messagemode CustomPartition" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = CustomPartition",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    settings.kcql.head.getTarget shouldBe topic
    settings.kcql.head.getWithPartitioner shouldBe "CustomPartition"
  }

  "should have compression" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHCOMPRESSION = LZ4",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)


    val settings = PulsarSinkSettings(config)
    settings.kcql.head.getTarget shouldBe topic
    settings.kcql.head.getWithCompression shouldBe CompressionType.LZ4
  }
} 
Example 60
Source File: RedisInsertSortedSetTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.writer

import com.datamountaineer.streamreactor.connect.redis.sink.config.{RedisConfig, RedisConfigConstants, RedisConnectionInfo, RedisSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.Jedis
import redis.embedded.RedisServer

import scala.collection.JavaConverters._

class RedisInsertSortedSetTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with MockitoSugar {

  val redisServer = new RedisServer(6379)

  override def beforeAll() = redisServer.start()

  override def afterAll() = redisServer.stop()

  "Redis INSERT into Sorted Set (SS) writer" should {

    "write Kafka records to a Redis Sorted Set" in {

      val TOPIC = "cpuTopic"
      val KCQL = s"INSERT INTO cpu_stats SELECT * from $TOPIC STOREAS SortedSet(score=ts)"
      println("Testing KCQL : " + KCQL)
      val props = Map(
        RedisConfigConstants.REDIS_HOST->"localhost",
        RedisConfigConstants.REDIS_PORT->"6379",
        RedisConfigConstants.KCQL_CONFIG->KCQL
      ).asJava

      val config = RedisConfig(props)
      val connectionInfo = new RedisConnectionInfo("localhost", 6379, None)
      val settings = RedisSinkSettings(config)
      val writer = new RedisInsertSortedSet(settings)
      writer.createClient(settings)

      val schema = SchemaBuilder.struct().name("com.example.Cpu")
        .field("type", Schema.STRING_SCHEMA)
        .field("temperature", Schema.FLOAT64_SCHEMA)
        .field("voltage", Schema.FLOAT64_SCHEMA)
        .field("ts", Schema.INT64_SCHEMA).build()

      val struct1 = new Struct(schema).put("type", "Xeon").put("temperature", 60.4).put("voltage", 90.1).put("ts", 1482180657010L)
      val struct2 = new Struct(schema).put("type", "i7").put("temperature", 62.1).put("voltage", 103.3).put("ts", 1482180657020L)
      val struct3 = new Struct(schema).put("type", "i7-i").put("temperature", 64.5).put("voltage", 101.1).put("ts", 1482180657030L)

      val sinkRecord1 = new SinkRecord(TOPIC, 0, null, null, schema, struct1, 1)
      val sinkRecord2 = new SinkRecord(TOPIC, 0, null, null, schema, struct2, 2)
      val sinkRecord3 = new SinkRecord(TOPIC, 0, null, null, schema, struct3, 3)

      val jedis = new Jedis(connectionInfo.host, connectionInfo.port)
      // Clean up in-memory jedis
      jedis.flushAll()

      writer.write(Seq(sinkRecord1))
      writer.write(Seq(sinkRecord2, sinkRecord3))

      // Redis cardinality should now be 3
      jedis.zcard("cpu_stats") shouldBe 3

      val allSSrecords = jedis.zrange("cpu_stats", 0, 999999999999L)
      val results = allSSrecords.asScala.toList
      results.head shouldBe """{"type":"Xeon","temperature":60.4,"voltage":90.1,"ts":1482180657010}"""
      results(1) shouldBe """{"type":"i7","temperature":62.1,"voltage":103.3,"ts":1482180657020}"""
      results(2) shouldBe """{"type":"i7-i","temperature":64.5,"voltage":101.1,"ts":1482180657030}"""

    }

  }

} 
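
Since the records are stored with STOREAS SortedSet(score=ts) and read back in ts order, each record must end up in Redis as a ZADD scored by the ts field. A sketch of the equivalent plain Jedis call; the JSON payload is taken from the asserted values, while the writer's internals are assumed:

import redis.clients.jedis.Jedis

val jedis = new Jedis("localhost", 6379)
val payload = """{"type":"Xeon","temperature":60.4,"voltage":90.1,"ts":1482180657010}"""
jedis.zadd("cpu_stats", 1482180657010L.toDouble, payload)  // score = ts, member = JSON
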
Example 61
Source File: RedisPubSubTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.writer

import com.datamountaineer.streamreactor.connect.redis.sink.config.{RedisConfig, RedisConfigConstants, RedisConnectionInfo, RedisSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.{Jedis, JedisPubSub}
import redis.embedded.RedisServer

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer

class RedisPubSubTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with MockitoSugar {

  val redisServer = new RedisServer(6379)

  override def beforeAll() = redisServer.start()

  override def afterAll() = redisServer.stop()

  "Redis PUBSUB writer" should {

    "write Kafka records to a Redis PubSub" in {

      val TOPIC = "cpuTopic"
      val KCQL = s"SELECT * from $TOPIC STOREAS PubSub (channel=type)"
      println("Testing KCQL : " + KCQL)
      val props = Map(
        RedisConfigConstants.REDIS_HOST->"localhost",
        RedisConfigConstants.REDIS_PORT->"6379",
        RedisConfigConstants.KCQL_CONFIG->KCQL
      ).asJava

      val config = RedisConfig(props)
      val connectionInfo = new RedisConnectionInfo("localhost", 6379, None)
      val settings = RedisSinkSettings(config)
      val writer = new RedisPubSub(settings)
      writer.createClient(settings)

      val schema = SchemaBuilder.struct().name("com.example.Cpu")
        .field("type", Schema.STRING_SCHEMA)
        .field("temperature", Schema.FLOAT64_SCHEMA)
        .field("voltage", Schema.FLOAT64_SCHEMA)
        .field("ts", Schema.INT64_SCHEMA).build()

      val struct1 = new Struct(schema).put("type", "Xeon").put("temperature", 60.4).put("voltage", 90.1).put("ts", 1482180657010L)
      val struct2 = new Struct(schema).put("type", "i7").put("temperature", 62.1).put("voltage", 103.3).put("ts", 1482180657020L)
      val struct3 = new Struct(schema).put("type", "i7-i").put("temperature", 64.5).put("voltage", 101.1).put("ts", 1482180657030L)

      val sinkRecord1 = new SinkRecord(TOPIC, 0, null, null, schema, struct1, 1)
      val sinkRecord2 = new SinkRecord(TOPIC, 0, null, null, schema, struct2, 2)
      val sinkRecord3 = new SinkRecord(TOPIC, 0, null, null, schema, struct3, 3)

      val jedis = new Jedis(connectionInfo.host, connectionInfo.port)
      // Clean up in-memory jedis
      jedis.flushAll()

      val messagesMap = collection.mutable.Map[String, ListBuffer[String]]()

      val t = new Thread {
        private val pubsub = new JedisPubSub {
          override def onMessage(channel: String, message: String): Unit = {
            messagesMap.get(channel) match {
              case Some(msgs) => messagesMap.put(channel, msgs += message)
              case None => messagesMap.put(channel, ListBuffer(message))
            }
          }
        }

        override def run(): Unit = {
          jedis.subscribe(pubsub, "Xeon", "i7", "i7-i")
        }

        override def interrupt(): Unit = {
          pubsub.punsubscribe("*")
          super.interrupt()
        }
      }
      t.start()
      t.join(5000)
      if (t.isAlive) t.interrupt()

      writer.write(Seq(sinkRecord1))
      writer.write(Seq(sinkRecord2, sinkRecord3))

      messagesMap.size shouldBe 3

      messagesMap("Xeon").head shouldBe """{"type":"Xeon","temperature":60.4,"voltage":90.1,"ts":1482180657010}"""
      messagesMap("i7").head shouldBe """{"type":"i7","temperature":62.1,"voltage":103.3,"ts":1482180657020}"""
      messagesMap("i7-i").head shouldBe """{"type":"i7-i","temperature":64.5,"voltage":101.1,"ts":1482180657030}"""
    }
  }
} 
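
The channel comes from the record's type field (STOREAS PubSub (channel=type)), so the writer presumably ends up issuing plain PUBLISH commands. A sketch of the equivalent Jedis call, with the channel extraction assumed:

import redis.clients.jedis.Jedis

val jedis = new Jedis("localhost", 6379)
val payload = """{"type":"Xeon","temperature":60.4,"voltage":90.1,"ts":1482180657010}"""
jedis.publish("Xeon", payload)  // channel = value of the "type" field
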
Example 62
Source File: RedisStreamTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.writer

/*
 * Copyright 2017 Datamountaineer.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util

import com.datamountaineer.streamreactor.connect.redis.sink.RedisSinkTask
import com.datamountaineer.streamreactor.connect.redis.sink.config.{RedisConfig, RedisConfigConstants, RedisConnectionInfo, RedisSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.{Jedis, StreamEntryID}

import scala.collection.JavaConverters._

class RedisStreamTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with MockitoSugar {
//
//  val redisServer = new RedisServer(6379)
//
//  override def beforeAll() = redisServer.start()
//
//  override def afterAll() = redisServer.stop()

  "Redis Stream writer" should {

    "write Kafka records to a Redis Stream" in {

      val TOPIC = "cpuTopic"
      val KCQL = s"INSERT INTO stream1 SELECT * from $TOPIC STOREAS STREAM"
      println("Testing KCQL : " + KCQL)
      val props = Map(
        RedisConfigConstants.REDIS_HOST->"localhost",
        RedisConfigConstants.REDIS_PORT->"6379",
        RedisConfigConstants.KCQL_CONFIG->KCQL,
        RedisConfigConstants.REDIS_PASSWORD -> ""
      ).asJava

      val config = RedisConfig(props)
      val connectionInfo = new RedisConnectionInfo("localhost", 6379, None)
      val settings = RedisSinkSettings(config)
      val writer = new RedisStreams(settings)

      val schema = SchemaBuilder.struct().name("com.example.Cpu")
        .field("type", Schema.STRING_SCHEMA)
        .field("temperature", Schema.FLOAT64_SCHEMA)
        .field("voltage", Schema.FLOAT64_SCHEMA)
        .field("ts", Schema.INT64_SCHEMA).build()

      val struct1 = new Struct(schema).put("type", "Xeon").put("temperature", 60.4).put("voltage", 90.1).put("ts", 1482180657010L)

      val sinkRecord1 = new SinkRecord(TOPIC, 0, null, null, schema, struct1, 1)

      val jedis = mock[Jedis]
      writer.jedis = jedis

      val map = new util.HashMap[String, String]()
      map.put("type", "Xeon")
      map.put("temperature", "60.4")
      map.put("voltage", "90.1")
      map.put("ts", 1482180657010L.toString)

      when(jedis.auth("")).isLenient()
      when(jedis.xadd("stream1", null, map)).thenReturn(mock[StreamEntryID])
      writer.initialize(1, settings.errorPolicy)
      writer.write(Seq(sinkRecord1))
    }
  }
} 
Example 63
Source File: ConfigInsertSortedSetTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.config

import com.datamountaineer.streamreactor.connect.redis.sink.support.RedisMockSupport
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class ConfigInsertSortedSetTest extends AnyWordSpec with Matchers with RedisMockSupport {

  // Insert into a Single Sorted Set
  val KCQL1 = "INSERT INTO cpu_stats SELECT * from cpuTopic STOREAS SortedSet"
  KCQL1 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL1))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    route.getStoredAs shouldBe "SortedSet"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe true
    // Store all data on a Redis Sorted Set called <cpu_stats>
    route.getTarget shouldBe "cpu_stats"
    route.getSource shouldBe "cpuTopic"
  }

  // Define which field to use to `score` the entry in the Set
  val KCQL2 = "INSERT INTO cpu_stats_SS SELECT temperature from cpuTopic STOREAS SortedSet (score=ts)"
  KCQL2 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL2))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig
    val fields = route.getFields.asScala.toList

    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe false
    fields.length shouldBe 1
    route.getTarget shouldBe "cpu_stats_SS"
    route.getSource shouldBe "cpuTopic"

    route.getStoredAs shouldBe "SortedSet"
    route.getStoredAsParameters.asScala shouldBe Map("score" -> "ts")
  }

  // Define the Date | DateTime format to use to parse the `score` field (store millis in redis)
  val KCQL3 = "INSERT INTO cpu_stats_SS SELECT * from cpuTopic STOREAS SortedSet (score=ts,to=YYYYMMDDHHSS)"
  KCQL3 in {
    //(param1 = value1 , param2 = value2,param3=value3)
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL3))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe true
    route.getTarget shouldBe "cpu_stats_SS"
    route.getSource shouldBe "cpuTopic"

    route.getStoredAs shouldBe "SortedSet"
    route.getStoredAsParameters.asScala shouldBe Map("score" -> "ts", "to" -> "YYYYMMDDHHSS")
  }

} 
Example 64
Source File: ConfigGeoAddTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.config

import com.datamountaineer.streamreactor.connect.redis.sink.support.RedisMockSupport
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._



class ConfigGeoAddTest extends AnyWordSpec with Matchers with RedisMockSupport {

  // GEOADD with PK
  val KCQL1 = "SELECT * from addressTopic PK addressId STOREAS GeoAdd"
  KCQL1 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL1))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    route.getStoredAs shouldBe "GeoAdd"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe true
    route.getPrimaryKeys.asScala.head.getName shouldBe "addressId"
    route.getTarget shouldBe null
    route.getSource shouldBe "addressTopic"
  }

  // GEOADD with PK and prefix
  val KCQL2 = "INSERT INTO address_set SELECT * from addressTopic PK addressId STOREAS GeoAdd"
  KCQL2 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL2))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    route.getStoredAs shouldBe "GeoAdd"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe true
    route.getPrimaryKeys.asScala.head.getName shouldBe "addressId"
    route.getTarget shouldBe "address_set"
    route.getSource shouldBe "addressTopic"
  }

  // GEOADD with PK, prefix, storedAsParameters
  val KCQL3 = "INSERT INTO address_set SELECT country from addressTopic PK addressId " +
    "STOREAS GeoAdd (longitudeField=lng, latitudeField=lat)"
  KCQL3 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL3))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig
    val fields = route.getFields.asScala.toList

    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe false
    fields.length shouldBe 1
    route.getTarget shouldBe "address_set"
    route.getSource shouldBe "addressTopic"

    route.getStoredAs shouldBe "GeoAdd"
    route.getStoredAsParameters.asScala shouldBe Map("longitudeField" -> "lng", "latitudeField" -> "lat")
  }
} 
Example 65
Source File: ConfigMultipleSortedSetsTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.config

import com.datamountaineer.streamreactor.connect.redis.sink.support.RedisMockSupport
import com.datamountaineer.streamreactor.connect.rowkeys.StringStructFieldsStringKeyBuilder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class ConfigMultipleSortedSetsTest extends AnyWordSpec with Matchers with RedisMockSupport {

  // A Sorted Set will be used for every sensorID
  val KCQL1 = "SELECT temperature, humidity FROM sensorsTopic PK sensorID STOREAS SortedSet TTL = 60"
  KCQL1 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL1))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    settings.kcqlSettings.head.builder.isInstanceOf[StringStructFieldsStringKeyBuilder] shouldBe true

    route.getStoredAs shouldBe "SortedSet"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe false
    route.getSource shouldBe "sensorsTopic"
    route.getTarget shouldBe null
  }

  // If you want your Sorted Set to be prefixed use the INSERT
  val KCQL2 = "INSERT INTO SENSOR- SELECT temperature, humidity FROM sensorsTopic PK sensorID STOREAS SortedSet TTL = 60"
  // This will store the SortedSet as   Key=SENSOR-<sensorID>
  KCQL2 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL2))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig
    val fields = route.getFields.asScala.toList

    route.getPrimaryKeys.asScala.head.getName shouldBe "sensorID"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe false
    route.getSource shouldBe "sensorsTopic"
    route.getStoredAs shouldBe "SortedSet"
    route.getTarget shouldBe "SENSOR-"
    fields.length shouldBe 2
  }

  // Define which field to use to `score` the entry in the Set
  val KCQL3 = "SELECT * FROM sensorsTopic PK sensorID STOREAS SortedSet (score=ts)"
  KCQL3 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL3))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig

    route.getStoredAsParameters.asScala shouldBe Map("score" -> "ts")
    route.getPrimaryKeys.asScala.head.getName shouldBe "sensorID"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe true
    route.getSource shouldBe "sensorsTopic"
    route.getStoredAs shouldBe "SortedSet"
    route.getTarget shouldBe null
  }

  // Define the Date | DateTime format to use to parse the `score` field (store millis in redis)
  val KCQL4 = "SELECT temperature, humidity FROM sensorsTopic PK sensorID STOREAS SortedSet (score=ts, to=yyyyMMddHHmmss)"
  KCQL4 in {
    val config = getRedisSinkConfig(password = true, KCQL = Option(KCQL4))
    val settings = RedisSinkSettings(config)
    val route = settings.kcqlSettings.head.kcqlConfig
    val fields = route.getFields.asScala.toList

    route.getPrimaryKeys.asScala.head.getName shouldBe "sensorID"
    route.getFields.asScala.exists(_.getName.equals("*")) shouldBe false
    route.getSource shouldBe "sensorsTopic"
    route.getStoredAs shouldBe "SortedSet"
    route.getTarget shouldBe null
    fields.length shouldBe 2
  }
} 
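
The comment on KCQL2 above pins down the key layout for prefixed sorted sets: the KCQL target acts as a prefix and the primary-key value is appended. A hypothetical helper showing that construction (the name is invented for illustration):

def sortedSetKey(targetPrefix: String, primaryKeyValue: String): String =
  s"$targetPrefix$primaryKeyValue"

sortedSetKey("SENSOR-", "sensor42")  // "SENSOR-sensor42"
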
Example 66
Source File: MqttSourceConnectorTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.mqtt.source

import java.util

import com.datamountaineer.streamreactor.connect.mqtt.config.{MqttConfigConstants, MqttSourceConfig, MqttSourceSettings}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class MqttSourceConnectorTest extends AnyWordSpec with Matchers {
  val baseProps: Map[String, String] = Map(
    MqttConfigConstants.HOSTS_CONFIG -> "tcp://0.0.0.0:1883",
    MqttConfigConstants.QS_CONFIG -> "1"
  )

  val mqttSourceConnector = new MqttSourceConnector()
  val targets = Array("test", "topic", "stream")

  val normalSources = Array(
    "/test/#",
    "/mqttTopic/+/test",
    "/stream",
    "/some/other/topic",
    "/alot/+/+/fourth"
  )
  val sharedSources = Array(
    "$share/some-group/test/#",
    "$share/aservice/mqttTopic/+/test",
    "$share/connectorGroup/stream",
    "$share/g1/some/other/topic",
    "$share/grouped/alot/+/+/fourth"
  )

  val normalKcql: Array[String] = normalSources.zip(targets).map{
    case (source, target) =>  s"INSERT INTO `$target` SELECT * FROM `$source`"
  }
  val sharedKcql: Array[String] = sharedSources.zip(targets).map{
    case (source, target) =>  s"INSERT INTO `$target` SELECT * FROM `$source`"
  }
  val allKcql: Array[String] = normalKcql ++ sharedKcql

  "The MqttSourceConnector" should {
    "indicate that shared subscription instructions should be replicated" in {
      all (sharedKcql.map(mqttSourceConnector.shouldReplicate)) should be (true)
    }

    "indicate that normal subscription instructions should not be replicated" in {
      all (normalKcql.map(mqttSourceConnector.shouldReplicate)) should be (false)
    }
  }

  "The MqttSourceConnector" when {
    "the connect.mqtt.share.replicate option is activated" should {
      val replicateProps = baseProps ++ Map(MqttConfigConstants.REPLICATE_SHARED_SUBSCIRPTIONS_CONFIG -> "true")

      "correctly distribute instructions when there is no replicated instruction" in {
        val props = replicateProps ++ Map(MqttConfigConstants.KCQL_CONFIG -> normalKcql.mkString(";"))
        mqttSourceConnector.start(props.asJava)

        val maxTasks = 2
        val kcqls = extractKcqls(mqttSourceConnector.taskConfigs(maxTasks))
        kcqls.flatten should have length normalKcql.length
      }

      "correctly distribute instructions when there is only replicated instructions" in {
        val props = replicateProps ++ Map(MqttConfigConstants.KCQL_CONFIG -> sharedKcql.mkString(";"))
        mqttSourceConnector.start(props.asJava)

        val maxTasks = 2
        val kcqls = extractKcqls(mqttSourceConnector.taskConfigs(maxTasks))
        all (kcqls) should have length sharedKcql.length
      }

      "correctly distribute instructions when there is a mix of instructions" in {
        val props = replicateProps ++ Map(MqttConfigConstants.KCQL_CONFIG -> allKcql.mkString(";"))
        mqttSourceConnector.start(props.asJava)

        val maxTasks = 2
        val kcqls = extractKcqls(mqttSourceConnector.taskConfigs(maxTasks))
        kcqls.flatten should have length(sharedKcql.length * maxTasks + normalKcql.length)
        all (kcqls.map(_.length)) should be >= sharedKcql.length
      }
    }

    "the connect.mqtt.share.replicate option is deactivated" should {
      "not replicate shared instructions" in {
        val props = baseProps ++ Map(MqttConfigConstants.KCQL_CONFIG -> allKcql.mkString(";"))
        mqttSourceConnector.start(props.asJava)

        val maxTasks = 2
        val kcqls = extractKcqls(mqttSourceConnector.taskConfigs(maxTasks))
        kcqls.flatten should have length allKcql.length
      }
    }
  }

  def extractKcqls(configs: util.List[util.Map[String, String]]): Array[Array[String]] = {
    configs.asScala.map(t => MqttSourceSettings(MqttSourceConfig(t)).kcql).toArray
  }
} 
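
The replication tests hinge on recognising MQTT shared subscriptions, which use the $share/<group>/<topic> syntax seen in sharedSources. A plausible sketch of the check, assuming the connector inspects the parsed KCQL source (not necessarily the connector's actual implementation):

import com.datamountaineer.kcql.Kcql

def shouldReplicateSketch(kcqlStatement: String): Boolean =
  Kcql.parse(kcqlStatement).getSource.startsWith("$share/")
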
Example 67
Source File: StageManagerTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartition, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StageManagerTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  val dir = new Path("stageman")
  fs.mkdirs(dir)

  val manager = new StageManager(DefaultFilenamePolicy)

  "StageManager" should {

    "stage file as hidden" in {
      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      stagePath.getName.startsWith(".") shouldBe true
    }

    "delete existing file" in {

      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.exists(stagePath) shouldBe false
    }
    "commit file using offset" in {

      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))
      val finalPath = manager.commit(stagePath, tpo)
      finalPath.getName shouldBe "streamreactor_mytopic_1_100"
    }
  }
} 
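
The commit assertion fixes the final file naming scheme, which can be read straight off the expected value: a streamreactor prefix followed by topic, partition and offset. An assumed helper expressing that:

def committedFileName(topic: String, partition: Int, offset: Long): String =
  s"streamreactor_${topic}_${partition}_$offset"

committedFileName("mytopic", 1, 100)  // "streamreactor_mytopic_1_100"
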
Example 68
Source File: DefaultCommitPolicyTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, LocalFileSystem, Path}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.duration._

class DefaultCommitPolicyTest extends AnyWordSpec with Matchers {

  val schema: Schema = SchemaBuilder.struct()
    .field("name", SchemaBuilder.string().required().build())
    .build()

  val struct = new Struct(schema)

  implicit val conf: Configuration = new Configuration()
  implicit val fs: LocalFileSystem = FileSystem.getLocal(conf)
  val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))

  private def shouldFlush(policy: CommitPolicy, path: Path, count: Long) = {
    val status = fs.getFileStatus(path)
    policy.shouldFlush(CommitContext(tpo, path, count, status.getLen, status.getModificationTime))
  }

  "DefaultCommitPolicy" should {
    "roll over after interval" in {

      val policy = DefaultCommitPolicy(None, Option(2.seconds), None)
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 10) shouldBe false
      Thread.sleep(2000)
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }
    "roll over after file count" in {
      val policy = DefaultCommitPolicy(None, None, Some(9))
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 7) shouldBe false
      shouldFlush(policy, path, 8) shouldBe false
      shouldFlush(policy, path, 9) shouldBe true
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }
    "roll over after file size" in {
      val policy = DefaultCommitPolicy(Some(10), None, None)
      val path = new Path("foo")
      val out = fs.create(path)
      shouldFlush(policy, path, 7) shouldBe false
      out.writeBytes("wibble wobble wabble wubble")
      out.close()
      shouldFlush(policy, path, 9) shouldBe true
      fs.delete(path, false)
    }
  }
} 
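
Taken together, the three tests pin down the rollover rule: flush once any configured threshold (file size, file age, or record count) is reached. A minimal sketch of that rule; the parameter names are assumptions, and the real policy reads these values from a CommitContext:

import scala.concurrent.duration.FiniteDuration

def shouldFlushSketch(maxSize: Option[Long], interval: Option[FiniteDuration], maxCount: Option[Long])
                     (fileSize: Long, lastModified: Long, count: Long): Boolean = {
  val now = System.currentTimeMillis()
  maxSize.exists(fileSize >= _) ||
    interval.exists(i => now - lastModified >= i.toMillis) ||
    maxCount.exists(count >= _)
}
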
Example 69
Source File: HiveSourceConfigTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.connect.hive.source

import com.landoop.streamreactor.connect.hive.source.config.{HiveSourceConfig, ProjectionField}
import com.landoop.streamreactor.connect.hive.{TableName, Topic}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HiveSourceConfigTest extends AnyWordSpec with Matchers {

  "HiveSource" should {
    "populate required table properties from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.topic shouldBe Topic("mytopic")
      tableConfig.tableName shouldBe TableName("mytable")
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "a"), ProjectionField("b", "b"))
    }
    "populate aliases from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a as x,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "x"), ProjectionField("b", "b"))
    }
    "set projection to None for *" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select * from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection shouldBe None
    }
    "set table limit" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a from mytable limit 200"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.limit shouldBe 200
    }
  }
} 
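
The ProjectionField pairs asserted above mirror the field/alias view the Kcql parser exposes. A sketch of that correspondence; that getAlias defaults to the field name when no alias is given is an assumption about the kcql library:

import com.datamountaineer.kcql.Kcql
import scala.collection.JavaConverters._

val kcql = Kcql.parse("insert into mytopic select a as x, b from mytable")
val projection = kcql.getFields.asScala.map(f => (f.getName, f.getAlias)).toList
// expected: List(("a", "x"), ("b", "b"))
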
Example 70
Source File: ParquetWriterTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.connect.hive.parquet

import com.landoop.streamreactor.connect.hive.StructUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.{SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ParquetWriterTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  "ParquetWriter" should {
    "write parquet files" in {

      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()

      val users = List(
        new Struct(schema).put("name", "sam").put("title", "mr").put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }
    "support writing nulls" in {

      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()

      val users = List(
        new Struct(schema).put("name", "sam").put("title", null).put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }
  }
} 
Example 71
Source File: HiveOrcTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Seconds, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveOrcTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(120, Seconds), Span(10, Seconds))

  "Hive" should {
    "write non partitioned orc records" in {
      val count = 10000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions-orc.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 10000 records in Hive, which we can verify via JDBC
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $topic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(topic)
    }
  }
} 
Example 72
Source File: HiveSchemaTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import com.landoop.streamreactor.connect.hive.{DatabaseName, TableName}
import org.apache.kafka.connect.data.Schema
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.Random

class HiveSchemaTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  case class Foo(s: String, l: Long, b: Boolean, d: Double)
  def foo = Foo("string", Random.nextLong, Random.nextBoolean, Random.nextDouble)

  "Hive" should {
    "create correct schema for table" in {

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(foo), 2000)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          rs.next()
          rs.getLong(1) should be > 0L
        }
      }

      // check that the schema is correct
      val schema = com.landoop.streamreactor.connect.hive.schema(DatabaseName("default"), TableName(topic))
      schema.fields().asScala.map(_.name).toSet shouldBe Set("s", "b", "l", "d")
      schema.field("s").schema().`type`() shouldBe Schema.Type.STRING
      schema.field("l").schema().`type`() shouldBe Schema.Type.INT64
      schema.field("d").schema().`type`() shouldBe Schema.Type.FLOAT64
      schema.field("b").schema().`type`() shouldBe Schema.Type.BOOLEAN

      stopTask(topic)
    }
  }
} 
Example 73
Source File: HiveParquetWithPartitionTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.apache.hadoop.fs.Path
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetWithPartitionTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "write partitioned records" in {

      val count = 100000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_with_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          if (rs.next()) {
            val count = rs.getLong(1)
            println(s"Current count for $topic is $count")
            count should be > 100L
          } else {
            fail()
          }
        }
      }

      // we should see every partition created
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select distinct state from $topic")
          var count = 0
          while (rs.next()) {
            count = count + 1
          }
          println(s"State count is $count")
          count shouldBe states.length
        }
      }

      // check for the presence of each partition directory
      val table = metastore.getTable("default", topic)
      for (state <- states) {
        fs.exists(new Path(table.getSd.getLocation, s"state=$state")) shouldBe true
      }

      stopTask(topic)
    }
  }
} 
Example 74
Source File: HiveSourceTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.hive.it

import java.util.Collections
import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveSourceTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "read non partitioned table" in {
      val count = 2000L

      val inputTopic = createTopic()
      val sinkTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", inputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", inputTopic)
      postTask(sinkTaskDef)

      val producer = stringStringProducer()
      writeRecords(producer, inputTopic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 2000 records in Hive, which we can verify via JDBC
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $inputTopic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(inputTopic)

      // now we can read them back in
      val outputTopic = createTopic()

      val sourceTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_source_task.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", outputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", outputTopic)
      postTask(sourceTaskDef)

      // we should now have 2000 records on the outputTopic
      var records = 0L
      val consumer = stringStringConsumer("earliest")
      consumer.subscribe(Collections.singleton(outputTopic))
      eventually {
        records = records + readRecords(consumer, outputTopic, 2, TimeUnit.SECONDS).size
        records shouldBe count
      }

      stopTask(outputTopic)
    }
  }
} 
Example 75
Source File: HiveParquetTest.scala    From stream-reactor   with Apache License 2.0
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(30000, Millis), Span(2000, Millis))

  "Hive" should {
    "write records" in {

      val count = 10000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 10000 records in Hive, which we can verify via JDBC
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $topic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(topic)
    }
  }
} 
Example 76
Source File: RetriesTest.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.voltdb.writers

import io.confluent.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class RetriesTest extends AnyWordSpec with Matchers with Retries {
  "Retries" should {
    "return the value when no error is encountered" in {
      val expected = "The return value"
      val actual = withRetries(10, 10, Some("abcd"))(expected)
      actual shouldBe expected
    }
    "return the value if an error is thrown but max retries is not met" in {
      val expected = "The return value"
      var count = 10
      val actual = withRetries(10, 10, Some("abcd")) {
        count -= 1
        if (count == 0) expected
        else throw new RuntimeException("something went wrong")
      }

      actual shouldBe expected
    }
    "return the value even with 0 retries" in {
      val expected = 12315L
      val actual = withRetries(0, 10, Some("abcd"))(expected)
      actual shouldBe expected
    }

    "throws the last exception" in {
      var count = 4
      intercept[ConfigException] {
        withRetries(4, 10, Some("abcd")) {
          count -= 1
          if (count > 0) sys.error("Not yet")
          else throw new ConfigException("this one")
        }
      }
    }
  }
} 
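Note: the Retries trait under test is not shown on this page. A minimal tail-recursive sketch that is consistent with these tests might look like the following; the parameter meanings (retry count, pause in milliseconds, optional log message) are assumptions inferred from the call sites above.

import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}

trait Retries {
  // Evaluates `thunk`, retrying up to `retries` times with a pause between attempts;
  // once the retries are exhausted the last exception is rethrown.
  @tailrec
  final def withRetries[T](retries: Int, intervalMillis: Long, errorMessage: Option[String])(thunk: => T): T =
    Try(thunk) match {
      case Success(value) => value
      case Failure(_) if retries > 0 =>
        errorMessage.foreach(msg => println(s"$msg; retrying in $intervalMillis ms"))
        Thread.sleep(intervalMillis)
        withRetries(retries - 1, intervalMillis, errorMessage)(thunk)
      case Failure(t) => throw t
    }
}

With this shape, withRetries(0, ...)(f) evaluates f exactly once, matching the zero-retries test above.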
Example 77
Source File: StructFieldsExtractorTest.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.voltdb

import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class StructFieldsExtractorTest extends AnyWordSpec with Matchers {
  "StructFieldsExtractor" should {
    "return all the fields and their bytes value" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("lastName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema)
        .put("firstName", "Alex")
        .put("lastName", "Smith")
        .put("age", 30)

      val map = StructFieldsExtractor("table", includeAllFields = true, Map.empty).get(struct)
      map("firstName") shouldBe "Alex"
      map("lastName") shouldBe "Smith"
      map("age") shouldBe 30
    }

    "return all fields and apply the mapping" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("lastName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema)
        .put("firstName", "Alex")
        .put("lastName", "Smith")
        .put("age", 30)

      val map = StructFieldsExtractor("table", includeAllFields = true, Map("lastName" -> "Name", "age" -> "a")).get(struct)
      map("firstName") shouldBe "Alex"
      map("Name") shouldBe "Smith"
      map("a") shouldBe 30

    }

    "return only the specified fields" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("lastName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema)
        .put("firstName", "Alex")
        .put("lastName", "Smith")
        .put("age", 30)

      val map = StructFieldsExtractor("table", includeAllFields = false, Map("lastName" -> "Name", "age" -> "age")).get(struct)
      map("Name") shouldBe "Smith"
      map("age") shouldBe 30
      map.size shouldBe 2
    }
  }
} 
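The extractor's contract can be read off these tests: with includeAllFields set it returns every non-null field keyed by its (possibly re-mapped) name, otherwise only the fields named in the alias map. A hypothetical reconstruction, not the connector's actual code, could be:

import org.apache.kafka.connect.data.Struct

import scala.collection.JavaConverters._

// Hypothetical sketch matching the tests above; the real VoltDB extractor also handles logical types.
case class StructFieldsExtractor(targetTable: String,
                                 includeAllFields: Boolean,
                                 fieldsAliasMap: Map[String, String]) {
  def get(struct: Struct): Map[String, Any] =
    struct.schema().fields().asScala
      .filter(field => includeAllFields || fieldsAliasMap.contains(field.name()))
      .flatMap { field =>
        // unset optional fields come back as null and are skipped
        Option(struct.get(field)).map(value => fieldsAliasMap.getOrElse(field.name(), field.name()) -> value)
      }
      .toMap
}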
Example 78
Source File: TestCoapSourceConnector.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.coap.source

import com.datamountaineer.streamreactor.connect.coap.TestBase
import com.datamountaineer.streamreactor.connect.coap.configs.CoapConstants
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class TestCoapSourceConnector extends AnyWordSpec with TestBase {
  "should create a CoapSourceConnector" in {
    val props = getPropsSecure
    val connector = new CoapSourceConnector
    connector.start(props)
    val taskConfigs = connector.taskConfigs(2)
    taskConfigs.size() shouldBe 1
    taskConfigs.asScala.head.get(CoapConstants.COAP_KCQL) shouldBe SOURCE_KCQL_SECURE
    taskConfigs.asScala.head.get(CoapConstants.COAP_KEY_STORE_PATH) shouldBe KEYSTORE_PATH
    taskConfigs.asScala.head.get(CoapConstants.COAP_TRUST_STORE_PATH) shouldBe TRUSTSTORE_PATH
    taskConfigs.asScala.head.get(CoapConstants.COAP_URI) shouldBe SOURCE_URI_SECURE
    connector.taskClass() shouldBe classOf[CoapSourceTask]
  }

  "should create a CoapSourceConnector multiple kcql" in {
    val props = getPropsSecureMultipleKCQL
    val connector = new CoapSourceConnector
    connector.start(props)
    val taskConfigs = connector.taskConfigs(2)
    taskConfigs.size() shouldBe 2
    taskConfigs.asScala.head.get(CoapConstants.COAP_KCQL) shouldBe SOURCE_KCQL_SECURE
    taskConfigs.asScala.head.get(CoapConstants.COAP_KEY_STORE_PATH) shouldBe KEYSTORE_PATH
    taskConfigs.asScala.head.get(CoapConstants.COAP_TRUST_STORE_PATH) shouldBe TRUSTSTORE_PATH
    taskConfigs.asScala.head.get(CoapConstants.COAP_URI) shouldBe SOURCE_URI_SECURE
    connector.taskClass() shouldBe classOf[CoapSourceTask]
  }
} 
Example 79
Source File: TestCoapSourceSettings.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.coap.config

import com.datamountaineer.streamreactor.connect.coap.TestBase
import com.datamountaineer.streamreactor.connect.coap.configs.{CoapSettings, CoapSinkConfig, CoapSourceConfig}
import org.apache.kafka.common.config.ConfigException
import org.scalatest.wordspec.AnyWordSpec


class TestCoapSourceSettings extends AnyWordSpec with TestBase {
  "should create CoapSettings for an insecure connection" in {
    val props = getPropsInsecure
    val config = CoapSourceConfig(props)
    val settings = CoapSettings(config)
    val setting = settings.head
    setting.kcql.getSource shouldBe RESOURCE_INSECURE
    setting.kcql.getTarget shouldBe TOPIC
    setting.uri shouldBe SOURCE_URI_INSECURE
    setting.keyStoreLoc.nonEmpty shouldBe false
    setting.trustStoreLoc.nonEmpty shouldBe false
  }

  "should create CoapSettings for an secure connection" in {
    val props = getPropsSecure
    val config = CoapSourceConfig(props)
    val settings = CoapSettings(config)
    val setting = settings.head
    setting.kcql.getSource shouldBe RESOURCE_SECURE
    setting.kcql.getTarget shouldBe TOPIC
    setting.uri shouldBe SOURCE_URI_SECURE
    setting.keyStoreLoc.nonEmpty shouldBe true
    setting.trustStoreLoc.nonEmpty shouldBe true
  }

  "should fail to create CoapSettings for an secure connection with key wrong path" in {
    val props = getPropsSecureKeyNotFound
    val config = CoapSinkConfig(props)
    intercept[ConfigException] {
      CoapSettings(config)
    }
  }

  "should fail to create CoapSettings for an secure connection with trust wrong path" in {
    val props = getPropsSecureTrustNotFound
    val config = CoapSourceConfig(props)
    intercept[ConfigException] {
      CoapSettings(config)
    }
  }
} 
Example 80
Source File: TestCoapMessageConverter.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.coap.domain

import com.datamountaineer.streamreactor.connect.coap.TestBase
import org.apache.kafka.connect.data.Struct
import org.scalatest.wordspec.AnyWordSpec


class TestCoapMessageConverter extends AnyWordSpec with TestBase {
  "should convert a CoapResponse to a Struct " in {
    val response = getCoapResponse
    val converter = new CoapMessageConverter
    val record = converter.convert(RESOURCE_INSECURE, TOPIC, response)
    val struct = record.value().asInstanceOf[Struct]
    struct.getString("payload") shouldBe response.getPayloadString
    struct.getInt32("raw_code") shouldBe response.getRawCode
    struct.getBoolean("is_last") shouldBe response.isLast
    struct.getInt32("content_format") shouldBe response.getOptions.getContentFormat
  }
} 
Example 81
Source File: BaseSpec.scala    From pfhais   with Creative Commons Attribution Share Alike 4.0 International 5 votes vote down vote up
package com.wegtam.books.pfhais

import com.typesafe.config._
import com.wegtam.books.pfhais.tapir.config._
import eu.timepit.refined.auto._
import pureconfig._
import org.scalatest._
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks


abstract class BaseSpec extends AnyWordSpec 
    with MustMatchers
    with ScalaCheckPropertyChecks
    with BeforeAndAfterAll
    with BeforeAndAfterEach {

  protected val config = ConfigFactory.load()
  protected val dbConfig = ConfigSource.fromConfig(config).at("database").load[DatabaseConfig]

  override def beforeAll(): Unit = {
    val _ = withClue("Database configuration could not be loaded!") {
      dbConfig.isRight must be(true)
    }
  }
} 
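For load[DatabaseConfig] to compile, an implicit ConfigReader[DatabaseConfig] must be in scope. A hypothetical stand-in for the config class (the real one lives in com.wegtam.books.pfhais.tapir.config and refines these fields with refined types) might be:

import pureconfig.ConfigReader
import pureconfig.generic.semiauto.deriveReader

// Hypothetical shape, assuming the pureconfig-generic module; the book's class refines these Strings.
final case class DatabaseConfig(driver: String, url: String, user: String, pass: String)

object DatabaseConfig {
  implicit val databaseConfigReader: ConfigReader[DatabaseConfig] = deriveReader[DatabaseConfig]
}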
Example 82
Source File: ServeSpec.scala    From typed-schema   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.tschema.akkaHttp

import akka.http.scaladsl.model.Multipart.FormData
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.{HttpEntity, Uri}
import akka.http.scaladsl.server.MissingQueryParamRejection
import akka.http.scaladsl.testkit.ScalatestRouteTest
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.tschema.syntax

class ServeSpec extends AnyWordSpec with Matchers with ScalatestRouteTest {
  trait Small

  import ru.tinkoff.tschema.syntax._
  val dsl = syntax

  val intAnswer = 42

  object handler {
    val int = 42

    def repeat(body: String, n: Int) = body * n

    def multiply(x: Long, y: Double) = f"result is ${x * y}%.2f"

    def size(args: List[Int]) = args.size

    def min(args: List[Int]) = args.min
  }

  def api = (keyPrefix("int") :> get :> complete[Int]) ~
    (keyPrefix("repeat") :> reqBody[String] :> queryParam[Int]("n") :> post :> complete[String]) ~
    (keyPrefix("multiply") :> formField[Long]("x") :> formField[Double]("y") :> post :> complete[String]) ~
    (keyPrefix("size") :> queryParams[Option[Int]]("args") :> post :> complete[Int]) ~
    (keyPrefix("min") :> queryParams[Int]("args") :> post :> complete[Int])

  val route = MkRoute(api)(handler)

  "Simple service" should {
    "return a simple int" in {
      Get("/int") ~> route ~> check {
        responseAs[Int] shouldEqual intAnswer
      }
    }

    "multiply string by n times" in {
      Post(Uri("/repeat").withQuery(Query("n" -> "5")), "batman") ~> route ~> check {
        responseAs[String] shouldEqual ("batman" * 5)
      }
    }

    "multiply numbers from formdata" in {
      Post(Uri("/multiply"), FormData(Map("x" -> HttpEntity("3"), "y" -> HttpEntity("1.211")))) ~>
        route ~>
        check {
          responseAs[String] shouldEqual f"result is ${3.63}%.2f"
        }
    }

    "return size of empty args" in {
      Post(Uri("/size")) ~> route ~> check {
        responseAs[Int] shouldEqual 0
      }
    }

    "return size of non empty args" in {
      Post(Uri("/size").withQuery(Query(List("1", "2", "3").map("args" -> _): _*))) ~> route ~> check {
        responseAs[Int] shouldEqual 3
      }
    }

    "return min of non empty args" in {
      Post(Uri("/min").withQuery(Query(List("3", "1", "2").map("args" -> _): _*))) ~> route ~> check {
        responseAs[Int] shouldEqual 1
      }
    }

    "reject on min with empty args" in {
      Post(Uri("/min")) ~> route ~> check {
        rejection shouldEqual MissingQueryParamRejection("args")
      }
    }
  }
} 
Example 83
Source File: XmlSpec.scala    From typed-schema   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.tschema
package swagger

import syntax._
import SwaggerTypeable.deriveNamedTypeable
import io.circe.syntax._
import cats.syntax.option._
import io.circe.Printer
import SwaggerXMLOptions.{apply => xmlOpts}
import shapeless.syntax.singleton._
import org.scalatest.wordspec.AnyWordSpec

class XmlSpec extends AnyWordSpec {
  val swaggerJson = XmlSpec.swagger.make(OpenApiInfo()).asJson
  val top         = swaggerJson.hcursor
  val method      = top.downField("paths").downField("/xml").downField("get")
  val response    = method.downField("responses").downField("200").downField("content")
  val bookType    = top.downField("components").downField("schemas").downField("Book")
  "Swagger Json" should {
    "contain XML method" in assert(method.succeeded)
    "contain XML media type answer" in assert(
      response.downField("application/xml").downField("schema").downField("$ref").as[String] === Right(
        "#/components/schemas/Book"
      )
    )
    "contain only one media type" in assert(
      response.keys.toSeq.flatten.length === 1
    )
    "have Book type" in assert(bookType.succeeded)
    "have Book xml-name" in assert(bookType.downField("xml").downField("name").as[String] === Right("book"))
    "have id xml-attribute" in assert(
      bookType.downField("properties").downField("id").downField("xml").downField("attribute").as[Boolean] === Right(
        true
      )
    )
    "have tag wrapped array property" in assert(
      bookType.downField("properties").downField("tags").downField("xml").as[SwaggerXMLOptions] === Right(
        xmlOpts(name = "tag".some, wrapped = true)
      )
    )
  }

}

object XmlSpec {
  case class Book(id: Int, author: String, title: String, tags: List[String])
  implicit val bookSwagger: SwaggerTypeable[Book] =
    deriveNamedTypeable[Book]
      .xmlFld(Symbol("id") ->> xmlOpts(attribute = true))
      .xmlFields("tags" -> xmlOpts(name = "tag".some, wrapped = true))
      .xml(name = "book".some)

  def api = prefix("xml") |> key("foo") |> get |> $$[Book]

  val swagger = MkSwagger(api)
} 
Example 84
Source File: GeometryCheckers.scala    From rtree2d   with Apache License 2.0 5 votes vote down vote up
package com.github.plokhotnyuk.rtree2d.core

import TestUtils._
import org.scalacheck.Gen
import org.scalacheck.Prop._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

class GeometryCheckers extends AnyWordSpec with Matchers with ScalaCheckPropertyChecks {
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 1000)
  "EuclideanPlane.distanceCalculator" when {
    "asked to calculate distance from point to an RTree" should {
      "return a distance to a nearest part of the RTree bounding box or 0 if the point is inside it" in {
        forAll(entryListGen, floatGen, floatGen) {
          (entries: Seq[RTreeEntry[Int]], x: Float, y: Float) =>
            val t = RTree(entries)
            propBoolean(entries.nonEmpty && !intersects(t, x, y)) ==> {
              val expected = euclideanDistance(x, y, t)
              EuclideanPlane.distanceCalculator.distance(x, y, t) === expected +- 0.001f
            }
        }
      }
    }
  }
  "SphericalEarth.distanceCalculator" when {
    "asked to calculate distance from point to an RTree" should {
      "return 0 if the point is inside it" in {
        forAll(latLonEntryGen, Gen.choose[Float](0, 1), Gen.choose[Float](0, 1)) {
          (t: RTreeEntry[Int], rdx: Float, rdy: Float) =>
            val lat = t.minX + rdx * (t.maxX - t.minX)
            val lon = t.minY + rdy * (t.maxY - t.minY)
            propBoolean(intersects(t, lat, lon)) ==> {
              SphericalEarth.distanceCalculator.distance(lat, lon, t) === 0.0f +- 0.1f
            }
        }
      }
      "return a distance to the nearest edge of the RTree bounding box if point doesn't intersect and is aligned vertically" in {
        forAll(latLonEntryGen, latGen, lonGen) {
          (t: RTreeEntry[Int], lat: Float, lon: Float) =>
            propBoolean(!intersects(t, lat, lon) && alignedVertically(t, lat, lon)) ==> {
              val distancesForCorners = IndexedSeq(
                greatCircleDistance(lat, lon, t.minX, lon),
                greatCircleDistance(lat, lon, t.maxX, lon),
                greatCircleDistance(lat, lon, t.minX, t.minY),
                greatCircleDistance(lat, lon, t.minX, t.maxY),
                greatCircleDistance(lat, lon, t.maxX, t.minY),
                greatCircleDistance(lat, lon, t.maxX, t.maxY)
              )
              val expected = distancesForCorners.min
              val result = SphericalEarth.distanceCalculator.distance(lat, lon, t)
              result <= expected + 0.1f
            }
        }
      }
      "return a distance to the nearest edge of the RTree bounding box if point doesn't not intersect and is aligned horizontally" in {
        forAll(latLonEntryGen, latGen, lonGen) {
          (t: RTreeEntry[Int], lat: Float, lon: Float) =>
            propBoolean(!intersects(t, lat, lon) && alignedHorizontally(t, lat, lon)) ==> {
              val distancesForCorners = IndexedSeq(
                greatCircleDistance(lat, lon, lat, t.minY),
                greatCircleDistance(lat, lon, lat, t.maxY),
                greatCircleDistance(lat, lon, t.minX, t.minY),
                greatCircleDistance(lat, lon, t.minX, t.maxY),
                greatCircleDistance(lat, lon, t.maxX, t.minY),
                greatCircleDistance(lat, lon, t.maxX, t.maxY)
              )
              val expected = distancesForCorners.min
              val result = SphericalEarth.distanceCalculator.distance(lat, lon, t)
              result <= expected + 0.1f
            }
        }
      }
      "return a distance to the nearest corner of the RTree bounding box if point doesn't not intersect and is not aligned vertically or horizontally" in {
        forAll(latLonEntryGen, latGen, lonGen) {
          (t: RTreeEntry[Int], lat: Float, lon: Float) =>
            propBoolean(!intersects(t, lat, lon) && !alignedHorizontally(t, lat, lon) && !alignedVertically(t, lat, lon)) ==> {
              val distancesForCorners = IndexedSeq(
                greatCircleDistance(lat, lon, t.minX, t.minY),
                greatCircleDistance(lat, lon, t.minX, t.maxY),
                greatCircleDistance(lat, lon, t.maxX, t.minY),
                greatCircleDistance(lat, lon, t.maxX, t.maxY)
              )
              val expected = distancesForCorners.min
              val result = SphericalEarth.distanceCalculator.distance(lat, lon, t)
              result <= expected + 0.1f
            }
        }
      }
    }
  }
} 
Example 85
Source File: JTSTest.scala    From rtree2d   with Apache License 2.0 5 votes vote down vote up
package com.github.plokhotnyuk.rtree2d.benchmark

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JTSTest extends AnyWordSpec with Matchers {
  private def benchmark(shuffling: Boolean): JTS = new JTS {
    shuffle = shuffling
    setup()
  }

  def testWith(benchmark: () => JTS): Unit = {
    benchmark().apply.entries.toList shouldBe benchmark().rtree.entries.toList
    benchmark().entries should contain allElementsOf benchmark().rtreeEntries
    benchmark().update.entries.toList should contain allElementsOf benchmark().rtreeEntries.diff(benchmark().entriesToRemove) ++ benchmark().entriesToAdd
    benchmark().nearest should contain oneElementOf benchmark().rtreeEntries
    val b = benchmark()
    (1 to b.size * 2).foreach(_ => b.nearestK.size shouldBe b.nearestMax)
    (1 to b.size * 2).foreach(_ => b.searchByPoint.size shouldBe 1)
    (1 to b.size * 2).foreach(_ => b.searchByRectangle.size should be >= 1)
  }

  "JTS" should {
    "return proper values without shuffling" in testWith(() => benchmark(shuffling = false))
    "return proper values with shuffling" in testWith(() => benchmark(shuffling = true))
  }
} 
Example 86
Source File: RTree2DTest.scala    From rtree2d   with Apache License 2.0 5 votes vote down vote up
package com.github.plokhotnyuk.rtree2d.benchmark

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class RTree2DTest extends AnyWordSpec with Matchers {
  private def benchmark(geom: String, shuffling: Boolean): RTree2D = new RTree2D {
    geometry = geom
    shuffle = shuffling
    setup()
  }
  "RTree2D" should {
    def testWith(benchmark: () => RTree2D): Unit = {
      benchmark().apply.entries shouldBe benchmark().rtree.entries
      benchmark().entries should contain allElementsOf benchmark().rtreeEntries
      benchmark().update.entries should contain allElementsOf benchmark().rtreeEntries.diff(benchmark().entriesToRemove) ++ benchmark().entriesToAdd
      benchmark().nearest should contain oneElementOf benchmark().rtreeEntries
      val b = benchmark()
      (1 to b.size * 2).foreach(_ => b.nearestK.size shouldBe b.nearestMax)
      (1 to b.size * 2).foreach(_ => b.searchByPoint.size shouldBe 1)
      (1 to b.size * 2).foreach(_ => b.searchByRectangle.size should be >= 1)
    }

    "return proper values for plane geometry without shuffling" in testWith(() => benchmark("plane", shuffling = false))
    "return proper values for spherical geometry without shuffling" in testWith(() => benchmark("spherical", shuffling = false))
    "return proper values for plane geometry with shuffling" in testWith(() => benchmark("plane", shuffling = true))
    "return proper values for spherical geometry with shuffling" in testWith(() => benchmark("spherical", shuffling = true))
  }
} 
Example 87
Source File: ArcheryTest.scala    From rtree2d   with Apache License 2.0 5 votes vote down vote up
package com.github.plokhotnyuk.rtree2d.benchmark

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ArcheryTest extends AnyWordSpec with Matchers {
  def benchmark(shuffling: Boolean): Archery = new Archery {
    shuffle = shuffling
    setup()
  }

  def testWith(benchmark: () => Archery): Unit = {
    benchmark().apply.entries.toSet shouldBe benchmark().rtree.entries.toSet
    benchmark().entries should contain allElementsOf benchmark().rtreeEntries
    benchmark().update.entries.toList should
      contain allElementsOf benchmark().rtreeEntries.diff(benchmark().entriesToRemove) ++ benchmark().entriesToAdd
    benchmark().nearest should contain oneElementOf benchmark().rtreeEntries
    val b = benchmark()
    (1 to b.size * 2).foreach(_ => b.nearestK.size shouldBe b.nearestMax)
    (1 to b.size * 2).foreach(_ => b.searchByPoint.size shouldBe 1)
    (1 to b.size * 2).foreach(_ => b.searchByRectangle.size should be >= 1)
  }

  "Archery" should {
    "return proper values without shuffling" in testWith(() => benchmark(shuffling = false))
    "return proper values with shuffling" in testWith(() => benchmark(shuffling = true))
  }
} 
Example 88
Source File: DavidMotenRTreeTest.scala    From rtree2d   with Apache License 2.0 5 votes vote down vote up
package com.github.plokhotnyuk.rtree2d.benchmark

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import collection.JavaConverters._

class DavidMotenRTreeTest extends AnyWordSpec with Matchers {
  def benchmark(shuffling: Boolean): DavidMotenRTree = new DavidMotenRTree {
    shuffle = shuffling
    setup()
  }

  def testWith(benchmark: () => DavidMotenRTree): Unit = {
    benchmark().apply.entries().asScala.toSet shouldBe benchmark().rtree.entries().asScala.toSet
    benchmark().entries should contain allElementsOf benchmark().rtreeEntries
    benchmark().update.entries().asScala.toList should contain allElementsOf benchmark().rtreeEntries.diff(benchmark().entriesToRemove) ++ benchmark().entriesToAdd
    benchmark().nearest should contain oneElementOf benchmark().rtreeEntries
    val b = benchmark()
    (1 to b.size * 2).foreach(_ => b.nearestK.size shouldBe b.nearestMax)
    (1 to b.size * 2).foreach(_ => b.searchByPoint.size shouldBe 1)
    (1 to b.size * 2).foreach(_ => b.searchByRectangle.size should be >= 1)
  }

  "DavidMotenRTree" should {
    "return proper values without shuffling" in testWith(() => benchmark(shuffling = false))
    "return proper values with shuffling" in testWith(() => benchmark(shuffling = true))
  }
} 
Example 89
Source File: ModelServiceIntegrationSpec.scala    From full-scala-stack   with Apache License 2.0 5 votes vote down vote up
package api

import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import de.heikoseeberger.akkahttpupickle.UpickleSupport
import model.{SampleModelObject, SimpleSearch}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import routes.ModelRoutes
import upickle.default._
import util.ModelPickler

import scala.concurrent.ExecutionContext


class ModelServiceIntegrationSpec
    extends AnyWordSpec
    with Matchers
    with ScalatestRouteTest
    with ZIODirectives
    with UpickleSupport
    with ModelPickler {

  val service = new ModelRoutes with LiveEnvironment

  // TODO: test your route here; in reality we would probably not have a test as simple as the one below.
  "The Service" should {
    "return one object on a get" in {
      Get("/api/sampleModelObject/1") ~> service.apiRoute("") ~> check {
        val res = responseAs[Seq[SampleModelObject]].headOption

        println(res)
        assert(res.nonEmpty)
      }
    }
    "return some objects on a search" in {
      Post("/api/sampleModelObject/search", HttpEntity(ContentTypes.`application/json`, write(SimpleSearch()))) ~> service.apiRoute("") ~> check {
        val res = responseAs[Seq[SampleModelObject]]

        println(res)
        assert(res.nonEmpty)
      }
    }
  }

} 
Example 90
Source File: ModelServiceSpec.scala    From full-scala-stack   with Apache License 2.0 5 votes vote down vote up
package api

import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{ContentType, ContentTypes, HttpEntity, HttpResponse, MessageEntity, RequestEntity}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import dao.{CRUDOperations, MockRepository, Repository}
import de.heikoseeberger.akkahttpupickle.UpickleSupport
import mail.{CourierPostman, Postman}
import model.{SampleModelObject, SimpleSearch}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import routes.{ModelRoutes, SampleModelObjectRoute}
import upickle.default._
import util.ModelPickler
import zio.{IO, ZIO}
import zioslick.RepositoryException

import scala.concurrent.ExecutionContext


class ModelServiceSpec
    extends AnyWordSpec
    with Matchers
    with ScalatestRouteTest
    with ZIODirectives
    with UpickleSupport
    with ModelPickler {

  val objects = Seq(
    SampleModelObject(0, "Zero"),
    SampleModelObject(1, "One"),
    SampleModelObject(2, "Two"),
    SampleModelObject(3, "Three"),
    SampleModelObject(4, "Four"),
  )

  val service = new SampleModelObjectRoute with MockRepository with CourierPostman with Config {
    override def repository: Repository.Service = new Repository.Service {
      override val sampleModelObjectOps: CRUDOperations[SampleModelObject, Int, SimpleSearch, Any] = new MockOps {
        override def search(search: Option[SimpleSearch])(
          implicit session: Any
        ): IO[RepositoryException, Seq[SampleModelObject]] = ZIO.succeed(objects)
        override def get(pk: Int)(implicit session: Any): IO[RepositoryException, Option[SampleModelObject]] =
          ZIO.succeed(objects.headOption)

      }
    }
  }

  // TODO: test your route here; in reality we would probably not have a test as simple as the one below.
  "The Service" should {
    "return one object on a get" in {
      Get("/sampleModelObject/1") ~> service.crudRoute.route("") ~> check {
        val res = responseAs[Seq[SampleModelObject]].headOption

        println(res)
        res shouldEqual objects.headOption
      }
    }
    "return some objects on a search" in {
      Post("/sampleModelObject/search", HttpEntity(ContentTypes.`application/json`, write(SimpleSearch()))) ~> service.crudRoute.route("") ~> check {
        val res = responseAs[Seq[SampleModelObject]]

        println(res)
        res shouldEqual objects
      }
    }
  }
} 
Example 91
Source File: RefinedDecodersSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.refined

import eu.timepit.refined.api.Refined
import eu.timepit.refined.refineMV
import eu.timepit.refined.string.MatchesRegex
import eu.timepit.refined.types.numeric.NonNegLong
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.annotations.{ElementCodec, XmlCodec}
import ru.tinkoff.phobos.decoding.XmlDecoder
import ru.tinkoff.phobos.syntax.{attr, text}
import ru.tinkoff.phobos.testString._
import shapeless.{Witness => W}

class RefinedDecodersSpec extends AnyWordSpec with Matchers {
  type NumericAtLeastTo = MatchesRegex[W.`"[0-9]{2,}"`.T]

  @XmlCodec("test")
  case class Test(x: Int, y: Refined[String, NumericAtLeastTo])

  @ElementCodec
  case class Foo(@attr bar: Int, @text baz: NonNegLong)
  @XmlCodec("qux")
  case class Qux(str: String, foo: Foo)

  "refined decoder" should {
    "decode element correctly" in {

      val sampleXml = """
         | <?xml version='1.0' encoding='UTF-8'?>
         | <test>
         |   <x>2</x>
         |   <y>123</y>
         | </test>
          """.stripMargin.minimized

      val expectedResult = Test(2, refineMV[NumericAtLeastTo]("123"))

      XmlDecoder[Test].decode(sampleXml) shouldEqual (Right(expectedResult))

    }

    "decode text correctly" in {
      val sampleXml =
        """
          | <?xml version='1.0' encoding='UTF-8'?>
          | <qux>
          |   <str>42</str>
          |   <foo bar="42">1000</foo>
          | </qux>
          """.stripMargin.minimized

      val expectedResult = Qux("42", Foo(42, NonNegLong(1000L)))
      XmlDecoder[Qux].decode(sampleXml) shouldEqual Right(expectedResult)
    }

    "provide verbose errorst" in {

      @XmlCodec("test")
      case class Test2(x: Int, y: Refined[String, NumericAtLeastTo])
      @ElementCodec
      case class Foo2(@attr bar: Int, @text baz: NonNegLong)
      @XmlCodec("qux")
      case class Qux2(str: String, foo: Foo2)

      val sampleXml0 = """
           | <?xml version='1.0' encoding='UTF-8'?>
           | <test>
           |   <x>2</x>
           |   <y>1</y>
           | </test>
          """.stripMargin.minimized

      XmlDecoder[Test2]
        .decode(sampleXml0)
        .left
        .map(_.text) shouldEqual Left(
        """Failed to verify RefinedDecodersSpec.this.NumericAtLeastTo refinement for value=1 of raw type String: Predicate failed: "1".matches("[0-9]{2,}")."""
      )

      val sampleXml1 =
        """
          | <?xml version='1.0' encoding='UTF-8'?>
          | <qux>
          |   <str>42</str>
          |   <foo bar="42">-1000</foo>
          | </qux>
          """.stripMargin.minimized

      XmlDecoder[Qux2]
        .decode(sampleXml1)
        .left
        .map(_.text) shouldEqual Left(
        """Failed to verify eu.timepit.refined.numeric.NonNegative refinement for value=-1000 of raw type Long: Predicate (-1000 < 0) did not fail."""
      )

    }
  }
} 
Example 92
Source File: RefinedEncodersSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.refined

import eu.timepit.refined.api.Refined
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.annotations.{ElementCodec, XmlCodec}
import eu.timepit.refined.refineMV
import eu.timepit.refined.string.MatchesRegex
import eu.timepit.refined.types.numeric.NonNegLong
import ru.tinkoff.phobos.encoding.XmlEncoder
import ru.tinkoff.phobos.syntax.{attr, text}
import shapeless.{Witness => W}
import ru.tinkoff.phobos.testString._

class RefinedEncodersSpec extends AnyWordSpec with Matchers {
  type NumericAtLeastTo = MatchesRegex[W.`"[0-9]{2,}"`.T]

  @XmlCodec("test")
  case class Test(x: Int, y: Refined[String, NumericAtLeastTo])

  @ElementCodec
  case class Foo(@attr bar: Int, @text baz: NonNegLong)
  @XmlCodec("qux")
  case class Qux(str: String, foo: Foo)

  "refined encoder" should {
    "encode element correctly" in {
      val value = Test(2, refineMV[NumericAtLeastTo]("123"))

      val expectedResult = """
         | <?xml version='1.0' encoding='UTF-8'?>
         | <test>
         |   <x>2</x>
         |   <y>123</y>
         | </test>
          """.stripMargin.minimized

      XmlEncoder[Test].encode(value) shouldEqual expectedResult
    }

    "encode text correctly" in {
      val qux = Qux("42", Foo(42, NonNegLong(1000L)))
      val xml = XmlEncoder[Qux].encode(qux)
      assert(
        xml ==
          """
            | <?xml version='1.0' encoding='UTF-8'?>
            | <qux>
            |   <str>42</str>
            |   <foo bar="42">1000</foo>
            | </qux>
          """.stripMargin.minimized)
    }
  }
} 
Example 93
Source File: GenericElementDecoderSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.traverse

import cats.syntax.either._
import com.softwaremill.diffx.scalatest.DiffMatcher
import org.scalatest.EitherValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.ast.XmlLeaf
import ru.tinkoff.phobos.decoding.{DecodingError, ElementDecoder, XmlDecoder}

class GenericElementDecoderSpec extends AnyWordSpec with Matchers with DiffMatcher with EitherValues {
  import GenericElementDecoderSpec._
  "GenericElementDecoder" should {
    "work correctly with immutable accumulators" in {
      implicit val decodeAllAttributes: ElementDecoder[Acc] = GenericElementDecoder(ImmutableTraversalLogic)
      val xmlDecoder = XmlDecoder
        .fromElementDecoder[Acc]("ast")

      val sampleXml =
        """<?xml version='1.0' encoding='UTF-8'?><ans1:ast xmlns:ans1="https://tinkoff.ru" foo="5"><bar>bazz</bar><array foo2="true" foo3="false"><elem>11111111111111</elem><elem>11111111111112</elem></array><nested x="2.0"><scala>2.13</scala><dotty>0.13</dotty><scala-4/></nested></ans1:ast>"""

      val expectedResult0 = Acc(
        Map(
          "foo"  -> "5",
          "foo2" -> "true",
          "foo3" -> "false",
          "x"    -> "2.0"
        )
      )

      xmlDecoder.decode(sampleXml) should matchTo(expectedResult0.asRight[DecodingError])

      val xmlWithoutAttrs =
        """<?xml version='1.0' encoding='UTF-8'?><ans1:ast xmlns:ans1="https://tinkoff.ru"><bar>bazz</bar><array><elem>11111111111111</elem><elem>11111111111112</elem></array><nested><scala>2.13</scala><dotty>0.13</dotty><scala-4/></nested></ans1:ast>"""

      val expectedResult1 = Acc(Map.empty)

      xmlDecoder.decode(xmlWithoutAttrs) should matchTo(expectedResult1.asRight[DecodingError])
    }
  }
}

object GenericElementDecoderSpec {
  case class Acc(attributes: Map[String, String])

  object ImmutableTraversalLogic extends DecodingTraversalLogic[Acc, Acc] {
    override def newAcc(): Acc = Acc(Map.empty)

    override def onFinish(acc: Acc): Acc = acc

    override def onAttributes(acc: Acc, attributes: List[(String, XmlLeaf)]): Acc = {
      acc.copy(
        attributes = acc.attributes ++ attributes.map { case (name, leaf) => name -> leaf.value.toString }
      )
    }

    override def combine(acc: Acc, field: String, intermediateResult: Acc): Acc = {
      acc.copy(attributes = acc.attributes ++ intermediateResult.attributes)
    }
  }
} 
Example 94
Source File: XmlEntryElementDecoderSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.ast

import com.softwaremill.diffx.scalatest.DiffMatcher
import org.scalatest.EitherValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.Namespace
import ru.tinkoff.phobos.decoding.{DecodingError, XmlDecoder}
import cats.syntax.either._

class XmlEntryElementDecoderSpec extends AnyWordSpec with Matchers with DiffMatcher with EitherValues {

  "XmlEntry decoder" should {
    "decodes simple Xml into ast correctly" in {
      val sampleXml                = """<?xml version='1.0' encoding='UTF-8'?><ast foo="5"><bar>bazz</bar></ast>"""
      val decodedAst               = XmlDecoder.fromElementDecoder[XmlEntry]("ast").decode(sampleXml).right.value
      val expectedResult: XmlEntry = xml(attr("foo") := 5, node("bar") := "bazz")

      decodedAst should matchTo(expectedResult)
    }

    "decodes complicated Xml into ast correctly" in {
      case object tinkoff {
        type ns = tinkoff.type
        implicit val ns: Namespace[tinkoff.type] = Namespace.mkInstance("https://tinkoff.ru")
      }

      val sampleXml =
        """<?xml version='1.0' encoding='UTF-8'?><ans1:ast xmlns:ans1="https://tinkoff.ru" foo="5"><bar>bazz</bar><array foo2="true" foo3="false"><elem>11111111111111</elem><elem>11111111111112</elem></array><nested><scala>2.13</scala><dotty>0.13</dotty><scala-4/></nested></ans1:ast>"""

      val decodedAst = XmlDecoder.fromElementDecoderNs[XmlEntry, tinkoff.ns]("ast").decode(sampleXml)
      val expectedResult: XmlEntry = xml(attr("foo") := 5)(
        node("bar") := "bazz",
        node("array") := xml(
          attr("foo2") := true,
          attr("foo3") := false
        )(
          node("elem") := 11111111111111L,
          node("elem") := 11111111111112L
        ),
        node("nested") := xml(
          node("scala") := 2.13,
          node("dotty") := 0.13,
          node("scala-4") := xml.empty
        )
      )

      decodedAst should matchTo(expectedResult.asRight[DecodingError])
    }

    "works fine when for elements with same name" in {

      val n: XmlEntry = xml(
        node("k") :=
          xml(
            node("k") := "gbq"
          )
      )

      val encoded = ru.tinkoff.phobos.encoding.XmlEncoder.fromElementEncoder[XmlEntry]("ast").encode(n)

      val result = XmlDecoder
        .fromElementDecoder[XmlEntry]("ast")
        .decode(
          encoded
        )

      result.map(util.AstTransformer.sortNodeValues) should matchTo(
        util.AstTransformer.sortNodeValues(n).asRight[DecodingError]
      )
    }
  }
} 
Example 95
Source File: XmlEntryElementEncoderSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.ast

import com.softwaremill.diffx.scalatest.DiffMatcher
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.Namespace
import ru.tinkoff.phobos.encoding.XmlEncoder

class XmlEntryElementEncoderSpec extends AnyWordSpec with DiffMatcher with Matchers {
  "XmlEntry encoder" should {
    "encodes simple Xml ast correctly" in {
      val ast = xml(attr("foo") := 5, node("bar") := "bazz")

      val result =
        XmlEncoder
          .fromElementEncoder[XmlEntry]("ast")
          .encode(ast)

      assert(result == """<?xml version='1.0' encoding='UTF-8'?><ast foo="5"><bar>bazz</bar></ast>""")
    }
    "encodes nested Xml ast correctly" in {
      case object tinkoff {
        type ns = tinkoff.type
        implicit val ns: Namespace[tinkoff.type] = Namespace.mkInstance("https://tinkoff.ru")
      }

      val ast = xml(attr("foo") := 5)(
        node("bar") := "bazz",
        node("array") := xml(
          attr("foo2") := true,
          attr("foo3") := false
        )(
          node("elem") := 11111111111111L,
          node("elem") := 11111111111112L
        ),
        node("nested") := xml(
          node("scala") := 2.13,
          node("dotty") := 0.13,
          node("scala-4") := xml.empty
        )
      )

      val result =
        XmlEncoder
          .fromElementEncoderNs[XmlEntry, tinkoff.ns]("ast")
          .encode(ast)

      assert(
        result == """<?xml version='1.0' encoding='UTF-8'?><ans1:ast xmlns:ans1="https://tinkoff.ru" foo="5"><bar>bazz</bar><array foo2="true" foo3="false"><elem>11111111111111</elem><elem>11111111111112</elem></array><nested><scala>2.13</scala><dotty>0.13</dotty><scala-4/></nested></ans1:ast>"""
      )
    }
  }
} 
Example 96
Source File: FlattenElementsDecoderSpec.scala    From phobos   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.phobos.raw

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.phobos.Namespace
import ru.tinkoff.phobos.decoding.XmlDecoder
import ru.tinkoff.phobos.ast._

class FlattenElementsDecoderSpec extends AnyWordSpec with Matchers {

  "XmlEntry decoder" should {
    "decodes simple Xml into ast correctly" in {
      val xml        = """<?xml version='1.0' encoding='UTF-8'?><ast foo="5"><bar>bazz</bar></ast>"""
      val decodedRaw = XmlDecoder.fromElementDecoder[ElementsFlatten]("ast").decode(xml)
      assert(
        decodedRaw.contains(
          ElementsFlatten(
            "bar" -> "bazz"
          )
        )
      )
    }

    "decodes complicated Xml into ast correctly" in {
      case object tinkoff {
        type ns = tinkoff.type
        implicit val ns: Namespace[tinkoff.type] = Namespace.mkInstance("https://tinkoff.ru")
      }

      val xml =
        """<?xml version='1.0' encoding='UTF-8'?><ans1:ast xmlns:ans1="https://tinkoff.ru" foo="5"><bar>bazz</bar><array foo2="true" foo3="false"><elem>11111111111111</elem><elem>11111111111112</elem></array><nested><scala>2.13</scala><dotty>0.13</dotty><scala-4/></nested></ans1:ast>"""

      val decodedRaw = XmlDecoder.fromElementDecoderNs[ElementsFlatten, tinkoff.ns]("ast").decode(xml)
      assert(
        decodedRaw.contains(
          ElementsFlatten(
            "bar"   -> "bazz",
            "elem"  -> 11111111111111L,
            "elem"  -> 11111111111112L,
            "scala" -> 2.13,
            "dotty" -> 0.13
          )
        )
      )
    }
  }
} 
Example 97
Source File: QueryContextSpec.scala    From scruid   with Apache License 2.0 5 votes vote down vote up
package ing.wbaa.druid

import ing.wbaa.druid.definitions._
import ing.wbaa.druid.util._
import io.circe.generic.auto._
import io.circe.syntax._
import org.scalatest.concurrent._
import org.scalatest.matchers.should.Matchers
import org.scalatest.time._
import org.scalatest.wordspec.AnyWordSpec

class QueryContextSpec extends AnyWordSpec with Matchers with ScalaFutures {

  implicit override val patienceConfig =
    PatienceConfig(timeout = Span(20, Seconds), interval = Span(5, Millis))
  private val totalNumberOfEntries = 39244
  implicit val config              = DruidConfig()
  implicit val mat                 = config.client.actorMaterializer

  case class TimeseriesCount(count: Int)

  "TimeSeriesQuery with context" should {

    "successfully be interpreted by Druid" in {

      val query = TimeSeriesQuery(
        aggregations = List(
          CountAggregation(name = "count")
        ),
        granularity = GranularityType.Hour,
        intervals = List("2011-06-01/2017-06-01"),
        context = Map(
          QueryContext.QueryId          -> "some_custom_id",
          QueryContext.Priority         -> "100",
          QueryContext.UseCache         -> "false",
          QueryContext.SkipEmptyBuckets -> "true"
        )
      )

      val requestJson = query.asJson.noSpaces

      requestJson shouldBe
      """{
          |"aggregations":[{"name":"count","type":"count"}],
          |"intervals":["2011-06-01/2017-06-01"],
          |"filter":null,
          |"granularity":"hour",
          |"descending":"true",
          |"postAggregations":[],
          |"context":{"queryId":"some_custom_id","priority":"100","useCache":"false","skipEmptyBuckets":"true"}
          |}""".toOneLine

      val resultF = query.execute()

      whenReady(resultF) { response =>
        response.list[TimeseriesCount].map(_.count).sum shouldBe totalNumberOfEntries
      }
    }

  }

} 
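The expected JSON above is written with margin characters and collapsed via the toOneLine helper imported from ing.wbaa.druid.util. Scruid's exact implementation is not shown here; a minimal sketch of such a helper is an implicit value class that strips the margin and joins the trimmed lines:

// Hypothetical sketch of a toOneLine helper, to be defined in a package object or similar.
implicit class StringToOneLine(val s: String) extends AnyVal {
  def toOneLine: String = s.stripMargin.split("\n").map(_.trim).mkString
}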
Example 98
Source File: BasicAuthenticationSpec.scala    From scruid   with Apache License 2.0 5 votes vote down vote up
package ing.wbaa.druid.auth.basic

import scala.concurrent.duration._
import scala.language.postfixOps

import akka.http.scaladsl.model.StatusCodes
import ing.wbaa.druid.{ DruidConfig, QueryHost, TimeSeriesQuery }
import ing.wbaa.druid.client.{ DruidAdvancedHttpClient, HttpStatusException }
import ing.wbaa.druid.definitions._
import io.circe.generic.auto._
import org.scalatest.concurrent._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BasicAuthenticationSpec extends AnyWordSpec with Matchers with ScalaFutures {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(5 minutes, 100 millis)
  private val totalNumberOfEntries                     = 39244
  private val basicAuthenticationAddition =
    new BasicAuthenticationExtension(username = "user", password = "aloha")

  case class TimeseriesCount(count: Int)

  "TimeSeriesQuery without Basic Auth" should {

    implicit val config = DruidConfig(
      clientBackend = classOf[DruidAdvancedHttpClient],
      clientConfig = DruidAdvancedHttpClient
        .ConfigBuilder()
        .build(),
      hosts = Seq(QueryHost("localhost", 8088))
    )

    "get 401 Auth Required when querying Druid without Authentication config" in {
      val request = TimeSeriesQuery(
        aggregations = List(
          CountAggregation(name = "count")
        ),
        granularity = GranularityType.Hour,
        intervals = List("2011-06-01/2017-06-01")
      ).execute

      whenReady(request.failed) { throwable =>
        throwable shouldBe a[HttpStatusException]
        throwable.asInstanceOf[HttpStatusException].status shouldBe StatusCodes.Unauthorized
      }
    }
  }

  "TimeSeriesQuery with Basic Auth" should {

    implicit val config = DruidConfig(
      clientBackend = classOf[DruidAdvancedHttpClient],
      clientConfig = DruidAdvancedHttpClient
        .ConfigBuilder()
        .withRequestInterceptor(basicAuthenticationAddition)
        .build(),
      hosts = Seq(QueryHost("localhost", 8088))
    )

    "successfully query Druid when an Authentication config is set" in {
      val request = TimeSeriesQuery(
        aggregations = List(
          CountAggregation(name = "count")
        ),
        granularity = GranularityType.Hour,
        intervals = List("2011-06-01/2017-06-01")
      ).execute

      whenReady(request) { response =>
        response.list[TimeseriesCount].map(_.count).sum shouldBe totalNumberOfEntries
      }
    }
  }

} 
Example 99
Source File: SQLQuerySpec.scala    From scruid   with Apache License 2.0 5 votes vote down vote up
package ing.wbaa.druid

import java.time.{ LocalDateTime, ZonedDateTime }

import akka.stream.scaladsl.Sink
import ing.wbaa.druid.SQL._
import ing.wbaa.druid.client.CirceDecoders
import io.circe.generic.auto._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.wordspec.AnyWordSpec

class SQLQuerySpec extends AnyWordSpec with Matchers with ScalaFutures with CirceDecoders {
  implicit override val patienceConfig =
    PatienceConfig(timeout = Span(20, Seconds), interval = Span(5, Millis))
  private val totalNumberOfEntries  = 39244
  private val usOnlyNumberOfEntries = 528

  implicit val config = DruidConfig()
  implicit val mat    = config.client.actorMaterializer

  case class Result(hourTime: ZonedDateTime, count: Int)

  "SQL query" should {

    val sqlQuery: SQLQuery = dsql"""
      |SELECT FLOOR(__time to HOUR) AS hourTime, count(*) AS "count"
      |FROM wikipedia
      |WHERE "__time" BETWEEN TIMESTAMP '2015-09-12 00:00:00' AND TIMESTAMP '2015-09-13 00:00:00'
      |GROUP BY 1
      |""".stripMargin

    "successfully be interpreted by Druid" in {
      val resultsF = sqlQuery.execute()
      whenReady(resultsF) { response =>
        response.list[Result].map(_.count).sum shouldBe totalNumberOfEntries
      }
    }

    "support streaming" in {
      val resultsF = sqlQuery.streamAs[Result]().runWith(Sink.seq)

      whenReady(resultsF) { results =>
        results.map(_.count).sum shouldBe totalNumberOfEntries
      }
    }
  }

  "SQL parameterized query" should {

    val fromDateTime   = LocalDateTime.of(2015, 9, 12, 0, 0, 0, 0)
    val untilDateTime  = fromDateTime.plusDays(1)
    val countryIsoCode = "US"

    val sqlQuery: SQLQuery =
      dsql"""
      |SELECT FLOOR(__time to HOUR) AS hourTime, count(*) AS "count"
      |FROM wikipedia
      |WHERE "__time" BETWEEN ${fromDateTime} AND ${untilDateTime} AND countryIsoCode = ${countryIsoCode}
      |GROUP BY 1
      |""".stripMargin

    "be expressed as a parameterized query with three parameters" in {
      sqlQuery.query.count(_ == '?') shouldBe 3
      sqlQuery.parameters.size shouldBe 3

      sqlQuery.parameters(0) shouldBe SQLQueryParameter(SQLQueryParameterType.Timestamp,
                                                        "2015-09-12 00:00:00")
      sqlQuery.parameters(1) shouldBe SQLQueryParameter(SQLQueryParameterType.Timestamp,
                                                        "2015-09-13 00:00:00")
      sqlQuery.parameters(2) shouldBe SQLQueryParameter(SQLQueryParameterType.Varchar, "US")
    }

    "successfully be interpreted by Druid" in {
      val resultsF = sqlQuery.execute()
      whenReady(resultsF) { response =>
        response.list[Result].map(_.count).sum shouldBe usOnlyNumberOfEntries
      }
    }

    "support streaming" in {
      val resultsF = sqlQuery.streamAs[Result]().runWith(Sink.seq)

      whenReady(resultsF) { results =>
        results.map(_.count).sum shouldBe usOnlyNumberOfEntries
      }

    }

  }
} 
Example 100
Source File: GranularitySpec.scala    From scruid   with Apache License 2.0 5 votes vote down vote up
package ing.wbaa.druid.definitions

import io.circe._
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class GranularitySpec extends AnyWordSpec with Matchers {
  "Granularities" should {
    "be able to encode to json" in {
      implicit val granularityEncoder: Encoder[Granularity] = GranularityType.encoder

      val gran: Granularity = GranularityType.FifteenMinute
      gran.asJson.noSpaces shouldBe "\"fifteen_minute\""
    }

    "be able to decode json to a Granularity" in {
      implicit val granularityDecoder: Decoder[Granularity] = GranularityType.decoder
      val thirtyMinute                                      = "thirty_minute"
      thirtyMinute.asJson.as[Granularity] shouldBe Right(GranularityType.ThirtyMinute)

      val all = "all"
      all.asJson.as[Granularity] shouldBe Right(GranularityType.All)
    }

  }
} 
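This test assumes GranularityType exposes a string-based circe codec. A minimal sketch of that pattern, not scruid's actual implementation, maps each granularity to its lower-cased wire name via contramap/emap:

import io.circe.{Decoder, Encoder}

sealed trait Granularity { def name: String }

// Hypothetical subset covering just the granularities used in the test.
object GranularityType {
  case object FifteenMinute extends Granularity { val name = "fifteen_minute" }
  case object ThirtyMinute  extends Granularity { val name = "thirty_minute" }
  case object All           extends Granularity { val name = "all" }

  val values: List[Granularity] = List(FifteenMinute, ThirtyMinute, All)

  val encoder: Encoder[Granularity] = Encoder.encodeString.contramap(_.name)
  val decoder: Decoder[Granularity] =
    Decoder.decodeString.emap(s => values.find(_.name == s).toRight(s"Unknown granularity: $s"))
}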
Example 101
Source File: StringSinkRecordKeyBuilderTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink

import com.datamountaineer.streamreactor.connect.rowkeys.StringSinkRecordKeyBuilder
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class StringSinkRecordKeyBuilderTest extends AnyWordSpec with Matchers {
  val keyRowKeyBuilder = new StringSinkRecordKeyBuilder()

  "SinkRecordKeyStringKeyBuilder" should {

    "create the right key from the Schema key value - Byte" in {
      val b = 123.toByte
      val sinkRecord = new SinkRecord("", 1, Schema.INT8_SCHEMA, b, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord) shouldBe "123"

    }
    "create the right key from the Schema key value - String" in {
      val s = "somekey"
      val sinkRecord = new SinkRecord("", 1, Schema.STRING_SCHEMA, s, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord) shouldBe s
    }

    "create the right key from the Schema key value - Bytes" in {
      val bArray = Array(23.toByte, 24.toByte, 242.toByte)
      val sinkRecord = new SinkRecord("", 1, Schema.BYTES_SCHEMA, bArray, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord) shouldBe bArray.toString
    }
    "create the right key from the Schema key value - Boolean" in {
      val bool = true
      val sinkRecord = new SinkRecord("", 1, Schema.BOOLEAN_SCHEMA, bool, Schema.FLOAT64_SCHEMA, Nil, 0)

      keyRowKeyBuilder.build(sinkRecord) shouldBe "true"

    }
  }
} 
Example 102
Source File: StringGenericRowKeyBuilderTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink

import com.datamountaineer.streamreactor.connect.rowkeys.StringGenericRowKeyBuilder
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class StringGenericRowKeyBuilderTest extends AnyWordSpec with Matchers {
  "StringGenericRowKeyBuilder" should {
    "use the topic, partition and offset to make the key" in {

      val topic = "sometopic"
      val partition = 2
      val offset = 1243L
      val sinkRecord = new SinkRecord(topic, partition, Schema.INT32_SCHEMA, 345, Schema.STRING_SCHEMA, "", offset)

      val keyBuilder = new StringGenericRowKeyBuilder()
      val expected = Seq(topic, partition, offset).mkString("|")
      keyBuilder.build(sinkRecord) shouldBe expected
    }
  }
} 
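Read against this test, the builder simply joins topic, partition and offset with a "|" delimiter. A hypothetical reconstruction (the delimiter parameter is an assumption):

import org.apache.kafka.connect.sink.SinkRecord

// Hypothetical sketch consistent with the test above.
class StringGenericRowKeyBuilder(delimiter: String = "|") {
  def build(record: SinkRecord): String =
    Seq(record.topic(), record.kafkaPartition(), record.kafkaOffset()).mkString(delimiter)
}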
Example 103
Source File: StringStructFieldsStringKeyBuilderTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.sink

import com.datamountaineer.streamreactor.connect.rowkeys.StringStructFieldsStringKeyBuilder
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class StringStructFieldsStringKeyBuilderTest extends AnyWordSpec with Matchers {
  "StructFieldsStringKeyBuilder" should {
    "raise an exception if the field is not present in the struct" in {
      intercept[IllegalArgumentException] {
        val schema = SchemaBuilder.struct().name("com.example.Person")
          .field("firstName", Schema.STRING_SCHEMA)
          .field("age", Schema.INT32_SCHEMA)
          .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

        val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

        val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
        StringStructFieldsStringKeyBuilder(Seq("threshold")).build(sinkRecord)
      }
    }

    "create the row key based on one single field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex"
    }

    "create the row key based on one single field with doc in the struct" in {
      val firstNameSchema = SchemaBuilder.`type`(Schema.Type.STRING).doc("first name")
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", firstNameSchema)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex"
    }

    "create the row key based on more thant one field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StringStructFieldsStringKeyBuilder(Seq("firstName", "age")).build(sinkRecord) shouldBe "Alex.30"
    }
  }
} 
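Taken together, these cases pin the builder down: the selected struct fields are joined with a dot, and a field that is absent or left unset (like the optional threshold above) is rejected. A hedged re-implementation of that contract, not the shipped class:

import org.apache.kafka.connect.data.Struct
import org.apache.kafka.connect.sink.SinkRecord

// Illustrative only; mirrors the dot-joined key and the IllegalArgumentException
// on missing fields that the tests above require.
case class FieldsKeyBuilderSketch(fields: Seq[String]) {
  def build(record: SinkRecord): String = {
    val struct = record.value().asInstanceOf[Struct]
    fields.map { field =>
      val value = struct.get(field) // null when an optional field was never put
      require(value != null, s"Field '$field' is not present in the struct")
      value.toString
    }.mkString(".")
  }
}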
Example 104
Source File: FutureAwaitWithFailFastFnTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.concurrent

import java.util.concurrent.Executors

import com.datamountaineer.streamreactor.connect.concurrent.ExecutorExtension._
import org.scalactic.source.Position
import org.scalatest.concurrent.{Eventually, TimeLimits}
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.util.{Failure, Try}


class FutureAwaitWithFailFastFnTest extends AnyWordSpec with Matchers with Eventually with TimeLimits {


  "FutureAwaitWithFailFastFn" should {
    "return when all the futures have completed" in {
      val exec = Executors.newFixedThreadPool(10)
      val futures = (1 to 5).map(i => exec.submit {
        Thread.sleep(300)
        i
      })
      eventually {
        val result = FutureAwaitWithFailFastFn(exec, futures)
        exec.isTerminated shouldBe true
        result shouldBe Seq(1, 2, 3, 4, 5)
      }
    }

    "stop when the first futures times out" in {
      val exec = Executors.newFixedThreadPool(6)
      val futures = for (i <- 1 to 10) yield {
        exec.submit {
          if (i == 4) {
            Thread.sleep(1000)
            sys.error("this task failed.")
          } else {
            Thread.sleep(50000)
          }
        }
      }

      eventually {
        val t = Try(FutureAwaitWithFailFastFn(exec, futures))
        t.isFailure shouldBe true
        t.asInstanceOf[Failure[_]].exception.getMessage shouldBe "this task failed."
        exec.isTerminated shouldBe true
      }
    }
  }

} 
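The two cases describe fail-fast semantics: wait for all futures when everything succeeds, abort on the first failure, and shut the pool down either way. A sketch of the same semantics on scala.concurrent futures; the utility under test works on java.util.concurrent futures, so this is an analogy, not the shipped code:

import java.util.concurrent.ExecutorService

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}

// Future.sequence fails as soon as any member future fails (Scala 2.12+),
// which is exactly the fail-fast behaviour exercised above.
object FailFastAwaitSketch {
  def await[T](pool: ExecutorService, futures: Seq[Future[T]])
              (implicit ec: ExecutionContext): Seq[T] =
    try Await.result(Future.sequence(futures), Duration.Inf)
    finally pool.shutdownNow()
}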
Example 105
Source File: TestSSLConfigContext.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.config

import javax.net.ssl.{KeyManager, SSLContext, TrustManager}
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers


class TestSSLConfigContext extends AnyWordSpec with Matchers with BeforeAndAfter {
  var sslConfig: SSLConfig = null
  var sslConfigNoClient: SSLConfig = null

  before {
    val trustStorePath = System.getProperty("truststore")
    val trustStorePassword = "erZHDS9Eo0CcNo"
    val keystorePath = System.getProperty("keystore")
    val keystorePassword = "8yJQLUnGkwZxOw"
    sslConfig = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), true)
    sslConfigNoClient = SSLConfig(trustStorePath, trustStorePassword, Some(keystorePath), Some(keystorePassword), false)
  }

  "SSLConfigContext" should {
    "should return an Array of KeyManagers" in {
      val keyManagers = SSLConfigContext.getKeyManagers(sslConfig)
      keyManagers.length shouldBe 1
      val entry = keyManagers.head
      entry shouldBe a [KeyManager]
    }

    "should return an Array of TrustManagers" in {
      val trustManager = SSLConfigContext.getTrustManagers(sslConfig)
      trustManager.length shouldBe 1
      val entry = trustManager.head
      entry shouldBe a [TrustManager]
    }

    "should return a SSLContext" in {
      val context = SSLConfigContext(sslConfig)
      context.getProtocol shouldBe "SSL"
      context shouldBe a [SSLContext]
    }
  }
} 
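The plumbing such a test exercises is standard JSSE. A minimal sketch, assuming JKS stores and the default manager algorithms (the shipped SSLConfigContext may differ in details):

import java.io.FileInputStream
import java.security.KeyStore

import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

object SslContextSketch {
  def apply(trustStorePath: String, trustStorePassword: String,
            keyStorePath: String, keyStorePassword: String): SSLContext = {
    val trustStore = KeyStore.getInstance("JKS")
    trustStore.load(new FileInputStream(trustStorePath), trustStorePassword.toCharArray)
    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
    tmf.init(trustStore)

    val keyStore = KeyStore.getInstance("JKS")
    keyStore.load(new FileInputStream(keyStorePath), keyStorePassword.toCharArray)
    val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
    kmf.init(keyStore, keyStorePassword.toCharArray)

    val context = SSLContext.getInstance("SSL") // the spec asserts getProtocol == "SSL"
    context.init(kmf.getKeyManagers, tmf.getTrustManagers, null)
    context
  }
}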
Example 106
Source File: KcqlSettingsTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.config

import java.util

import com.datamountaineer.streamreactor.connect.config.base.traits.KcqlSettings
import org.apache.kafka.common.config.types.Password
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class KcqlSettingsTest extends AnyWordSpec with Matchers {

  import scala.collection.JavaConverters._

  case class KS(kcql: String) extends KcqlSettings {

    override def connectorPrefix: String = "66686723939"
    override def getString(key: String): String = key match {
      case `kcqlConstant` => kcql
      case _ => null
    }
    override def getInt(key: String): Integer = 0
    override def getBoolean(key: String): java.lang.Boolean = false
    override def getPassword(key: String): Password = null
    override def getList(key: String): util.List[String] = List.empty[String].asJava
  }

  def testUpsertKeys(
    kcql: String, 
    expectedKeys: Set[String], 
    topic: String = "t",
    preserve: Boolean = false) = {
    val keys = KS(kcql).getUpsertKeys(preserveFullKeys=preserve)(topic)
    // get rid of ListSet to avoid ordering issues:
    keys.toList.toSet shouldBe expectedKeys
  }

  "KcqlSettings.getUpsertKeys()" should {

    "return 'basename' of key by default" in {

      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"))
      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m.x", Set("a", "x"))
      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("x"))
    }

    "return full keys if requested" in {

      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"), preserve=true)
      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m", Set("a", "b.m"), preserve=true)
      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m, b.n.x", Set("a", "b.m", "b.n.x"), preserve=true)
      testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("b.m.x"), preserve=true)
    }

    "return keys in the expected order - as listed in the PK clause" in {

      val kcql = "UPSERT INTO coll SELECT * FROM t PK a,b,c,d"
      val expectedKeys = List("a","b","c","d")
      val keys = KS(kcql).getUpsertKeys(preserveFullKeys=true)("t").toList
      // SCALA 2.12 WARNING: If this fails when you upgrade to 2.12, you need to 
      // modify KcqlSettings to remove all the reverse() calls when constructing
      // the ListSets.
      keys shouldBe expectedKeys
    }

  }

} 
Example 107
Source File: TestOffsetHandler.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.offsets

import com.datamountaineer.streamreactor.connect.TestUtilsBase
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._


class TestOffsetHandler extends AnyWordSpec with Matchers with MockitoSugar with TestUtilsBase {
  "should return an offset" in {
    val lookupPartitionKey = "test_lk_key"
    val offsetValue = "2013-01-01 00:05+0000"
    val offsetColumn = "my_timeuuid_col"
    val table = "testTable"
    val taskContext = getSourceTaskContext(lookupPartitionKey, offsetValue,offsetColumn, table)

    //check we can read it back
    val tables = List(table)
    val offsetsRecovered = OffsetHandler.recoverOffsets(lookupPartitionKey, tables.asJava, taskContext)
    val offsetRecovered = OffsetHandler.recoverOffset[String](offsetsRecovered, lookupPartitionKey, table, offsetColumn)
    offsetRecovered.get shouldBe (offsetValue)
  }
} 
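Offset recovery in Kafka Connect goes through the task's OffsetStorageReader: each table becomes a source partition keyed by the lookup key, and the stored offsets are fetched in bulk. A hedged sketch of that step; the real OffsetHandler may differ in shape:

import org.apache.kafka.connect.source.SourceTaskContext

import scala.collection.JavaConverters._

object OffsetRecoverySketch {
  // Returns a map from source partition to its stored offset map.
  def recover(lookupKey: String, tables: List[String], context: SourceTaskContext) = {
    val partitions = tables.map(table => Map(lookupKey -> table).asJava).asJava
    context.offsetStorageReader().offsets(partitions).asScala
  }
}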
Example 108
Source File: BytesConverterTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.converters.sink

import com.datamountaineer.streamreactor.connect.converters.MsgKey
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BytesConverterTest extends AnyWordSpec with Matchers {
  private val converter = new BytesConverter()
  private val topic = "topicA"

  "Sink BytesConverter" should {
    "handle null payloads" in {
      val sinkRecord = converter.convert(topic, null)

      sinkRecord.keySchema() shouldBe null
      sinkRecord.key() shouldBe null
      sinkRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
      sinkRecord.value() shouldBe null
    }

    "handle non-null payloads" in {
      val expectedPayload: Array[Byte] = Array(245, 2, 10, 200, 22, 0, 0, 11).map(_.toByte)
      val data = new SinkRecord(topic, 0, null, "keyA", null, expectedPayload, 0)
      val sinkRecord = converter.convert(topic, data)

      sinkRecord.keySchema() shouldBe MsgKey.schema
      sinkRecord.key() shouldBe MsgKey.getStruct("topicA", "keyA")
      sinkRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
      sinkRecord.value() shouldBe expectedPayload
    }
  }
} 
Example 109
Source File: BytesConverterTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.converters.source

import com.datamountaineer.streamreactor.connect.converters.MsgKey
import org.apache.kafka.connect.data.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BytesConverterTest extends AnyWordSpec with Matchers {
  private val converter = new BytesConverter()
  private val topic = "topicA"

  "BytesConverter" should {
    "handle null payloads" in {
      val sourceRecord = converter.convert(topic, "somesource", "100", null)

      sourceRecord.keySchema() shouldBe MsgKey.schema
      sourceRecord.key() shouldBe MsgKey.getStruct("somesource", "100")
      sourceRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
      sourceRecord.value() shouldBe null
    }

    "handle non-null payloads" in {
      val expectedPayload: Array[Byte] = Array(245, 2, 10, 200, 22, 0, 0, 11).map(_.toByte)
      val sourceRecord = converter.convert(topic, "somesource", "1001", expectedPayload)

      sourceRecord.keySchema() shouldBe MsgKey.schema
      sourceRecord.key() shouldBe MsgKey.getStruct("somesource", "1001")
      sourceRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
      sourceRecord.value() shouldBe expectedPayload
    }
  }
} 
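Both directions of the converter agree on the envelope: payload bytes pass through untouched under Schema.BYTES_SCHEMA, while the key is a MsgKey struct built from the source name and message id. A short usage sketch with invented values:

import com.datamountaineer.streamreactor.connect.converters.MsgKey
import com.datamountaineer.streamreactor.connect.converters.source.BytesConverter
import org.apache.kafka.connect.data.Schema

object BytesConverterUsageSketch {
  val converter = new BytesConverter()
  val record = converter.convert("target-topic", "some-source", "42", Array[Byte](1, 2, 3))
  assert(record.valueSchema() == Schema.BYTES_SCHEMA)
  assert(record.key() == MsgKey.getStruct("some-source", "42"))
}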
Example 110
Source File: JsonPassThroughConverterTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.converters.source

import java.util.Collections

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JsonPassThroughConverterTest extends AnyWordSpec with Matchers {
  val topic = "the_real_topic"
  val sourceTopic = "source_topic"

  "JsonPassThroughConverter" should {
    "pass single message with no key through as json" in {
      val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
      val json = JacksonJson.toJson(car)
      val converter = new JsonPassThroughConverter
      val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
      record.keySchema() shouldBe null
      record.key() shouldBe "source_topic.100"

      record.valueSchema() shouldBe null

      record.value() shouldBe json
      record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
      record.sourceOffset() shouldBe null
    }

    "pass single message with key through as json" in {
      val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
      val json = JacksonJson.toJson(car)
      val converter = new JsonPassThroughConverter
      val keys = List("name", "manufacturer")
      val record = converter.convert(topic, sourceTopic, "100", json.getBytes, keys)
      record.keySchema() shouldBe null
      record.key() shouldBe "LaFerrari.Ferrari"

      record.valueSchema() shouldBe null

      record.value() shouldBe json
      record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
      record.sourceOffset() shouldBe null
    }
  }
} 
Example 111
Source File: JsonSimpleConverterTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.converters.source

import java.util.Collections

import com.datamountaineer.streamreactor.connect.converters.MsgKey
import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
import io.confluent.connect.avro.AvroData
import org.apache.avro.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JsonSimpleConverterTest extends AnyWordSpec with Matchers {
  val topic = "the_real_topic"
  val sourceTopic = "source_topic"
  val avroData = new AvroData(4)

  "JsonSimpleConverter" should {
    "convert from json to the struct" in {
      val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
      val json = JacksonJson.toJson(car)
      val converter = new JsonSimpleConverter
      val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
      record.keySchema() shouldBe MsgKey.schema
      record.key() shouldBe MsgKey.getStruct(sourceTopic, "100")

      val schema = new Schema.Parser().parse(
        SchemaFor[Car]().toString
          .replace("\"name\":\"Car\"", s"""\"name\":\"$sourceTopic\"""")
          .replace(s"""\"namespace\":\"${getClass.getCanonicalName.dropRight(getClass.getSimpleName.length+1)}\",""", "")
      )
      val format = RecordFormat[Car]
      val avro = format.to(car)

      record.valueSchema() shouldBe avroData.toConnectSchema(schema)

      record.value() shouldBe avroData.toConnectData(schema, avro).value()
      record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
      record.sourceOffset() shouldBe null
    }
  }
} 
Example 112
Source File: JsonConverterWithSchemaEvolutionTest.scala    From kafka-connect-common   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.converters.source

import java.util.Collections

import com.datamountaineer.streamreactor.connect.converters.MsgKey
import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
import io.confluent.connect.avro.AvroData
import org.apache.avro.Schema
import org.apache.kafka.connect.data.Struct
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JsonConverterWithSchemaEvolutionTest extends AnyWordSpec with Matchers {
  val topic = "the_real_topic"
  val sourceTopic = "source_topic"
  val avroData = new AvroData(4)

  "JsonConverter" should {
    "throw IllegalArgumentException if payload is null" in {
      intercept[IllegalArgumentException] {
        val converter = new JsonConverterWithSchemaEvolution
        val record = converter.convert("topic", "somesource", "1000", null)
      }
    }

    "handle a simple json" in {
      val json = JacksonJson.toJson(Car("LaFerrari", "Ferrari", 2015, 963, 0.0001))
      val converter = new JsonConverterWithSchemaEvolution
      val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
      record.keySchema() shouldBe MsgKey.schema
      record.key().asInstanceOf[Struct].getString("topic") shouldBe sourceTopic
      record.key().asInstanceOf[Struct].getString("id") shouldBe "100"

      val schema =
        new Schema.Parser().parse(
          SchemaFor[CarOptional]().toString
            .replace("\"name\":\"CarOptional\"", s"""\"name\":\"$sourceTopic\"""")
            .replace(s""",\"namespace\":\"${getClass.getCanonicalName.dropRight(getClass.getSimpleName.length+1)}\"""", "")
        )
      val format = RecordFormat[CarOptional]
      val carOptional = format.to(CarOptional(Option("LaFerrari"), Option("Ferrari"), Option(2015), Option(963), Option(0.0001)))

      record.valueSchema() shouldBe avroData.toConnectSchema(schema)

      record.value() shouldBe avroData.toConnectData(schema, carOptional).value()
      record.sourcePartition() shouldBe null
      record.sourceOffset() shouldBe Collections.singletonMap(JsonConverterWithSchemaEvolution.ConfigKey, avroData.fromConnectSchema(avroData.toConnectSchema(schema)).toString())
    }
  }
}


case class Car(name: String,
               manufacturer: String,
               model: Long,
               bhp: Long,
               price: Double)


case class CarOptional(name: Option[String],
                       manufacturer: Option[String],
                       model: Option[Long],
                       bhp: Option[Long],
                       price: Option[Double]) 
Example 113
Source File: CirceSupportSpec.scala    From sangria-circe   with Apache License 2.0 5 votes vote down vote up
package sangria.marshalling


import sangria.marshalling.circe._
import sangria.marshalling.testkit._

import io.circe.Json
import io.circe.generic.auto._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CirceSupportSpec extends AnyWordSpec with Matchers with MarshallingBehaviour with InputHandlingBehaviour {
  "Circe integration" should {
    behave like `value (un)marshaller` (CirceResultMarshaller)

    behave like `AST-based input unmarshaller` (circeFromInput)
    behave like `AST-based input marshaller` (CirceResultMarshaller)

    behave like `case class input unmarshaller`
    behave like `case class input marshaller` (CirceResultMarshaller)
  }

  val toRender = Json.obj(
    "a" → Json.arr(Json.Null, Json.fromInt(123), Json.arr(Json.obj("foo" → Json.fromString("bar")))),
    "b" → Json.obj(
      "c" → Json.fromBoolean(true),
      "d" → Json.Null))

  "InputUnmarshaller" should {
    "throw an exception on invalid scalar values" in {
      an [IllegalStateException] should be thrownBy
          CirceInputUnmarshaller.getScalarValue(Json.obj())
    }

    "throw an exception on variable names" in {
      an [IllegalArgumentException] should be thrownBy
          CirceInputUnmarshaller.getVariableName(Json.fromString("$foo"))
    }

    "render JSON values" in {
      val rendered = CirceInputUnmarshaller.render(toRender)

      rendered should be ("""{"a":[null,123,[{"foo":"bar"}]],"b":{"c":true,"d":null}}""")
    }
  }

  "ResultMarshaller" should {
    "render pretty JSON values" in {
      val rendered = CirceResultMarshaller.renderPretty(toRender)

      rendered.replaceAll("\r", "") should be (
        """{
          |  "a" : [
          |    null,
          |    123,
          |    [
          |      {
          |        "foo" : "bar"
          |      }
          |    ]
          |  ],
          |  "b" : {
          |    "c" : true,
          |    "d" : null
          |  }
          |}""".stripMargin.replaceAll("\r", ""))
    }

    "render compact JSON values" in {
      val rendered = CirceResultMarshaller.renderCompact(toRender)

      rendered should be ("""{"a":[null,123,[{"foo":"bar"}]],"b":{"c":true,"d":null}}""")
    }
  }
} 
Example 114
Source File: ExchangeTransactionCreatorSpecification.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.model

import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.crypto
import com.wavesplatform.dex.domain.crypto.Proofs
import com.wavesplatform.dex.domain.order.Order
import com.wavesplatform.dex.domain.order.OrderOps._
import com.wavesplatform.dex.domain.transaction.ExchangeTransactionV2
import com.wavesplatform.dex.domain.utils.EitherExt2
import com.wavesplatform.dex.{MatcherSpecBase, NoShrink}
import org.scalacheck.Gen
import org.scalamock.scalatest.PathMockFactory
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.{Assertion, BeforeAndAfterAll}
import org.scalatestplus.scalacheck.{ScalaCheckPropertyChecks => PropertyChecks}

import scala.concurrent.ExecutionContext.Implicits.global

class ExchangeTransactionCreatorSpecification
    extends AnyWordSpec
    with Matchers
    with MatcherSpecBase
    with BeforeAndAfterAll
    with PathMockFactory
    with PropertyChecks
    with NoShrink
    with TableDrivenPropertyChecks {

  private def getExchangeTransactionCreator(hasMatcherScript: Boolean = false,
                                            hasAssetScripts: Asset => Boolean = _ => false): ExchangeTransactionCreator = {
    new ExchangeTransactionCreator(MatcherAccount, matcherSettings.exchangeTxBaseFee, hasMatcherScript, hasAssetScripts)
  }

  "ExchangeTransactionCreator" should {
    "create an ExchangeTransactionV2" when {
      (List(1, 2, 3) ++ List(1, 2, 3)).combinations(2).foreach {
        case List(counterVersion, submittedVersion) =>
          s"counterVersion=$counterVersion, submittedVersion=$submittedVersion" in {
            val counter   = buy(wavesBtcPair, 100000, 0.0008, matcherFee = Some(2000L), version = counterVersion.toByte)
            val submitted = sell(wavesBtcPair, 100000, 0.0007, matcherFee = Some(1000L), version = submittedVersion.toByte)

            val tc = getExchangeTransactionCreator()
            val oe = mkOrderExecutedRaw(submitted, counter)

            tc.createTransaction(oe).explicitGet() shouldBe a[ExchangeTransactionV2]
          }
      }
    }

    "take fee from order executed event" when {
      "orders are matched fully" in {
        val preconditions = for { ((_, buyOrder), (_, sellOrder)) <- orderV3MirrorPairGenerator } yield (buyOrder, sellOrder)
        test(preconditions)
      }

      "orders are matched partially" in {
        val preconditions = for { ((_, buyOrder), (senderSell, sellOrder)) <- orderV3MirrorPairGenerator } yield {
          val sellOrderWithUpdatedAmount = sellOrder.updateAmount(sellOrder.amount / 2)
          val newSignature               = crypto.sign(senderSell, sellOrderWithUpdatedAmount.bodyBytes())
          val correctedSellOrder         = sellOrderWithUpdatedAmount.updateProofs(Proofs(Seq(ByteStr(newSignature))))

          (buyOrder, correctedSellOrder)
        }

        test(preconditions)
      }

      def test(preconditions: Gen[(Order, Order)]): Assertion = forAll(preconditions) {
        case (buyOrder, sellOrder) =>
          val tc = getExchangeTransactionCreator()
          val oe = mkOrderExecutedRaw(buyOrder, sellOrder)
          val tx = tc.createTransaction(oe).explicitGet()

          tx.buyMatcherFee shouldBe oe.submittedExecutedFee
          tx.sellMatcherFee shouldBe oe.counterExecutedFee
      }
    }
  }
} 
Example 115
Source File: PlayJsonSupportSpec.scala    From sangria-play-json   with Apache License 2.0 5 votes vote down vote up
package sangria.marshalling

import play.api.libs.json._

import sangria.marshalling.testkit._
import sangria.marshalling.playJson._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class PlayJsonSupportSpec extends AnyWordSpec with Matchers with MarshallingBehaviour with InputHandlingBehaviour with ParsingBehaviour {
  implicit val commentFormat = Json.format[Comment]
  implicit val articleFormat = Json.format[Article]

  "PlayJson integration" should {
    behave like `value (un)marshaller` (PlayJsonResultMarshaller)

    behave like `AST-based input unmarshaller` (playJsonFromInput[JsValue])
    behave like `AST-based input marshaller` (PlayJsonResultMarshaller)

    behave like `case class input unmarshaller`
    behave like `case class input marshaller` (PlayJsonResultMarshaller)

    behave like `input parser` (ParseTestSubjects(
      complex = """{"a": [null, 123, [{"foo": "bar"}]], "b": {"c": true, "d": null}}""",
      simpleString = "\"bar\"",
      simpleInt = "12345",
      simpleNull = "null",
      list = "[\"bar\", 1, null, true, [1, 2, 3]]",
      syntaxError = List("[123, \"FOO\" \"BAR\"")
    ))
  }

  val toRender = Json.obj(
    "a" -> Json.arr(JsNull, JsNumber(123), Json.arr(Json.obj("foo" -> JsString("bar")))),
    "b" -> Json.obj(
      "c" -> JsBoolean(true),
      "d" -> JsNull))

  "InputUnmarshaller" should {
    "throw an exception on invalid scalar values" in {
      an [IllegalStateException] should be thrownBy
          PlayJsonInputUnmarshaller.getScalarValue(Json.obj())
    }

    "throw an exception on variable names" in {
      an [IllegalArgumentException] should be thrownBy
          PlayJsonInputUnmarshaller.getVariableName(JsString("$foo"))
    }

    "render JSON values" in {
      val rendered = PlayJsonInputUnmarshaller.render(toRender)

      rendered should be ("""{"a":[null,123,[{"foo":"bar"}]],"b":{"c":true,"d":null}}""")
    }
  }

  "ResultMarshaller" should {
    "render pretty JSON values" in {
      val rendered = PlayJsonResultMarshaller.renderPretty(toRender)

      rendered.replaceAll("\r", "") should be (
        """{
          |  "a" : [ null, 123, [ {
          |    "foo" : "bar"
          |  } ] ],
          |  "b" : {
          |    "c" : true,
          |    "d" : null
          |  }
          |}""".stripMargin.replaceAll("\r", ""))
    }

    "render compact JSON values" in {
      val rendered = PlayJsonResultMarshaller.renderCompact(toRender)

      rendered should be ("""{"a":[null,123,[{"foo":"bar"}]],"b":{"c":true,"d":null}}""")
    }
  }
} 
Example 116
Source File: RedisCheckSpec.scala    From sup   with Apache License 2.0 5 votes vote down vote up
package sup

import cats.implicits._
import dev.profunktor.redis4cats.algebra.Ping
import sup.modules.redis
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.should.Matchers

class RedisCheckSpec extends AnyWordSpec with Matchers {
  "Either check" when {
    "Right" should {
      "be Healthy" in {
        implicit val ping: Ping[Either[String, ?]] = new Ping[Either[String, ?]] {
          override val ping: Either[String, String] = Right("pong")
        }

        val healthCheck = redis.pingCheck

        healthCheck.check shouldBe Right(HealthResult.one(Health.Healthy))
      }
    }

    "Left" should {
      "be Sick" in {
        implicit val ping: Ping[Either[String, ?]] = new Ping[Either[String, ?]] {
          override val ping: Either[String, String] = Left("boo")
        }

        val healthCheck = modules.redis.pingCheck

        healthCheck.check shouldBe Right(HealthResult.one(Health.Sick))
      }
    }
  }
} 
Example 117
Source File: SKRSpec.scala    From spark-kafka-writer   with Apache License 2.0 5 votes vote down vote up
package com.github.benfradet.spark.kafka.writer

import java.util.concurrent.atomic.AtomicInteger

import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

case class Foo(a: Int, b: String)

trait SKRSpec
  extends AnyWordSpec
  with Matchers
  with BeforeAndAfterEach
  with BeforeAndAfterAll
  with Eventually {

  val sparkConf = new SparkConf()
    .setMaster("local[1]")
    .setAppName(getClass.getSimpleName)

  var ktu: KafkaTestUtils = _
  override def beforeAll(): Unit = {
    ktu = new KafkaTestUtils
    ktu.setup()
  }
  override def afterAll(): Unit = {
    SKRSpec.callbackTriggerCount.set(0)
    if (ktu != null) {
      ktu.tearDown()
      ktu = null
    }
  }

  var topic: String = _
  var ssc: StreamingContext = _
  var spark: SparkSession = _
  override def afterEach(): Unit = {
    if (ssc != null) {
      ssc.stop()
      ssc = null
    }
    if (spark != null) {
      spark.stop()
      spark = null
    }
  }
  override def beforeEach(): Unit = {
    ssc = new StreamingContext(sparkConf, Seconds(1))
    spark = SparkSession.builder
      .config(sparkConf)
      .getOrCreate()
    topic = s"topic-${Random.nextInt()}"
    ktu.createTopics(topic)
  }

  def collect(ssc: StreamingContext, topic: String): ArrayBuffer[String] = {
    val kafkaParams = Map(
      "bootstrap.servers" -> ktu.brokerAddress,
      "auto.offset.reset" -> "earliest",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "test-collect"
    )
    val results = new ArrayBuffer[String]
    KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set(topic), kafkaParams)
    ).map(_.value())
      .foreachRDD { rdd =>
        results ++= rdd.collect()
        ()
      }
    results
  }

  val producerConfig = Map(
    "bootstrap.servers" -> "127.0.0.1:9092",
    "key.serializer" -> classOf[StringSerializer].getName,
    "value.serializer" -> classOf[StringSerializer].getName
  )
}

object SKRSpec {
  val callbackTriggerCount = new AtomicInteger()
} 
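A hypothetical concrete spec built on this harness (it needs the same local Kafka test utilities on the classpath): beforeEach hands each test a fresh topic and StreamingContext.

// Hypothetical example; not part of the project.
class HarnessSmokeSpec extends SKRSpec {
  "the harness" should {
    "provide a fresh topic and streaming context per test" in {
      topic should startWith("topic-")
      ssc should not be null
    }
  }
}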
Example 118
Source File: Discipline.scala    From discipline-scalatest   with MIT License 5 votes vote down vote up
package org.typelevel.discipline
package scalatest

import org.scalactic.Prettifier
import org.scalactic.source.Position
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.funsuite.AnyFunSuiteLike
import org.scalatest.prop.Configuration
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.Checkers

trait Discipline { self: Configuration =>

  
  final protected[this] def convertConfiguration(
    config: PropertyCheckConfiguration
  ): Checkers.PropertyCheckConfiguration =
    Checkers.PropertyCheckConfiguration(
      config.minSuccessful,
      config.maxDiscardedFactor,
      config.minSize,
      config.sizeRange,
      config.workers
    )

  def checkAll(name: String, ruleSet: Laws#RuleSet)(implicit
    config: PropertyCheckConfiguration,
    prettifier: Prettifier,
    pos: Position
  ): Unit
}

trait FlatSpecDiscipline extends Discipline { self: AnyFlatSpecLike with Configuration =>
  final def checkAll(name: String,
                     ruleSet: Laws#RuleSet
  )(implicit config: PropertyCheckConfiguration, prettifier: Prettifier, pos: Position): Unit =
    ruleSet.all.properties match {
      case first +: rest =>
        name should first._1 in Checkers.check(first._2)(convertConfiguration(config), prettifier, pos)

        for ((id, prop) <- rest)
          it should id in Checkers.check(prop)(convertConfiguration(config), prettifier, pos)
    }
}

trait FunSpecDiscipline extends Discipline { self: AnyFunSpecLike with Configuration =>
  final def checkAll(name: String,
                     ruleSet: Laws#RuleSet
  )(implicit config: PropertyCheckConfiguration, prettifier: Prettifier, pos: Position): Unit =
    describe(name) {
      for ((id, prop) <- ruleSet.all.properties)
        it(id) {
          Checkers.check(prop)(convertConfiguration(config), prettifier, pos)
        }
    }
}

trait FunSuiteDiscipline extends Discipline { self: AnyFunSuiteLike with Configuration =>
  final def checkAll(name: String,
                     ruleSet: Laws#RuleSet
  )(implicit config: PropertyCheckConfiguration, prettifier: Prettifier, pos: Position): Unit =
    for ((id, prop) <- ruleSet.all.properties)
      test(s"${name}.${id}") {
        Checkers.check(prop)(convertConfiguration(config), prettifier, pos)
      }
}

trait WordSpecDiscipline extends Discipline { self: AnyWordSpec with Configuration =>

  def checkAll(name: String,
               ruleSet: Laws#RuleSet
  )(implicit config: PropertyCheckConfiguration, prettifier: Prettifier, pos: Position): Unit =
    for ((id, prop) <- ruleSet.all.properties)
      registerTest(s"${name}.${id}") {
        Checkers.check(prop)(convertConfiguration(config), prettifier, pos)
      }
} 
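A usage sketch for the WordSpec flavour, assuming cats-laws on the classpath: checkAll registers one test per law under the name "<name>.<law>".

import cats.kernel.laws.discipline.MonoidTests
import org.scalatest.prop.Configuration
import org.scalatest.wordspec.AnyWordSpec
import org.typelevel.discipline.scalatest.WordSpecDiscipline

// MonoidTests comes from cats-laws and is an assumption here, not part of this library.
class IntMonoidLawsSpec extends AnyWordSpec with Configuration with WordSpecDiscipline {
  checkAll("Int.MonoidLaws", MonoidTests[Int].monoid)
}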
Example 119
Source File: ScalazIntegrationSpec.scala    From octopus   with Apache License 2.0 5 votes vote down vote up
package octopus.scalaz

import octopus.example.domain._
import octopus.syntax._
import octopus.{Fixtures, ValidationError}
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scalaz.{NonEmptyList, Validation}


class ScalazIntegrationSpec
  extends AnyWordSpec with Matchers with Fixtures {

  "Scalaz Integration" should {
    "support ValidationNel" when {

      "success scenario" in {
        1.validate.toValidationNel mustBe Validation.success(1)
        userId_Valid.validate.toValidationNel mustBe Validation.success(userId_Valid)
        user_Valid.validate.toValidationNel mustBe Validation.success(user_Valid)
      }

      "failure scenario" in {

        userId_Invalid.validate.toValidationNel mustBe Validation.failure(
          NonEmptyList.nels(ValidationError(UserId.Err_MustBePositive))
        )

        user_Invalid1.validate.toValidationNel.leftMap(_.map(_.toPair)) mustBe Validation.failure(
          NonEmptyList.nels(
            "id" -> UserId.Err_MustBePositive,
            "email" -> Email.Err_MustContainAt,
            "email" -> Email.Err_MustContainDotAfterAt,
            "address.postalCode" -> PostalCode.Err_MustBeLengthOf5,
            "address.postalCode" -> PostalCode.Err_MustContainOnlyDigits,
            "address.city" -> Address.Err_MustNotBeEmpty,
            "address.street" -> Address.Err_MustNotBeEmpty
          )
        )
      }
    }

    "support Validation" when {

      "success scenario" in {
        1.validate.toValidation mustBe Validation.success(1)
        userId_Valid.validate.toValidation mustBe Validation.success(userId_Valid)
        user_Valid.validate.toValidation mustBe Validation.success(user_Valid)
      }

      "failure scenario" in {

        userId_Invalid.validate.toValidation mustBe Validation.failure(
          List(ValidationError(UserId.Err_MustBePositive))
        )

        user_Invalid1.validate.toValidation.leftMap(_.map(_.toPair)) mustBe Validation.failure(
          List(
            "id" -> UserId.Err_MustBePositive,
            "email" -> Email.Err_MustContainAt,
            "email" -> Email.Err_MustContainDotAfterAt,
            "address.postalCode" -> PostalCode.Err_MustBeLengthOf5,
            "address.postalCode" -> PostalCode.Err_MustContainOnlyDigits,
            "address.city" -> Address.Err_MustNotBeEmpty,
            "address.street" -> Address.Err_MustNotBeEmpty
          )
        )
      }
    }
  }
} 
Example 120
Source File: CatsIntegrationSpec.scala    From octopus   with Apache License 2.0 5 votes vote down vote up
package octopus.cats

import cats.data.{NonEmptyList, Validated}
import octopus.example.domain._
import octopus.syntax._
import octopus.{Fixtures, ValidationError}
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CatsIntegrationSpec
  extends AnyWordSpec with Matchers with Fixtures {

  "Cats Integration" should {

    "support ValidatedNel" when {

      "Valid scenario" in {
        1.validate.toValidatedNel mustBe Validated.Valid(1)
        userId_Valid.validate.toValidatedNel mustBe Validated.Valid(userId_Valid)
        user_Valid.validate.toValidatedNel mustBe Validated.Valid(user_Valid)
      }

      "Invalid scenario" in {

        userId_Invalid.validate.toValidatedNel mustBe Validated.Invalid(
          NonEmptyList.of(ValidationError(UserId.Err_MustBePositive))
        )

        user_Invalid1.validate.toValidatedNel.leftMap(_.map(_.toPair)) mustBe Validated.Invalid(
          NonEmptyList.of(
            "id" -> UserId.Err_MustBePositive,
            "email" -> Email.Err_MustContainAt,
            "email" -> Email.Err_MustContainDotAfterAt,
            "address.postalCode" -> PostalCode.Err_MustBeLengthOf5,
            "address.postalCode" -> PostalCode.Err_MustContainOnlyDigits,
            "address.city" -> Address.Err_MustNotBeEmpty,
            "address.street" -> Address.Err_MustNotBeEmpty
          )
        )
      }
    }

    "support Validated" when {

      "Valid scenario" in {
        1.validate.toValidated mustBe Validated.Valid(1)
        userId_Valid.validate.toValidated mustBe Validated.Valid(userId_Valid)
        user_Valid.validate.toValidated mustBe Validated.Valid(user_Valid)
      }

      "Invalid scenario" in {

        userId_Invalid.validate.toValidated mustBe Validated.Invalid(
          List(ValidationError(UserId.Err_MustBePositive))
        )

        user_Invalid1.validate.toValidated.leftMap(_.map(_.toPair)) mustBe Validated.Invalid(
          List(
            "id" -> UserId.Err_MustBePositive,
            "email" -> Email.Err_MustContainAt,
            "email" -> Email.Err_MustContainDotAfterAt,
            "address.postalCode" -> PostalCode.Err_MustBeLengthOf5,
            "address.postalCode" -> PostalCode.Err_MustContainOnlyDigits,
            "address.city" -> Address.Err_MustNotBeEmpty,
            "address.street" -> Address.Err_MustNotBeEmpty
          )
        )
      }
    }
  }

} 
Example 121
Source File: UserSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.data

import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class UserSpec extends AnyWordSpec with Diagrams {
  "UserRawJson" should {
    "convert to User in apply of UserRawJson" in {
      assert(
        User(UserRawJson("u", Some(Set("g")), "a", "s", None)) ==
          User(UserName("u"), Set(UserGroup("g")), AwsAccessKey("a"), AwsSecretKey("s"), UserAssumeRole(""))
      )
      assert(
        User(UserRawJson("u", None, "a", "s", Some("testrole"))) ==
          User(UserName("u"), Set(), AwsAccessKey("a"), AwsSecretKey("s"), UserAssumeRole("testrole"))
      )
    }
  }
} 
Example 122
Source File: HeaderIPsSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.data

import java.net.InetAddress

import akka.http.scaladsl.model.RemoteAddress
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class HeaderIPsSpec extends AnyWordSpec with Diagrams {

  private[this] val address1 = RemoteAddress(InetAddress.getByName("1.1.1.1"), None)
  private[this] val address2 = RemoteAddress(InetAddress.getByName("1.1.1.2"), None)
  private[this] val address3 = RemoteAddress(InetAddress.getByName("1.1.1.3"), None)
  private[this] val address4 = RemoteAddress(InetAddress.getByName("1.1.1.4"), None)

  val headerIPs = HeaderIPs(
    `X-Real-IP` = Some(address1),
    `X-Forwarded-For` = Some(Seq(address2, address3)),
    `Remote-Address` = Some(address4)
  )

  "HeaderIPs" should {
    "return all IPs" that {
      "are in X-Real-IP" in {
        assert(headerIPs.allIPs.contains(address1))
      }
      "are in X-Forwarded-For" in {
        assert(headerIPs.allIPs.contains(address2))
        assert(headerIPs.allIPs.contains(address3))
      }
      "are in Remote-Address" in {
        assert(headerIPs.allIPs.contains(address4))
      }
      "in toString method" in {
        assert(headerIPs.toStringList contains "X-Real-IP=1.1.1.1")
        assert(headerIPs.toStringList contains "X-Forwarded-For=1.1.1.2,1.1.1.3")
        assert(headerIPs.toStringList contains "Remote-Address=1.1.1.4")
      }
    }
  }
} 
Example 123
Source File: JsonProtocolsSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.data

import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class JsonProtocolsSpec extends AnyWordSpec with Diagrams with JsonProtocols {

  import spray.json._

  "Json protocols" should {
    "parse a User" that {
      "has a group" in {
        val jsonString =
          """{
            | "userName": "user",
            | "userGroups": ["group1"],
            | "accessKey": "accesskey",
            | "secretKey": "secretkey"
            |}""".stripMargin
        val result = jsonString.parseJson.convertTo[UserRawJson]
        assert(result == UserRawJson("user", Some(Set("group1")), "accesskey", "secretkey", None))
      }

      "does not have a group" in {
        val jsonString =
          """{
            | "userName": "user",
            | "userGroups": [],
            | "accessKey": "accesskey",
            | "secretKey": "secretkey"
            |}""".stripMargin
        val result = jsonString.parseJson.convertTo[UserRawJson]
        assert(result == UserRawJson("user", Some(Set.empty[String]), "accesskey", "secretkey", None))
      }

      "fail when fields are missing" in {
        val jsonString =
          """{
            | "userName": "user"
            |}""".stripMargin

        assertThrows[spray.json.DeserializationException] {
          jsonString.parseJson.convertTo[UserRawJson]
        }
      }
    }
  }
} 
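Assuming the implicit UserRawJson format in JsonProtocols can also write (spray-json's jsonFormatN derives both directions), the parse direction above extends to a round trip:

import com.ing.wbaa.rokku.proxy.data.{JsonProtocols, UserRawJson}
import spray.json._

// Hedged sketch; relies on the format being two-way, which the tests above do not pin down.
object UserRawJsonRoundTripSketch extends JsonProtocols {
  val raw = UserRawJson("user", Some(Set("group1")), "accesskey", "secretkey", None)
  assert(raw.toJson.convertTo[UserRawJson] == raw)
}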
Example 124
Source File: RequestParserSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.handler

import akka.http.scaladsl.model._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser.{ MultipartRequestType, RequestTypeUnknown }
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class RequestParserSpec extends AnyWordSpec with Diagrams with RequestParser {

  val uri = Uri("http://localhost:8987/demobucket/ObjectName?uploadId=1")
  val httpRequest: HttpMethod => HttpRequest = m => HttpRequest(m, uri)

  val partParseResult = awsRequestFromRequest(httpRequest(HttpMethods.PUT)).asInstanceOf[MultipartRequestType]
  val completeParseResult = awsRequestFromRequest(httpRequest(HttpMethods.POST)
    .withEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), "abc".getBytes())).asInstanceOf[MultipartRequestType]

  "RequestParser" should {
    "recognize multipart upload part request" in {
      assert(partParseResult.uploadId == "1" && !partParseResult.completeMultipartUpload)
    }
    "recognize multipart upload complete request" in {
      assert(completeParseResult.uploadId == "1" && completeParseResult.completeMultipartUpload)
    }
    "return RequestUnknown for other type of request" in {
      assert(awsRequestFromRequest(httpRequest(HttpMethods.PATCH)).isInstanceOf[RequestTypeUnknown])
    }
  }
} 
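The detection hinges on the uploadId query parameter, which akka-http exposes directly on the Uri. Shown in isolation:

import akka.http.scaladsl.model.Uri

object UploadIdExtractionSketch {
  val parsed = Uri("http://localhost:8987/demobucket/ObjectName?uploadId=1")
  val uploadId: Option[String] = parsed.query().get("uploadId") // Some("1")
}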
Example 125
Source File: CacheRulesV1Spec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.cache

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.{ HttpMethod, HttpMethods, HttpRequest, Uri }
import com.ing.wbaa.rokku.proxy.config.StorageS3Settings
import com.ing.wbaa.rokku.proxy.data.RequestId
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class CacheRulesV1Spec extends AnyWordSpec with Diagrams with CacheRulesV1 with RequestParser {

  private implicit val id = RequestId("testRequestId")

  val system: ActorSystem = ActorSystem.create("test-system")
  override val storageS3Settings: StorageS3Settings = new StorageS3Settings(system.settings.config) {
    override val storageS3Authority: Uri.Authority = Uri.Authority(Uri.Host("1.2.3.4"), 1234)
  }

  override def getMaxEligibleCacheObjectSizeInBytes(implicit id: RequestId): Long = 5242880L

  override def getEligibleCachePaths(implicit id: RequestId): Array[String] = "/home/,/test/".trim.split(",")

  override def getHeadEnabled(implicit id: RequestId): Boolean = true

  private val uri = Uri("http", Uri.Authority(Uri.Host("1.2.3.4")), Path(""), None, None)

  private val methods = Seq(HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE, HttpMethods.HEAD)

  "Cache rules v1 set isEligibleToBeCached " should {

    methods.foreach { method =>
      testIsEligibleToBeCached(method, "/home/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/home/test"))))
      testIsEligibleToBeCached(method, "/home2/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/home2/test"))))
      testIsEligibleToBeCached(method, "/test/abc", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/test/abc"))))
      testIsEligibleToBeCached(method, "/testtest/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/testtest/test"))))
    }
  }

  private def testIsEligibleToBeCached(method: HttpMethod, path: String, request: HttpRequest): Unit = {
    method match {
      case HttpMethods.GET | HttpMethods.HEAD if storageS3Settings.eligibleCachePaths.exists(path.startsWith) =>
        s"for method=$method and path=$path to true" in {
          assert(isEligibleToBeCached(request))
        }
      case _ =>
        s"for method=$method and path=$path to false" in {
          assert(!isEligibleToBeCached(request))
        }
    }
  }

  "Cache rules v1 set isEligibleToBeInvalidated" should {

    methods.foreach { method =>
      val request = HttpRequest.apply(method = method, uri)
      method match {
        case HttpMethods.POST | HttpMethods.PUT | HttpMethods.DELETE =>
          s"for method=$method to true" in {
            assert(isEligibleToBeInvalidated(request))
          }
        case _ =>
          s"for method=$method to false" in {
            assert(!isEligibleToBeInvalidated(request))
          }
      }
    }
  }
} 
Example 126
Source File: HazelcastSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.cache

import akka.util.ByteString
import com.ing.wbaa.rokku.proxy.data.RequestId
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class HazelcastSpec extends AnyWordSpec with Diagrams with HazelcastCache {

  private implicit val id = RequestId("testRequestId")

  "Hazelcast Cache" should {
    "return empty BS for non existing object" in {
      assert(getObject("/bucket/nonexisting").isEmpty)
    }
    "add object to cache if BS non empty" in {
      val someObject = "/bucket/Object"
      putObject(someObject, ByteString("abc"))
      assert(getObject(someObject).isDefined)
    }
    "fail to get object from cache if BS empty" in {
      val someObject = "/bucket/emptyObject"
      putObject(someObject, ByteString.empty)
      assert(getObject(someObject).isEmpty)
    }
    "remove existing object from cache" in {
      val removedObject = "/bucket/ObjectRemoved"
      putObject(removedObject, ByteString("abc"))
      removeObject(removedObject)
      assert(getObject(removedObject).isEmpty)
    }
    "remove both head and existing object from cache (on AWS rm)" in {
      val removedObjectHead = "/bucket/ObjectRemoved1-head"
      val removedObject = "/bucket/ObjectRemoved1"
      putObject(removedObjectHead, ByteString("abc"))
      putObject(removedObject, ByteString("abc"))
      removeObject(removedObject)
      assert(getObject(removedObjectHead).isEmpty)
      assert(getObject(removedObject).isEmpty)
    }
  }

} 
Example 127
Source File: LineageHelperSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider

import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.headers.RawHeader
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data.{ BucketClassification, DirClassification, ObjectClassification, RequestId }
import com.ing.wbaa.rokku.proxy.provider.atlas.LineageHelpers
import org.scalatest.PrivateMethodTester
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.ExecutionContext

class LineageHelperSpec extends AnyWordSpec with Diagrams with PrivateMethodTester {

  object LineageHelpersTest extends LineageHelpers {
    override protected[this] implicit val kafkaSettings: KafkaSettings = null
    override protected[this] implicit val executionContext: ExecutionContext = null
  }

  implicit val id = RequestId("1")

  "extractMetadataFromHeader" that {
    "return None for empty header" in {
      val result = LineageHelpersTest.extractMetadataHeader(None)
      assert(result.isEmpty)
    }

    "return None for wrong header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k,v"))
      assert(result.isEmpty)
      val result2 = LineageHelpersTest.extractMetadataHeader(Some("k=v,k2"))
      assert(result2.isEmpty)
      val result3 = LineageHelpersTest.extractMetadataHeader(Some("kv,=k2,v2"))
      assert(result3.isEmpty)
    }

    "return key and value for metadata header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k=v"))
      assert(result.contains(Map("k" -> "v")))
    }

    "return keys and values for metadata header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k1=v1,k2=v2"))
      assert(result.contains(Map("k1" -> "v1", "k2" -> "v2")))
    }
  }

  "extractClassifications" that {
    "returns bucket classifications" in {
      val request = HttpRequest().withUri("bucket").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains BucketClassification())
      assert(result(BucketClassification()) == List("classification1"))
    }

    "returns dir classifications" in {
      val request = HttpRequest().withUri("bucket/dir1/").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1,classification2"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains DirClassification())
      assert(result(DirClassification()) == List("classification1", "classification2"))
    }

    "returns object classifications" in {
      val request = HttpRequest().withUri("bucket/obj").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1,classification2,classification3"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains ObjectClassification())
      assert(result(ObjectClassification()) == List("classification1", "classification2", "classification3"))
      val request2 = HttpRequest().withUri("bucket/dir1/obj").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1"))
      val result2 = LineageHelpersTest.extractClassifications(request2)
      assert(result2.size == 1)
      assert(result2 contains ObjectClassification())
      assert(result2(ObjectClassification()) == List("classification1"))
    }
  }

} 
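Read together, the header cases describe a strict grammar: a comma-separated list of key=value pairs, where a single malformed pair invalidates the whole header. An illustrative re-implementation of that contract, not the shipped parser:

object MetadataHeaderSketch {
  def extract(header: Option[String]): Option[Map[String, String]] =
    header.flatMap { raw =>
      val pairs = raw.split(",").map(_.split("=", -1))
      if (pairs.forall(p => p.length == 2 && p(0).nonEmpty && p(1).nonEmpty))
        Some(pairs.map(p => p(0) -> p(1)).toMap)
      else None
    }
}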
Example 128
Source File: AuthorizationProviderSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider

import akka.actor.ActorSystem
import com.ing.wbaa.rokku.proxy.config.RangerSettings
import com.ing.wbaa.rokku.proxy.provider.AuthorizationProviderRanger.RangerException
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class AuthorizationProviderSpec extends AnyWordSpec with Diagrams with AuthorizationProviderRanger {

  private[this] final implicit val testSystem: ActorSystem = ActorSystem.create("test-system")

  override val rangerSettings: RangerSettings = new RangerSettings(testSystem.settings.config) {
    override val appId: String = "nonexistent"
    override val serviceType: String = "nonexistent"
  }

  "Authorization Provider" should {
    "throw a Ranger exception for unknown appId or serviceType" in {
      assertThrows[RangerException] {
        rangerPluginForceInit
      }
    }
  }
} 
Example 129
Source File: LoggerHandlerWithIdSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider

import ch.qos.logback.classic.{ Level, Logger }
import com.ing.wbaa.rokku.proxy.data.RequestId
import com.ing.wbaa.rokku.proxy.handler.LoggerHandlerWithId
import org.scalatest.BeforeAndAfter
import org.scalatest.diagrams.Diagrams
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class LoggerHandlerWithIdSpec extends AnyWordSpec with Matchers with Diagrams with BeforeAndAfter {

  private val logger = new LoggerHandlerWithId
  implicit val id: RequestId = RequestId("1")

  private val logRoot: Logger = org.slf4j.LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger]
  private val currentLogLevel = logRoot.getLevel
  private val val1 = 1
  private val val2 = 2
  before(logRoot.setLevel(Level.DEBUG))
  after(logRoot.setLevel(currentLogLevel))

  "Logger" should {
    "work" in {

      noException should be thrownBy {

        logger.debug("test debug {}", val1)
        logger.debug("test debug {} {}", val1, val2)
        logger.debug("test debug {}", new RuntimeException("RTE").getMessage)

        logger.info("test info {}", val1)
        logger.info("test info {} {}", val1, val2)
        logger.info("test info {}", new RuntimeException("RTE").getMessage)

        logger.warn("test warn {}", val1)
        logger.warn("test warn {} {}", val1, val2)
        logger.warn("test warn {}", new RuntimeException("RTE").getMessage)

        logger.error("test error {}", val1)
        logger.error("test error {} {}", val1, val2)
        logger.error("test error {}", new RuntimeException("RTE").getMessage)
      }
    }
  }
} 
Example 130
Source File: AbstractAbstractIpCidrMatcherTest.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.ranger.plugin.conditionevaluator

import org.apache.ranger.plugin.policyengine.{ RangerAccessRequest, RangerAccessRequestImpl }
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

abstract class AbstractAbstractIpCidrMatcherTest extends AnyWordSpec with Diagrams {

  import scala.collection.JavaConverters._

  def newIpCidrMatcher(cidrs: List[String]): AbstractIpCidrMatcher

  def newRangerRequest(remoteIp: String, forwardedForIps: List[String] = Nil): RangerAccessRequest = {
    val rari = new RangerAccessRequestImpl()
    rari.setRemoteIPAddress(remoteIp)
    rari.setForwardedAddresses(forwardedForIps.asJava)
    rari
  }

  "IpCidrMatcherTest" should {

    "match valid CIDR ranges" in {
      val newMatcher = newIpCidrMatcher(List("1.2.3.4/32"))
      val newRequest = newRangerRequest("1.2.3.4")
      assert(newMatcher.isMatched(newRequest))
    }

    "match when X-Forwarded-For IPs are in range" in {
      val newMatcher = newIpCidrMatcher(List("1.1.0.0/16"))
      val newRequest = newRangerRequest("1.1.1.1", List("1.1.1.1", "1.1.1.2", "1.1.2.1"))
      assert(newMatcher.isMatched(newRequest))
    }

    "match all when conditions are empty" in {
      val newMatcher = newIpCidrMatcher(List())
      val newRequest = newRangerRequest("1.2.3.4")
      assert(newMatcher.isMatched(newRequest))
    }

    "match all when conditions contain a *" in {
      val newMatcher = newIpCidrMatcher(List("1.2.3.4/32", "*"))
      val newRequest = newRangerRequest("23.34.45.56")
      assert(newMatcher.isMatched(newRequest))
    }

    "not match when Ip not in CIDR range" in {
      val newMatcher = newIpCidrMatcher(List("1.2.3.4/32"))
      val newRequest = newRangerRequest("23.34.45.56")
      assert(!newMatcher.isMatched(newRequest))
    }

    "skip an invalid cidr range" in {
      val newMatcher = newIpCidrMatcher(List("1.2.3.4//32"))
      val newRequest = newRangerRequest("1.2.3.4")
      assert(!newMatcher.isMatched(newRequest))
    }

    "throw an exception when any IP is null" in {
      val newMatcher = newIpCidrMatcher(List("1.2.3.4/32"))
      val newRequest1 = newRangerRequest(null)
      val newRequest2 = newRangerRequest("1.2.3.4", List("1.1.1.1", null))
      assertThrows[Exception](newMatcher.isMatched(newRequest1))
      assertThrows[Exception](newMatcher.isMatched(newRequest2))
    }

    "not throw an exception when an IP is null but matches all" in {
      val newMatcher = newIpCidrMatcher(List())
      val newRequest1 = newRangerRequest(null)
      val newRequest2 = newRangerRequest("1.2.3.4", List("1.1.1.1", null))
      assert(newMatcher.isMatched(newRequest1))
      assert(newMatcher.isMatched(newRequest2))
    }
  }
} 
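The suite fixes these rules: an empty condition list or a "*" matches everything; otherwise the remote IP and every forwarded IP must fall inside some configured range, and a null IP only passes in match-all mode. A hedged sketch using commons-net SubnetUtils (an assumption; the real matcher may rely on Ranger's own utilities):

import org.apache.commons.net.util.SubnetUtils

object CidrMatchSketch {
  def isMatched(cidrs: List[String], ips: List[String]): Boolean =
    cidrs.isEmpty || cidrs.contains("*") || ips.forall { ip =>
      cidrs.exists { cidr =>
        val subnet = new SubnetUtils(cidr) // throws on a malformed CIDR; the real matcher skips it
        subnet.setInclusiveHostCount(true) // count a /32 as containing its own address
        subnet.getInfo.isInRange(ip)       // throws on a null IP, as the last cases require
      }
    }
}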
Example 131
Source File: TupleSpec.scala    From play-json   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.json

import org.scalatest._
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

final class TupleSpec extends AnyWordSpec with Matchers {
  "Reading/Write tuples" should {
    def check[T: Reads: Writes](value: T, expected: String) = {
      Json.stringify(Json.toJson(value)).mustEqual(expected)
      Json.fromJson(Json.parse(expected)).get.mustEqual(value)
    }

    "work for small tuples" in {
      check(Tuple1(1), "[1]")
      check((1, 2, "lol"), """[1,2,"lol"]""")
    }

    "work for large tuples" in {
      check(
        (1, 2, "lol", "foo", "bar", "baz", true, Seq(1, 2)),
        """[1,2,"lol","foo","bar","baz",true,[1,2]]"""
      )
    }

    "work for nested tuples" in {
      check(
        (1, 2, ("lol", ("foo", "bar"))),
        """[1,2,["lol",["foo","bar"]]]"""
      )
    }
  }
} 
Example 132
Source File: JsonRichSpec.scala    From play-json   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.json

import play.api.libs.json.Json._

import org.scalatest._
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JsonRichSpec extends AnyWordSpec with Matchers {
  "JSON" should {
    "create json with rich syntax" in {
      def js = Json.obj(
        "key1" -> Json.obj("key11" -> "value11", "key12" -> 123L, "key13" -> JsNull),
        "key2" -> 123,
        "key3" -> true,
        "key4" -> Json.arr("value41", 345.6, JsString("test"), JsObject(Seq("key411" -> obj("key4111" -> 987.654))))
      )

      js.mustEqual(
        JsObject(
          Seq(
            "key1" -> JsObject(
              Seq(
                "key11" -> JsString("value11"),
                "key12" -> JsNumber(123L),
                "key13" -> JsNull
              )
            ),
            "key2" -> JsNumber(123),
            "key3" -> JsTrue,
            "key4" -> JsArray(
              Array(
                JsString("value41"),
                JsNumber(345.6),
                JsString("test"),
                JsObject(Seq("key411" -> JsObject(Seq("key4111" -> JsNumber(987.654)))))
              )
            )
          )
        )
      )
    }
  }
} 
Example 133
Source File: JsonSpec.scala    From play-json   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.json

import play.api.libs.json.Json._

import org.scalatest._
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class JsonSpec extends AnyWordSpec with Matchers {
  "Complete JSON should create full object" when {
    "lose precision when parsing BigDecimals" in {
      val n = BigDecimal("12345678901234567890.123456789")

      parse(stringify(toJson(n))).mustEqual(
        JsNumber(
          BigDecimal("12345678901234567000")
        )
      )
    }

    "lose precision when parsing big integers" in {
      // By big integers, we just mean integers that overflow long,
      // since Jackson has different code paths for them from decimals
      val json = toJson(BigDecimal("1.2345678901234568E+29"))
      parse(stringify(json)).mustEqual(json)
    }

    "keep similar object between serialized and deserialized data".taggedAs(UnstableInScala213) in {
      val original = Json.obj(
        "key1" -> "value1",
        "key2" -> true,
        "key3" -> JsNull,
        "key4" -> Json.arr(1, 2.5, "value2", false, JsNull),
        "key5" -> Json.obj(
          "key6" -> "こんにちは",
          "key7" -> BigDecimal("12345678901234567000")
        )
      )
      val originalString = Json.stringify(original)
      val parsed         = Json.parse(originalString)

      parsed.asInstanceOf[JsObject].fields.mustEqual(original.fields)
      Json.stringify(parsed).mustEqual(originalString)
    }

    "parse from InputStream" in {
      val orig = Json.obj(
        "key1" -> "value1",
        "key2" -> true,
        "key3" -> JsNull,
        "key4" -> Json.arr(1, 2.5, "value2", false, JsNull),
        "key5" -> Json.obj(
          "key6" -> "こんにちは",
          "key7" -> BigDecimal("12345678901234567890.123456789")
        )
      )
      def stream = new java.io.ByteArrayInputStream(
        orig.toString.getBytes("UTF-8")
      )

      def expected = Json.obj(
        "key1" -> "value1",
        "key2" -> true,
        "key3" -> JsNull,
        "key4" -> Json.arr(1, 2.5, "value2", false, JsNull),
        "key5" -> Json.obj(
          "key6" -> "こんにちは",
          "key7" -> BigDecimal( // JS loose precision on BigDec
            "12345678901234567000"
          )
        )
      )

      Json.parse(stream).mustEqual(expected)
    }
  }
} 
Example 134
Source File: DurationGeneratorsSpec.scala    From play-json-ops   with MIT License 5 votes vote down vote up
package play.api.libs.json.scalacheck

import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks._
import play.api.libs.json.scalacheck.DurationGenerators._

import scala.concurrent.duration.{Duration, FiniteDuration}

class DurationGeneratorsSpec extends AnyWordSpec {

  "Arbitrary[FiniteDuration]" should {
    "always produce a valid finite value" in {
      forAll() { (duration: FiniteDuration) =>
        assert(duration.isFinite())
      }
    }
  }

  "Arbitrary[Duration]" should {
    "always produce a valid value" in {
      forAll() { (duration: Duration) =>
        assert(duration ne null)
      }
    }
  }
} 
Example 135
Source File: DurationGeneratorsSpec.scala    From play-json-ops   with MIT License 5 votes vote down vote up
package play.api.libs.json.scalacheck

import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import play.api.libs.json.scalacheck.DurationGenerators._

import scala.concurrent.duration.{Duration, FiniteDuration}

class DurationGeneratorsSpec extends AnyWordSpec
  with ScalaCheckDrivenPropertyChecks {

  "Arbitrary[FiniteDuration]" should {
    "always produce a valid finite value" in {
      forAll() { (duration: FiniteDuration) =>
        assert(duration.isFinite)
      }
    }
  }

  "Arbitrary[Duration]" should {
    "always produce a valid value" in {
      forAll() { (duration: Duration) =>
        assert(duration ne null)
      }
    }
  }
} 
Example 136
Source File: ResourceSpec.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.vatapi.resources

import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import play.api.http.{HeaderNames, MimeTypes, Status}
import play.api.mvc.ControllerComponents
import play.api.test.Helpers.stubControllerComponents
import play.api.test.{DefaultAwaitTimeout, ResultExtractors}
import uk.gov.hmrc.domain.Vrn
import uk.gov.hmrc.vatapi.TestUtils
import uk.gov.hmrc.vatapi.config.AppContext
import uk.gov.hmrc.vatapi.mocks.auth.MockAuthorisationService

trait ResourceSpec extends AnyWordSpec
  with Matchers
  with OptionValues
  with TestUtils
  with ResultExtractors
  with HeaderNames
  with Status
  with DefaultAwaitTimeout
  with MimeTypes
  with MockAuthorisationService {

  val vrn: Vrn = generateVrn

  val mockAppContext = mock[AppContext]

  lazy val cc: ControllerComponents = stubControllerComponents()

  def mockAuthAction(vrn: Vrn) = {
    MockAuthorisationService.authCheck(vrn)
  }

  def mockAuthActionWithNrs(vrn: Vrn) = {
    MockAuthorisationService.authCheckWithNrsRequirement(vrn)
  }
} 
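A resource test built on this trait typically stubs authorisation first via `mockAuthAction(vrn)`. A hypothetical outline (the resource under test is not part of the listing):

// Hypothetical usage sketch of the ResourceSpec trait above.
class SomeResourceSpec extends ResourceSpec {
  "SomeResource" should {
    "authorise the VRN before handling the request" in {
      mockAuthAction(vrn)
      // ... build the controller with `cc` and `mockAppContext`,
      // then exercise it with a stubbed request here
    }
  }
}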
Example 137
Source File: UnitSpec.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.vatapi

import org.joda.time.{DateTime, DateTimeZone, LocalDate}
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.{AnyWordSpec, AsyncWordSpec}
import play.api.test.{DefaultAwaitTimeout, FutureAwaits}

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.language.postfixOps

trait BaseUnitSpec extends Matchers with OptionValues with TestUtils with FutureAwaits
  with DefaultAwaitTimeout {
  implicit val timeout: FiniteDuration = 5 seconds

  def await[T](f: Future[T])(implicit duration: FiniteDuration = timeout): T =
    Await.result(f, duration)
}

trait UnitSpec extends AnyWordSpec with BaseUnitSpec {
  implicit def extractAwait[A](future: Future[A]): A = await[A](future)

  def await[A](future: Future[A])(implicit timeout: Duration): A = Await.result(future, timeout)
}

trait AsyncUnitSpec extends AsyncWordSpec with BaseUnitSpec

trait TestUtils {
  private val vrnGenerator = VrnGenerator()

  def now: DateTime = DateTime.now(DateTimeZone.UTC)
  def generateVrn = vrnGenerator.nextVrn()

  implicit def toLocalDate(d: DateTime): LocalDate = d.toLocalDate
}

object TestUtils extends TestUtils 
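A minimal sketch of the implicit awaiting that `UnitSpec` enables (the spec itself is illustrative, not from the sources):

import scala.concurrent.Future

// Relies on BaseUnitSpec's implicit 5-second timeout and UnitSpec's
// implicit `extractAwait` conversion defined above.
class ExtractAwaitExampleSpec extends UnitSpec {
  "extractAwait" should {
    "implicitly await a completed Future" in {
      val value: String = Future.successful("done") // Future[String] awaited implicitly
      value shouldBe "done"
    }
  }
}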
Example 138
Source File: SeleniumTest.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.web

import java.util.concurrent.TimeUnit

import org.openqa.selenium.firefox.{FirefoxDriver, FirefoxOptions}
import org.openqa.selenium.remote.RemoteWebDriver
import org.openqa.selenium.{Dimension, WebElement}
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

private trait ServerConfig {
  def init(): Unit
  def createUrl(part: String): String
  def destroy(): Unit
}

// Doesn't launch embedded guide app server
private final class ExternalServerConfig(urlPrefix: String) extends ServerConfig {
  require(!urlPrefix.endsWith("/"))

  override def createUrl(part: String): String = {
    require(part.startsWith("/"))
    urlPrefix + part
  }

  override def init(): Unit = {}
  override def destroy(): Unit = {}
}

// Launches embedded guide server
private final class InternalServerConfig extends ServerConfig {
  private val server = Launcher.createApplicationServer()

  override def init(): Unit = server.start()

  override def destroy(): Unit = server.stop()

  override def createUrl(part: String): String = {
    require(part.startsWith("/"))
    s"http://127.0.0.2:${server.port}$part"
  }
}

abstract class SeleniumTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with Eventually {
  override implicit val patienceConfig: PatienceConfig = PatienceConfig(scaled(Span(10, Seconds)), scaled(Span(50, Millis)))

  protected final val driver: RemoteWebDriver = new FirefoxDriver(new FirefoxOptions().setHeadless(true))
  driver.manage().timeouts().implicitlyWait(200, TimeUnit.MILLISECONDS)
  driver.manage().window().setSize(new Dimension(1440, 800))

  protected final def findElementById(id: String): WebElement = eventually {
    driver.findElementById(id)
  }

  protected def url: String

  private val server: ServerConfig = new InternalServerConfig

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    server.init()
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    driver.get(server.createUrl(url))
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    server.destroy()
    driver.close()
  }
} 
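Concrete suites only provide `url` (and typically use `findElementById`). A sketch with a placeholder path and element id (both are assumptions, not taken from the guide application):

// Illustrative subclass of the SeleniumTest base above.
class DemoPageTest extends SeleniumTest {
  override protected def url: String = "/frontend/demos" // placeholder path

  "Demo page" should {
    "render its header" in {
      findElementById("demo-header") // placeholder id; fails via `eventually` if absent
    }
  }
}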
Example 139
Source File: SmartOLEDTest.scala    From Pi-Akka-Cluster   with Apache License 2.0 5 votes vote down vote up
package eroled

import org.mockito.ArgumentMatchers.any
import org.mockito.MockitoSugar
import org.scalatest.wordspec.AnyWordSpec

class SmartOLEDTest extends AnyWordSpec with MockitoSugar {
   "SmartOLED" should {
      "draw spreadsheet in columns if less then screen" in {
         val a: TextCanvas = mock[TextCanvas]
         when(a.setFont(any(classOf[Font]))).thenCallRealMethod()
         when(a.drawSpreadsheetInColumns(any(classOf[Array[Array[String]]]))).thenCallRealMethod()
         a.setFont(new BasicFont())
         a.drawSpreadsheetInColumns(Array(Array("123", "123"), Array("234", "234")))

         verify(a, times(1))
            .drawMultilineString(
               "123                          123\n" +
               "234                          234\n")
      }

      "draw spreadsheet for many key-values" in {
         val a: TextCanvas = mock[TextCanvas]
         when(a.setFont(any(classOf[Font]))).thenCallRealMethod()
         when(a.drawSpreadsheetInColumns(any(classOf[Array[Array[String]]]))).thenCallRealMethod()

         a.setFont(new BasicFont())

         a.drawSpreadsheetInColumns(Array(Array("123 ", "123"),
            Array("234", "234"), Array("234", "234"), Array("234", "234"),
            Array("234", "234"), Array("234  ", "234")))

         verify(a, times(1))
            .drawMultilineString(
               "123          123 | 234          234\n" +
               "234          234 | 234          234\n" +
               "234          234\n234          234\n")
      }


      "draw keyValues" in {
         val a: TextCanvas = mock[TextCanvas]
         when(a.setFont(any(classOf[Font]))).thenCallRealMethod()
         when(a.drawKeyValues(any(classOf[Array[Array[String]]]))).thenCallRealMethod()

         a.setFont(new BasicFont())
         a.drawKeyValues(Array(Array("123 ", "123"), Array("234", "234"), Array("234", "234")
            , Array("234  ", "234"),
            Array("234", "234"), Array("234", "234")))

         verify(a, times(1))
            .drawMultilineString(
               "123  123 | 234234\n" +
               "234  234 | 234234\n" +
               "234  234\n" +
               "234  234\n")
      }
   }


} 
Example 140
Source File: CellMappingSpec.scala    From Pi-Akka-Cluster   with Apache License 2.0 5 votes vote down vote up
package akkapi.cluster.sudoku

import akkapi.cluster.sudoku.CellMappings._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CellMappingSpec extends AnyWordSpec with Matchers {

  "Mapping row coordinates" should {
    "result in correct column & block coordinates" in {
      rowToColumnCoordinates(0, 0) shouldBe ((0, 0))
      rowToBlockCoordinates(0, 0)  shouldBe ((0, 0))
      rowToColumnCoordinates(8, 8) shouldBe ((8, 8))
      rowToBlockCoordinates(8, 8)  shouldBe ((8, 8))
      rowToColumnCoordinates(3, 4) shouldBe ((4, 3))
      rowToBlockCoordinates(3, 4)  shouldBe ((4, 1))
      rowToBlockCoordinates(4, 3)  shouldBe ((4, 3))
    }
  }

  "Mapping column coordinates" should {
    "result in correct row & block coordinates" in {
      columnToRowCoordinates(0, 0)   shouldBe ((0, 0))
      columnToBlockCoordinates(0, 0) shouldBe ((0, 0))
      columnToRowCoordinates(8, 8)   shouldBe ((8, 8))
      columnToBlockCoordinates(8, 8) shouldBe ((8, 8))
      columnToRowCoordinates(3, 4)   shouldBe ((4, 3))
      columnToBlockCoordinates(3, 4) shouldBe ((4, 3))
      columnToBlockCoordinates(4, 3) shouldBe ((4, 1))
    }
  }

  "Mapping block coordinates" should {
    "result in correct row & column coordinates" in {
      blockToRowCoordinates(0, 0)    shouldBe ((0, 0))
      blockToColumnCoordinates(0, 0) shouldBe ((0, 0))
      blockToRowCoordinates(8, 8)    shouldBe ((8, 8))
      blockToColumnCoordinates(8, 8) shouldBe ((8, 8))
      blockToRowCoordinates(4, 3)    shouldBe ((4, 3))
      blockToColumnCoordinates(4, 3) shouldBe ((3, 4))
      blockToRowCoordinates(3, 4)    shouldBe ((4, 1))
      blockToColumnCoordinates(3, 4) shouldBe ((1, 4))
      blockToRowCoordinates(5, 5)    shouldBe ((4, 8))
      blockToColumnCoordinates(5, 5) shouldBe ((8, 4))
    }
  }
} 
Example 141
Source File: ClientTestBase.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.algebra.client

import java.net.ServerSocket

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.core.WireMockConfiguration.options
import endpoints4s.algebra
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}

import scala.concurrent.Future
import scala.concurrent.duration._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

trait ClientTestBase[T <: algebra.Endpoints]
    extends AnyWordSpec
    with Matchers
    with ScalaFutures
    with BeforeAndAfterAll
    with BeforeAndAfter {

  override implicit def patienceConfig: PatienceConfig =
    PatienceConfig(15.seconds, 10.millisecond)

  val wiremockPort = findOpenPort
  val wireMockServer = new WireMockServer(options().port(wiremockPort))

  override def beforeAll(): Unit = wireMockServer.start()

  override def afterAll(): Unit = wireMockServer.stop()

  before {
    wireMockServer.resetAll()
  }

  def findOpenPort: Int = {
    val socket = new ServerSocket(0)
    try socket.getLocalPort
    finally if (socket != null) socket.close()
  }

  val client: T

  def call[Req, Resp](
      endpoint: client.Endpoint[Req, Resp],
      args: Req
  ): Future[Resp]

  def encodeUrl[A](url: client.Url[A])(a: A): String

} 
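Concrete subclasses supply the `client` interpreter and the `call`/`encodeUrl` implementations; independently of that, tests can stub the WireMock server started above. An illustrative fragment (the "/status" endpoint and its "OK" body are placeholders):

import com.github.tomakehurst.wiremock.client.WireMock._

// Illustrative per-test stubbing against the server started above.
trait StatusStubbing { this: ClientTestBase[_] =>
  def stubStatusEndpoint(): Unit =
    wireMockServer.stubFor(
      get(urlEqualTo("/status"))
        .willReturn(aResponse().withStatus(200).withBody("OK"))
    )
}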
Example 142
Source File: ServerTestBase.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.algebra.server

import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.`Content-Type`
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.util.ByteString
import endpoints4s.algebra
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}

import scala.concurrent.duration._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.{ExecutionContext, Future}

trait ServerTestBase[T <: algebra.Endpoints]
    extends AnyWordSpec
    with Matchers
    with ScalaFutures
    with BeforeAndAfterAll
    with BeforeAndAfter {

  override implicit def patienceConfig: PatienceConfig =
    PatienceConfig(10.seconds, 10.millisecond)

  val serverApi: T

  
  case class Malformed(errors: Seq[String]) extends DecodedUrl[Nothing]
} 
Example 143
Source File: DescriptionsTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.openapi.model.OpenApi
import endpoints4s.{algebra, openapi}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class DescriptionsTest extends AnyWordSpec with Matchers {

  "Schemas" should {

    "Include descriptions in documentation" in new Fixtures {
      checkDescription(recordSchema)("a foo bar record")
      checkDescription(coprodSchema)("a foo or a bar coprod")
      checkDescription(enumSchema)("a foo or a bar enum")
      checkDescription(arraySchema)("a list of ints")
      checkDescription(mapSchema)("a map of ints")
      checkDescription(pairSchema)("a pair of int and string")
      checkDescription(hexSchema)("a hex string")
      checkDescription(fallbackSchema)("a foo or 1 or 4")
    }

  }

  trait FixturesAlg extends algebra.JsonSchemas {

    override def defaultDiscriminatorName: String = "kind"

    val recordSchema = (
      field[String]("foo") zip
        field[Int]("bar")
    ).withDescription("a foo bar record")

    val coprodSchema = {
      val left = field[String]("foo").tagged("L")
      val right = field[Int]("bar").tagged("R")
      left.orElse(right).withDescription("a foo or a bar coprod")
    }

    val enumSchema =
      stringEnumeration(Seq("foo", "bar"))(identity)
        .withDescription("a foo or a bar enum")

    val arraySchema =
      arrayJsonSchema[List, Int].withDescription("a list of ints")

    val mapSchema = mapJsonSchema[Int].withDescription("a map of ints")

    val pairSchema =
      implicitly[JsonSchema[(Int, String)]]
        .withDescription("a pair of int and string")

    val hexSchema =
      stringJsonSchema(Some("hex")).withDescription("a hex string")

    val fallbackSchema =
      defaultStringJsonSchema
        .orFallbackTo(longJsonSchema)
        .withDescription("a foo or 1 or 4")
  }

  trait Fixtures extends FixturesAlg with openapi.Endpoints with openapi.JsonEntitiesFromSchemas {

    def checkDescription[A](schema: JsonSchema[A])(description: String) = {
      assert(
        OpenApi.schemaJson(toSchema(schema.docs))("description") == ujson
          .Str(description)
      )
    }

  }

} 
Example 144
Source File: OneOfTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.algebra
import endpoints4s.openapi.model.OpenApi
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class OneOfTest extends AnyWordSpec with Matchers {

  "Schemas" should {
    "Document enumerated oneOf schemas" in new Fixtures {
      val expected =
        ujson.Obj(
          "oneOf" -> ujson.Arr(
            ujson.Obj(
              "type" -> ujson.Str("integer"),
              "format" -> ujson.Str("int32")
            ),
            ujson.Obj("type" -> ujson.Str("boolean"))
          )
        )
      assert(OpenApi.schemaJson(toSchema(intOrBoolean.docs)) == expected)
    }
  }

  trait Fixtures extends algebra.JsonSchemasFixtures with JsonSchemas

} 
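The `intOrBoolean` schema comes from `algebra.JsonSchemasFixtures`; judging from the `orFallbackTo` combinator used in the neighbouring examples, it plausibly looks like this (a sketch, not copied from the fixtures):

// Sketch of the fixture used above (assumed, not copied): a fallback
// of the two primitive schemas, rendered as the `oneOf` checked above.
trait IntOrBooleanFixture extends endpoints4s.algebra.JsonSchemas {
  val intOrBoolean: JsonSchema[Either[Int, Boolean]] =
    intJsonSchema.orFallbackTo(booleanJsonSchema)
}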
Example 145
Source File: ChunkedEntitiesTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.openapi.model.Schema.{Array, Primitive, Reference}
import endpoints4s.openapi.model._
import org.scalatest.wordspec.AnyWordSpec

class ChunkedEntitiesTest extends AnyWordSpec {

  "Chunked Endpoint" in {
    val expected =
      PathItem(
        Map(
          "get" -> Operation(
            None,
            None,
            None,
            List(
              Parameter(
                "file",
                In.Path,
                required = true,
                None,
                Schema.simpleString
              )
            ),
            None,
            Map(
              "200" -> Response(
                "",
                Map.empty,
                Map("application/octet-stream" -> MediaType(None))
              ),
              "400" -> Response(
                "Client error",
                Map.empty,
                Map(
                  "application/json" -> MediaType(
                    Some(
                      Reference(
                        "endpoints.Errors",
                        Some(
                          Array(
                            Left(Primitive("string", None, None, None, None)),
                            None,
                            None,
                            None
                          )
                        ),
                        None
                      )
                    )
                  )
                )
              ),
              "500" -> Response(
                "Server error",
                Map.empty,
                Map(
                  "application/json" -> MediaType(
                    Some(
                      Reference(
                        "endpoints.Errors",
                        Some(
                          Array(
                            Left(Primitive("string", None, None, None, None)),
                            None,
                            None,
                            None
                          )
                        ),
                        None
                      )
                    )
                  )
                )
              )
            ),
            Nil,
            Nil,
            Map.empty,
            false
          )
        )
      )
    assert(Fixtures.documentation.paths("/assets2/{file}") == expected)
  }

} 
Example 146
Source File: TitlesTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.openapi.model.OpenApi
import endpoints4s.{algebra, openapi}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TitlesTest extends AnyWordSpec with Matchers {

  "Schemas" should {

    "Include titles in documentation" in new Fixtures {
      checkTitle(recordSchema)("record")
      checkTitle(coprodSchema)("coprod")
      checkTitle(enumSchema)("enum")
      checkTitle(arraySchema)("list of ints")
      checkTitle(mapSchema)("map of ints")
      checkTitle(pairSchema)("pair")
      checkTitle(hexSchema)("hex string")
      checkTitle(fallbackSchema)("fallback literals")
    }

  }

  trait FixturesAlg extends algebra.JsonSchemas {

    override def defaultDiscriminatorName: String = "kind"

    val recordSchema = (
      field[String]("foo") zip
        field[Int]("bar")
    ).withTitle("record")

    val coprodSchema = {
      val left = field[String]("foo").tagged("L")
      val right = field[Int]("bar").tagged("R")
      left.orElse(right).withTitle("coprod")
    }

    val enumSchema =
      stringEnumeration(Seq("foo", "bar"))(identity).withTitle("enum")

    val arraySchema = arrayJsonSchema[List, Int].withTitle("list of ints")

    val mapSchema = mapJsonSchema[Int].withTitle("map of ints")

    val pairSchema =
      implicitly[JsonSchema[(Int, String)]].withTitle("pair")

    val hexSchema =
      stringJsonSchema(Some("hex")).withTitle("hex string")

    val fallbackSchema =
      defaultStringJsonSchema
        .orFallbackTo(longJsonSchema)
        .withTitle("fallback literals")
  }

  trait Fixtures extends FixturesAlg with openapi.Endpoints with openapi.JsonEntitiesFromSchemas {

    def checkTitle[A](schema: JsonSchema[A])(title: String) = {
      assert(
        OpenApi.schemaJson(toSchema(schema.docs))("title") == ujson.Str(title)
      )
    }

  }

} 
Example 147
Source File: ExamplesTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.openapi.model.OpenApi
import endpoints4s.{algebra, openapi}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExamplesTest extends AnyWordSpec with Matchers {

  "Schemas" should {

    "Include examples in documentation" in new Fixtures {
      checkExample(recordSchema)(
        ujson.Obj("foo" -> ujson.Str("Quux"), "bar" -> ujson.Num(42))
      )
      checkExample(coprodSchema)(
        ujson.Obj("kind" -> ujson.Str("R"), "bar" -> ujson.Num(42))
      )
      checkExample(enumSchema)(ujson.Str("foo"))
      checkExample(arraySchema)(ujson.Arr(ujson.Num(1), ujson.Num(2)))
      checkExample(mapSchema)(
        ujson.Obj("foo" -> ujson.Num(1), "bar" -> ujson.Num(2))
      )
      checkExample(pairSchema)(ujson.Arr(ujson.Num(42), ujson.Str("foo")))
      checkExample(hexSchema)(ujson.Str("deadbeef"))
      checkExample(fallbackSchema)(ujson.Num(1))
    }

  }

  trait FixturesAlg extends algebra.JsonSchemas {

    override def defaultDiscriminatorName: String = "kind"

    val recordSchema = (
      field[String]("foo") zip
        field[Int]("bar")
    ).withExample(("Quux", 42))

    val coprodSchema = {
      val left = field[String]("foo").tagged("L")
      val right = field[Int]("bar").tagged("R")
      left.orElse(right).withExample(Right(42))
    }

    val enumSchema =
      stringEnumeration(Seq("foo", "bar"))(identity).withExample("foo")

    val arraySchema = arrayJsonSchema[List, Int].withExample(1 :: 2 :: Nil)

    val mapSchema = mapJsonSchema[Int].withExample(Map("foo" -> 1, "bar" -> 2))

    val pairSchema =
      implicitly[JsonSchema[(Int, String)]].withExample((42, "foo"))

    val hexSchema =
      stringJsonSchema(Some("hex")).withExample("deadbeef")

    val fallbackSchema =
      defaultStringJsonSchema
        .orFallbackTo(longJsonSchema)
        .withExample(Right(1L))
  }

  trait Fixtures extends FixturesAlg with openapi.Endpoints with openapi.JsonEntitiesFromSchemas {

    def checkExample[A](schema: JsonSchema[A])(example: ujson.Value) = {
      assert(OpenApi.schemaJson(toSchema(schema.docs))("example") == example)
    }

  }

} 
Example 148
Source File: SumTypedRequests.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.openapi

import endpoints4s.{algebra, openapi}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SumTypedRequests extends AnyWordSpec with Matchers {

  "Request bondy content" should {

    "Include all supported content-types" in new Fixtures {
      checkRequestContentTypes(sumTypedEndpoint)(
        Set("text/plain", "application/json")
      )
      checkRequestContentTypes(onlyTextEndpoint)(Set("text/plain"))
      checkRequestContentTypes(onlyJsonEndpoint)(Set("application/json"))
    }
  }

  trait FixtureAlg
      extends algebra.Endpoints
      with algebra.JsonEntitiesFromSchemas
      with algebra.JsonSchemasFixtures {

    import User._ // Extra help for Scala 2.12 to find User json schema

    def sumTypedEndpoint =
      endpoint[Either[User, String], Unit](
        post(path / "user-or-name", jsonRequest[User].orElse(textRequest)),
        ok(emptyResponse)
      )

    def onlyTextEndpoint =
      endpoint[String, Unit](
        post(path / "name", textRequest),
        ok(emptyResponse)
      )

    def onlyJsonEndpoint =
      endpoint[User, Unit](
        post(path / "user", jsonRequest[User]),
        ok(emptyResponse)
      )
  }

  trait Fixtures extends FixtureAlg with openapi.Endpoints with openapi.JsonEntitiesFromSchemas {

    def checkRequestContentTypes[A](
        endpoint: DocumentedEndpoint
    )(contentTypes: Set[String]) = {
      val foundContentTypes = endpoint.item.operations.values.iterator
        .flatMap(_.requestBody.iterator)
        .flatMap(_.content.keys)
        .toSet
      assert(foundContentTypes == contentTypes)
    }

  }
} 
Example 149
Source File: EndpointsTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.akkahttp.server

import akka.http.scaladsl.model.StatusCodes.InternalServerError
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import endpoints4s.algebra

import scala.concurrent.Future
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec


class EndpointsEntitiesTestApi extends EndpointsTestApi with JsonEntities

class EndpointsTest extends AnyWordSpec with Matchers with ScalatestRouteTest {

  object TestRoutes extends EndpointsEntitiesTestApi {

    val singleStaticGetSegment = endpoint[Unit, Unit](
      get(path / "segment1"),
      (_: Unit) => complete("Ok")
    ).implementedBy(_ => ())

    val smokeEndpointSyncRoute =
      smokeEndpoint.implementedBy(_ => sys.error("Sorry."))

    val smokeEndpointAsyncRoute =
      smokeEndpoint.implementedByAsync(_ => Future.failed(new Exception("Sorry.")))

  }

  "Single segment route" should {

    "match single segment request" in {
      // tests:
      Get("/segment1") ~> TestRoutes.singleStaticGetSegment ~> check {
        responseAs[String] shouldEqual "Ok"
      }
    }
    "leave GET requests to other paths unhandled" in {
      Get("/segment1/segment2") ~> TestRoutes.singleStaticGetSegment ~> check {
        handled shouldBe false
      }
    }

  }

  "Routes" should {

    "Handle exceptions by default" in {
      Get("/user/foo/description?name=a&age=1") ~> TestRoutes.smokeEndpointAsyncRoute ~> check {
        handled shouldBe true
        status shouldBe InternalServerError
        responseAs[String] shouldBe "[\"Sorry.\"]"
      }
    }

  }

} 
Example 150
Source File: ExampleKafkaStreamsSpec.scala    From embedded-kafka   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExampleKafkaStreamsSpec extends AnyWordSpec with Matchers {
  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Assertion] { consumer =>
          val consumedMessages =
            consumer.consumeLazily[(String, String)](outTopic)
          consumedMessages.take(2).toList should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
          val h :: _ = consumedMessages.drop(2).toList
          h should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")

        withConsumer[String, String, Assertion] { consumer =>
          consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
        }
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build())(
        withConsumer[String, String, Assertion]({ consumer =>
          publishToKafka(inTopic, "hello", "world")
          val h :: _ = consumer.consumeLazily[(String, String)](outTopic).toList
          h should be("hello" -> "world")
        })
      )(config)

    }
  }
} 
Example 151
Source File: ResponseBodyMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.exceptions.MarshallerErrorException
import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.MarshallingTestObjects.SomeCaseClass
import com.wix.e2e.http.matchers.drivers.{CustomMarshallerProvider, HttpMessageTestSupport, MarshallerTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class ResponseBodyMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport with MarshallerTestSupport with CustomMarshallerProvider

  "ResponseBodyMatchers" should {

    "exact match on response body" in new ctx {
      aResponseWith(content) should haveBodyWith(content)
      aResponseWith(content) should not( haveBodyWith(anotherContent) )
    }

    "match underlying matcher with body content" in new ctx {
      aResponseWith(content) should haveBodyThat(must = be( content ))
      aResponseWith(content) should not( haveBodyThat(must = be( anotherContent )) )
    }

    "exact match on response binary body" in new ctx {
      aResponseWith(binaryContent) should haveBodyWith(binaryContent)
      aResponseWith(binaryContent) should not( haveBodyWith(anotherBinaryContent) )
    }

    "match underlying matcher with binary body content" in new ctx {
      aResponseWith(binaryContent) should haveBodyDataThat(must = be( binaryContent ))
      aResponseWith(binaryContent) should not( haveBodyDataThat(must = be( anotherBinaryContent )) )
    }

    "handle empty body" in new ctx {
      aResponseWithoutBody should not( haveBodyWith(content))
    }

    "support unmarshalling body content with user custom unmarshaller" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      aResponseWith(content) should haveBodyWith(entity = someObject)
      aResponseWith(content) should not( haveBodyWith(entity = anotherObject) )
    }

    "provide a meaningful explanation why match failed" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      failureMessageFor(haveBodyEntityThat(must = be(anotherObject)), matchedOn = aResponseWith(content)) shouldBe
        s"Failed to match: ['$someObject' != '$anotherObject'] with content: [$content]"
    }

    "provide a proper message to user in case of a badly behaving marshaller" in new ctx {
      givenBadlyBehavingUnmarshallerFor[SomeCaseClass](withContent = content)

      the[MarshallerErrorException] thrownBy haveBodyWith(entity = someObject).apply( aResponseWith(content) )
    }

    "provide a proper message to user sent a matcher to an entity matcher" in new ctx {
      failureMessageFor(haveBodyWith(entity = be(someObject)), matchedOn = aResponseWith(content)) shouldBe
        s"Matcher misuse: `haveBodyWith` received a matcher to match against, please use `haveBodyThat` instead."
    }

    "support custom matcher for user object" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      aResponseWith(content) should haveBodyEntityThat(must = be(someObject))
      aResponseWith(content) should not( haveBodyEntityThat(must = be(anotherObject)) )
    }
  }
} 
Example 152
Source File: RequestContentTypeMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import akka.http.scaladsl.model.ContentTypes._
import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class RequestContentTypeMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "RequestContentTypeMatchers" should {

    "exact match on request json content type" in new ctx {
      aRequestWith(`application/json`) should haveJsonBody
      aRequestWith(`text/csv(UTF-8)`) should not( haveJsonBody )
    }

    "exact match on request text plain content type" in new ctx {
      aRequestWith(`text/plain(UTF-8)`) should haveTextPlainBody
      aRequestWith(`text/csv(UTF-8)`) should not( haveTextPlainBody )
    }

    "exact match on request form url encoded content type" in new ctx {
      aRequestWith(`application/x-www-form-urlencoded`) should haveFormUrlEncodedBody
      aRequestWith(`text/csv(UTF-8)`) should not( haveFormUrlEncodedBody )
    }

    "exact match on multipart request content type" in new ctx {
      aRequestWith(`multipart/form-data`) should haveMultipartFormBody
      aRequestWith(`text/csv(UTF-8)`) should not( haveMultipartFormBody )
    }
  }
} 
Example 153
Source File: RequestMethodMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import akka.http.scaladsl.model.HttpMethods._
import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpMessageTestSupport
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class RequestMethodMatchersTest extends AnyWordSpec {

  trait ctx extends HttpMessageTestSupport

  "RequestMethodMatchers" should {

    "match all request methods" in new ctx {
      Seq(POST -> bePost, GET -> beGet, PUT -> bePut, DELETE -> beDelete,
          HEAD -> beHead, OPTIONS -> beOptions,
          PATCH -> bePatch, TRACE -> beTrace, CONNECT -> beConnect)
        .foreach { case (method, matcherForMethod) =>

          aRequestWith( method ) should matcherForMethod
          aRequestWith( randomMethodThatIsNot( method )) should not( matcherForMethod )
        }
    }
  }
} 
Example 154
Source File: ResponseTransferEncodingMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import akka.http.scaladsl.model.TransferEncodings
import akka.http.scaladsl.model.TransferEncodings._
import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class ResponseTransferEncodingMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport


  "ResponseTransferEncodingMatchersTest" should {

    "support matching against chunked transfer encoding" in new ctx {
      aChunkedResponse should beChunkedResponse
      aResponseWithoutTransferEncoding should not( beChunkedResponse )
      aResponseWithTransferEncodings(compress) should not( beChunkedResponse )
      aResponseWithTransferEncodings(chunked) should beChunkedResponse
    }

    "failure message in case no transfer encoding header should state that response did not have the proper header" in new ctx {
      failureMessageFor(beChunkedResponse, matchedOn = aResponseWithoutTransferEncoding) shouldBe
        "Expected Chunked response while response did not contain `Transfer-Encoding` header"
    }

    "failure message in case transfer encoding header exists should state that transfer encoding has a different value" in new ctx {
      failureMessageFor(beChunkedResponse, matchedOn = aResponseWithTransferEncodings(compress, TransferEncodings.deflate)) shouldBe
        "Expected Chunked response while response has `Transfer-Encoding` header with values ['compress', 'deflate']"
    }

    "support matching against transfer encoding header values" in new ctx {
      aResponseWithTransferEncodings(compress) should haveTransferEncodings("compress")
      aResponseWithTransferEncodings(compress) should not( haveTransferEncodings("deflate") )
    }

    "support matching against transfer encoding header with multiple values, matcher will validate that response has all of the expected values" in new ctx {
      aResponseWithTransferEncodings(compress, deflate) should haveTransferEncodings("deflate", "compress")
      aResponseWithTransferEncodings(compress, deflate) should haveTransferEncodings("compress")
    }

    "properly match chunked encoding" in new ctx {
      aChunkedResponse should haveTransferEncodings("chunked")
      aChunkedResponseWith(compress) should haveTransferEncodings("compress", "chunked")
      aChunkedResponseWith(compress) should haveTransferEncodings("chunked")
    }

    "failure message should describe what was the expected transfer encodings and what was found" in new ctx {
      failureMessageFor(haveTransferEncodings("deflate", "compress"), matchedOn = aChunkedResponseWith(gzip)) shouldBe
        s"Expected transfer encodings ['deflate', 'compress'] does not match actual transfer encoding ['chunked', 'gzip']"
    }

    "failure message in case no Transfer-Encoding for response should be handled" in new ctx {
      failureMessageFor(haveTransferEncodings("chunked"), matchedOn = aResponseWithoutTransferEncoding) shouldBe
        "Response did not contain `Transfer-Encoding` header."
    }

    "failure message if someone tries to match content-type in headers matchers" in new ctx {
      failureMessageFor(haveAllHeadersOf(transferEncodingHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Transfer-Encoding` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `beChunkedResponse` or `haveTransferEncodings` matcher instead.""".stripMargin
      failureMessageFor(haveAnyHeadersOf(transferEncodingHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Transfer-Encoding` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `beChunkedResponse` or `haveTransferEncodings` matcher instead.""".stripMargin
      failureMessageFor(haveTheSameHeadersAs(transferEncodingHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Transfer-Encoding` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `beChunkedResponse` or `haveTransferEncodings` matcher instead.""".stripMargin
    }
  }
} 
Example 155
Source File: ResponseHeadersMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.CommonTestMatchers.AlwaysMatcher
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class ResponseHeadersMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseHeadersMatchers" should {

    "contain header will check if any header is present" in new ctx {
      aResponseWithHeaders(header, anotherHeader) should haveAnyHeadersOf(header)
    }

    "return detailed message on hasAnyOf match failure" in new ctx {
      failureMessageFor(haveAnyHeadersOf(header, anotherHeader), matchedOn = aResponseWithHeaders(yetAnotherHeader, andAnotherHeader)) shouldBe
        s"Could not find header [${header._1}, ${anotherHeader._1}] but found those: [${yetAnotherHeader._1}, ${andAnotherHeader._1}]"
    }

    "contain header will check if all headers are present" in new ctx {
      aResponseWithHeaders(header, anotherHeader, yetAnotherHeader) should haveAllHeadersOf(header, anotherHeader)
    }

    "allOf matcher will return a message stating what was found, and what is missing from header list" in new ctx {
      failureMessageFor(haveAllHeadersOf(header, anotherHeader), matchedOn = aResponseWithHeaders(yetAnotherHeader, header)) shouldBe
        s"Could not find header [${anotherHeader._1}] but found those: [${header._1}]."
    }

    "same header as will check if the same headers is present" in new ctx {
      aResponseWithHeaders(header, anotherHeader) should haveTheSameHeadersAs(header, anotherHeader)
      aResponseWithHeaders(header, anotherHeader) should not( haveTheSameHeadersAs(header) )
      aResponseWithHeaders(header) should not( haveTheSameHeadersAs(header, anotherHeader) )
    }

    "haveTheSameHeadersAs matcher will return a message stating what was found, and what is missing from header list" in new ctx {
      failureMessageFor(haveTheSameHeadersAs(header, anotherHeader), matchedOn = aResponseWithHeaders(yetAnotherHeader, header)) shouldBe
        s"Request header is not identical, missing headers from request: [${anotherHeader._1}], request contained extra headers: [${yetAnotherHeader._1}]."
    }

    "header name compare should be case insensitive" in new ctx {
      aResponseWithHeaders(header) should haveAnyHeadersOf(header.copy(_1 = header._1.toUpperCase))
      aResponseWithHeaders(header) should not( haveAnyHeadersOf(header.copy(_2 = header._2.toUpperCase)) )

      aResponseWithHeaders(header) should haveAllHeadersOf(header.copy(_1 = header._1.toUpperCase))
      aResponseWithHeaders(header) should not( haveAllHeadersOf(header.copy(_2 = header._2.toUpperCase)) )

      aResponseWithHeaders(header) should haveTheSameHeadersAs(header.copy(_1 = header._1.toUpperCase))
      aResponseWithHeaders(header) should not( haveTheSameHeadersAs(header.copy(_2 = header._2.toUpperCase)) )
    }

    "request with no headers will show a 'no headers' message" in new ctx {
      failureMessageFor(haveAnyHeadersOf(header), matchedOn = aResponseWithNoHeaders ) shouldBe
        "Response did not contain any headers."

      failureMessageFor(haveAllHeadersOf(header), matchedOn = aResponseWithNoHeaders ) shouldBe
        "Response did not contain any headers."

      failureMessageFor(haveTheSameHeadersAs(header), matchedOn = aResponseWithNoHeaders ) shouldBe
        "Response did not contain any headers."
    }

    "ignore cookies and set cookies from headers comparison" in new ctx {
      aResponseWithCookies(cookie) should not( haveAnyHeadersOf("Set-Cookie" -> s"${cookie.name}=${cookie.value}") )
      aResponseWithCookies(cookie) should not( haveAllHeadersOf("Set-Cookie" -> s"${cookie.name}=${cookie.value}") )
      aResponseWithCookies(cookie) should not( haveTheSameHeadersAs("Set-Cookie" -> s"${cookie.name}=${cookie.value}") )
    }

    "match if any header satisfy the composed matcher" in new ctx {
      aResponseWithHeaders(header) should haveAnyHeaderThat(must = be(header._2), withHeaderName = header._1)
      aResponseWithHeaders(header) should not( haveAnyHeaderThat(must = be(anotherHeader._2), withHeaderName = header._1) )
    }

    "return informative error messages" in new ctx {
      failureMessageFor(haveAnyHeaderThat(must = AlwaysMatcher(), withHeaderName = nonExistingHeaderName), matchedOn = aResponseWithHeaders(header)) shouldBe
        s"Response contain header names: [${header._1}] which did not contain: [$nonExistingHeaderName]"
      failureMessageFor(haveAnyHeaderThat(must = AlwaysMatcher(), withHeaderName = nonExistingHeaderName), matchedOn = aResponseWithNoHeaders) shouldBe
        "Response did not contain any headers."
      failureMessageFor(haveAnyHeaderThat(must = be(anotherHeader._2), withHeaderName = header._1), matchedOn = aResponseWithHeaders(header)) shouldBe
        s"Response header [${header._1}], did not match { ${be(anotherHeader._2).apply(header._2).failureMessage} }"
    }
  }
} 
Example 156
Source File: ResponseCookiesMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.HttpResponseMatchers._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class ResponseCookiesMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseCookiesMatchers" should {

    "match if cookiePair with name is found" in new ctx {
      aResponseWithCookies(cookie) should receivedCookieWith(cookie.name)
    }

    "failure message should describe which cookies are present and which did not match" in new ctx {
      failureMessageFor(receivedCookieWith(cookie.name), matchedOn = aResponseWithCookies(anotherCookie, yetAnotherCookie)) should
        ( include(cookie.name) and include(anotherCookie.name) and include(yetAnotherCookie.name) )
    }

    "failure message for response withoout cookies will print that the response did not contain any cookies" in new ctx {
      receivedCookieWith(cookie.name).apply( aResponseWithNoCookies ).failureMessage should
        include("Response did not contain any `Set-Cookie` headers.")
    }

    "allow to compose matcher with custom cookiePair matcher" in new ctx {
      aResponseWithCookies(cookie) should receivedCookieThat(must = cookieWith(cookie.value))
    }
  }
} 
Example 157
Source File: RequestBodyMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.exceptions.MarshallerErrorException
import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import com.wix.e2e.http.matchers.drivers.MarshallingTestObjects.SomeCaseClass
import com.wix.e2e.http.matchers.drivers.{CustomMarshallerProvider, HttpMessageTestSupport, MarshallerTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class RequestBodyMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport with MarshallerTestSupport with CustomMarshallerProvider

  "ResponseBodyMatchers" should {

    "exact match on response body" in new ctx {
      aRequestWith(content) should haveBodyWith(content)
      aRequestWith(content) should not( haveBodyWith(anotherContent) )
    }

    "match underlying matcher with body content" in new ctx {
      aRequestWith(content) should haveBodyThat(must = be( content ))
      aRequestWith(content) should not( haveBodyThat(must = be( anotherContent )) )
    }

    "exact match on response binary body" in new ctx {
      aRequestWith(binaryContent) should haveBodyWith(binaryContent)
      aRequestWith(binaryContent) should not( haveBodyWith(anotherBinaryContent) )
    }

    "match underlying matcher with binary body content" in new ctx {
      aRequestWith(binaryContent) should haveBodyDataThat(must = be( binaryContent ))
      aRequestWith(binaryContent) should not( haveBodyDataThat(must = be( anotherBinaryContent )) )
    }

    "handle empty body" in new ctx {
      aRequestWithoutBody should not( haveBodyWith(content))
    }

    "support unmarshalling body content with user custom unmarshaller" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      aRequestWith(content) should haveBodyWith(entity = someObject)
      aRequestWith(content) should not( haveBodyWith(entity = anotherObject) )
    }

    "provide a meaningful explanation why match failed" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      failureMessageFor(haveBodyEntityThat(must = be(anotherObject)), matchedOn = aRequestWith(content)) shouldBe
        s"Failed to match: ['$someObject' != '$anotherObject'] with content: ['$content']"
      failureMessageFor(not(haveBodyEntityThat(must = be(anotherObject))), matchedOn = aRequestWith(content)) shouldBe
        s"Failed to match: ['$someObject'] was not equal to ['$anotherObject'] for content: ['$content']"
      failureMessageFor(not( haveBodyEntityThat(must = be(someObject))), matchedOn = aRequestWith(content)) shouldBe
        s"Failed to match: ['$someObject'] was equal to content: ['$content']"
    }

    "provide a proper message to user sent a matcher to an entity matcher" in new ctx {
      failureMessageFor(haveBodyWith(entity = be(someObject)), matchedOn = aRequestWith(content)) shouldBe
        "Matcher misuse: `haveBodyWith` received a matcher to match against, please use `haveBodyThat` instead."
      failureMessageFor(not( haveBodyWith(entity = be(someObject)) ), matchedOn = aRequestWith(content)) shouldBe
        "Matcher misuse: `haveBodyWith` received a matcher to match against, please use `haveBodyThat` instead."
    }

    "provide a proper message to user in case of a badly behaving marshaller" in new ctx {
      givenBadlyBehavingUnmarshallerFor[SomeCaseClass](withContent = content)

      the [MarshallerErrorException] thrownBy haveBodyWith(entity = someObject).apply( aRequestWith(content) )
    }

    "support custom matcher for user object" in new ctx {
      givenUnmarshallerWith[SomeCaseClass](someObject, forContent = content)

      aRequestWith(content) should haveBodyEntityThat(must = be(someObject))
      aRequestWith(content) should not( haveBodyEntityThat(must = be(anotherObject)) )
    }
  }
} 
Example 158
Source File: ResponseBodyAndStatusMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.api.Marshaller.Implicits.marshaller
import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.HttpResponseMatchers._
import com.wix.e2e.http.matchers.drivers.MarshallingTestObjects.SomeCaseClass
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class ResponseBodyAndStatusMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseBodyAndStatusMatchers" should {

    "match successful request with body content" in new ctx {
      aSuccessfulResponseWith(content) should beSuccessfulWith(content)
      aSuccessfulResponseWith(content) should not( beSuccessfulWith(anotherContent) )
    }

    "provide a proper message to user sent a matcher to an entity matcher" in new ctx {
      failureMessageFor(beSuccessfulWith(entity = be(content)), matchedOn = aResponseWith(content)) shouldBe
        s"Matcher misuse: `beSuccessfulWith` received a matcher to match against, please use `beSuccessfulWithEntityThat` instead."
    }

    "match successful request with body content matcher" in new ctx {
      aSuccessfulResponseWith(content) should beSuccessfulWithBodyThat(must = be( content ))
      aSuccessfulResponseWith(content) should not( beSuccessfulWithBodyThat(must = be( anotherContent )) )
    }

    "match invalid request with body content" in new ctx {
      anInvalidResponseWith(content) should beInvalidWith(content)
      anInvalidResponseWith(content) should not( beInvalidWith(anotherContent) )
    }

    "match invalid request with body content matcher" in new ctx {
      anInvalidResponseWith(content) should beInvalidWithBodyThat(must = be( content ))
      anInvalidResponseWith(content) should not( beInvalidWithBodyThat(must = be( anotherContent )) )
    }

    "match successful request with binary body content" in new ctx {
      aSuccessfulResponseWith(binaryContent) should beSuccessfulWith(binaryContent)
      aSuccessfulResponseWith(binaryContent) should not( beSuccessfulWith(anotherBinaryContent) )
    }

    "match successful request with binary body content matcher" in new ctx {
      aSuccessfulResponseWith(binaryContent) should beSuccessfulWithBodyDataThat(must = be( binaryContent ))
      aSuccessfulResponseWith(binaryContent) should not( beSuccessfulWithBodyDataThat(must = be( anotherBinaryContent )) )
    }

    "match successful request with entity" in new ctx {
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should beSuccessfulWith( someObject )
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should not( beSuccessfulWith( anotherObject ) )
    }

    "match successful request with entity with custom marshaller" in new ctx {
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should beSuccessfulWith( someObject )
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should not( beSuccessfulWith( anotherObject ) )
    }

    "match successful request with entity matcher" in new ctx {
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should beSuccessfulWithEntityThat[SomeCaseClass]( must = be( someObject ) )
      aSuccessfulResponseWith(marshaller.marshall(someObject)) should not( beSuccessfulWithEntityThat[SomeCaseClass]( must = be( anotherObject ) ) )
    }

    "match successful request with headers" in new ctx {
      aSuccessfulResponseWith(header, anotherHeader) should beSuccessfulWithHeaders(header, anotherHeader)
      aSuccessfulResponseWith(header) should not( beSuccessfulWithHeaders(anotherHeader) )
    }

    "match successful request with header matcher" in new ctx {
      aSuccessfulResponseWith(header) should beSuccessfulWithHeaderThat(must = be(header._2), withHeaderName = header._1)
      aSuccessfulResponseWith(header) should not( beSuccessfulWithHeaderThat(must = be(anotherHeader._2), withHeaderName = header._1) )
    }

    "match successful request with cookies" in new ctx {
      aSuccessfulResponseWithCookies(cookie, anotherCookie) should beSuccessfulWithCookie(cookie.name)
      aSuccessfulResponseWithCookies(cookie) should not( beSuccessfulWithCookie(anotherCookie.name) )
    }

    "match successful request with cookiePair matcher" in new ctx {
      aSuccessfulResponseWithCookies(cookie) should beSuccessfulWithCookieThat(must = cookieWith(cookie.value))
      aSuccessfulResponseWithCookies(cookie) should not( beSuccessfulWithCookieThat(must = cookieWith(anotherCookie.value)) )
    }

    "provide a proper message to user sent a matcher to an `haveBodyWith` matcher" in new ctx {
      failureMessageFor(haveBodyWith(entity = be(someObject)), matchedOn = aResponseWith(content)) shouldBe
        s"Matcher misuse: `haveBodyWith` received a matcher to match against, please use `haveBodyThat` instead."
    }
  }
} 
Example 159
Source File: ResponseStatusMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import akka.http.scaladsl.model.StatusCodes._
import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpMessageTestSupport
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class ResponseStatusMatchersTest extends AnyWordSpec {

  trait ctx extends HttpMessageTestSupport


  "ResponseStatusMatchers" should {
      Seq(OK -> beSuccessful, NoContent -> beNoContent, Created -> beSuccessfullyCreated, Accepted -> beAccepted, // 2xx

          Found -> beRedirect, MovedPermanently -> bePermanentlyRedirect, //3xx

          // 4xx
          Forbidden -> beRejected, NotFound -> beNotFound, BadRequest -> beInvalid, RequestEntityTooLarge -> beRejectedTooLarge,
          Unauthorized -> beUnauthorized, MethodNotAllowed -> beNotSupported, Conflict -> beConflict, PreconditionFailed -> bePreconditionFailed,
          UnprocessableEntity -> beUnprocessableEntity, PreconditionRequired -> bePreconditionRequired, TooManyRequests -> beTooManyRequests,

          ServiceUnavailable -> beUnavailable, InternalServerError -> beInternalServerError, NotImplemented -> beNotImplemented // 5xx
         ).foreach { case (status, matcherForStatus) =>

        s"match against status ${status.value}" in new ctx {
          aResponseWith( status ) should matcherForStatus
          aResponseWith( randomStatusThatIsNot(status) ) should not( matcherForStatus )
        }
      }

    "allow matching against status code" in new ctx {
      val status = randomStatus
      aResponseWith( status ) should haveStatus(code = status.intValue )
      aResponseWith( status ) should not( haveStatus(code = randomStatusThatIsNot(status).intValue ) )
    }
  }
} 
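The spec above registers one test per (status, matcher) pair by iterating inside the should block; this works because AnyWordSpec registers tests while the class body is being constructed. The pattern in isolation:

import org.scalatest.wordspec.AnyWordSpec

class EvenSpec extends AnyWordSpec {
  "isEven" should {
    // one test is registered per element of the sequence
    Seq(2, 4, 6).foreach { n =>
      s"hold for $n" in { assert(n % 2 == 0) }
    }
  }
}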
Example 160
Source File: RequestUrlMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.CommonTestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class RequestUrlMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "RequestUrlMatchers" should {

    "match exact path" in new ctx {
      aRequestWithPath(somePath) should havePath(somePath)
      aRequestWithPath(somePath) should not( havePath(anotherPath) )
    }

    "match exact path matcher" in new ctx {
      aRequestWithPath(somePath) should havePathThat(must = be( somePath ))
      aRequestWithPath(somePath) should not( havePathThat(must = be( anotherPath )) )
    }
    // TODO: decide whether a leading slash should be ignored when matching paths

    "contain parameter will check if any parameter is present" in new ctx {
      aRequestWithParameters(parameter, anotherParameter) should haveAnyParamOf(parameter)
      aRequestWithParameters(parameter) should not( haveAnyParamOf(anotherParameter) )
    }

    "return detailed message on hasAnyOf match failure" in new ctx {
      failureMessageFor(haveAnyParamOf(parameter, anotherParameter), matchedOn = aRequestWithParameters(yetAnotherParameter, andAnotherParameter)) shouldBe
        s"Could not find parameter [${parameter._1}, ${anotherParameter._1}] but found those: [${yetAnotherParameter._1}, ${andAnotherParameter._1}]"
    }

    "contain parameter will check if all parameters are present" in new ctx {
      aRequestWithParameters(parameter, anotherParameter, yetAnotherParameter) should haveAllParamFrom(parameter, anotherParameter)
      aRequestWithParameters(parameter, yetAnotherParameter) should not( haveAllParamFrom(parameter, anotherParameter) )
    }

    "allOf matcher will return a message stating what was found, and what is missing from parameter list" in new ctx {
      failureMessageFor(haveAllParamFrom(parameter, anotherParameter), matchedOn = aRequestWithParameters(parameter, yetAnotherParameter)) shouldBe
        s"Could not find parameter [${anotherParameter._1}] but found those: [${parameter._1}]."
    }

    "same parameter as will check if the same parameters is present" in new ctx {
      aRequestWithParameters(parameter, anotherParameter) should haveTheSameParamsAs(parameter, anotherParameter)
      aRequestWithParameters(parameter, anotherParameter) should not( haveTheSameParamsAs(parameter) )
      aRequestWithParameters(parameter) should not( haveTheSameParamsAs(parameter, anotherParameter) )
    }

    "haveTheSameParametersAs matcher will return a message stating what was found, and what is missing from parameter list" in new ctx {
      failureMessageFor(haveTheSameParamsAs(parameter, anotherParameter), matchedOn = aRequestWithParameters(parameter, yetAnotherParameter)) shouldBe
        s"Request parameters are not identical, missing parameters from request: [${anotherParameter._1}], request contained extra parameters: [${yetAnotherParameter._1}]."
    }

    "request with no parameters will show a 'no parameters' message" in new ctx {
      failureMessageFor(haveAnyParamOf(parameter), matchedOn = aRequestWithNoParameters ) shouldBe
        "Request did not contain any request parameters."

      failureMessageFor(haveAllParamFrom(parameter), matchedOn = aRequestWithNoParameters ) shouldBe
        "Request did not contain any request parameters."

      failureMessageFor(haveTheSameParamsAs(parameter), matchedOn = aRequestWithNoParameters ) shouldBe
        "Request did not contain any request parameters."
    }

    "match if any parameter satisfy the composed matcher" in new ctx {
      aRequestWithParameters(parameter) should haveAnyParamThat(must = be(parameter._2), withParamName = parameter._1)
      aRequestWithParameters(parameter) should not( haveAnyParamThat(must = be(anotherParameter._2), withParamName = anotherParameter._1) )
    }

    "return informative error messages" in new ctx {
      failureMessageFor(haveAnyParamThat(must = AlwaysMatcher(), withParamName = nonExistingParamName), matchedOn = aRequestWithParameters(parameter)) shouldBe
        s"Request contain parameter names: [${parameter._1}] which did not contain: [$nonExistingParamName]"
      failureMessageFor(haveAnyParamThat(must = AlwaysMatcher(), withParamName = nonExistingParamName), matchedOn = aRequestWithNoParameters) shouldBe
        "Request did not contain any parameters."
      failureMessageFor(haveAnyParamThat(must = be(anotherParameter._2), withParamName = parameter._1), matchedOn = aRequestWithParameters(parameter)) shouldBe
        s"Request parameter [${parameter._1}], did not match { ${be(anotherParameter._2).apply(parameter._2).failureMessage} }"
    }
  }
} 
Example 161
Source File: RequestHeadersMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.CommonTestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class RequestHeadersMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "RequestHeadersMatchers" should {

    "contain header will check if any header is present" in new ctx {
      aRequestWithHeaders(header, anotherHeader) should haveAnyHeadersOf(header)
    }

    "return detailed message on hasAnyOf match failure" in new ctx {
      failureMessageFor(haveAnyHeadersOf(header, anotherHeader), matchedOn = aRequestWithHeaders(yetAnotherHeader, andAnotherHeader)) shouldBe
        s"Could not find header [${header._1}, ${anotherHeader._1}] but found those: [${yetAnotherHeader._1}, ${andAnotherHeader._1}]"
    }

    "contain header will check if all headers are present" in new ctx {
      aRequestWithHeaders(header, anotherHeader, yetAnotherHeader) should haveAllHeadersOf(header, anotherHeader)
    }

    "allOf matcher will return a message stating what was found, and what is missing from header list" in new ctx {
      failureMessageFor(haveAllHeadersOf(header, anotherHeader), matchedOn = aRequestWithHeaders(yetAnotherHeader, header)) shouldBe
        s"Could not find header [${anotherHeader._1}] but found those: [${header._1}]."
    }

    "same header as will check if the same headers is present" in new ctx {
      aRequestWithHeaders(header, anotherHeader) should haveTheSameHeadersAs(header, anotherHeader)
      aRequestWithHeaders(header, anotherHeader) should not( haveTheSameHeadersAs(header) )
      aRequestWithHeaders(header) should not( haveTheSameHeadersAs(header, anotherHeader) )
    }

    "haveTheSameHeadersAs matcher will return a message stating what was found, and what is missing from header list" in new ctx {
      failureMessageFor(haveTheSameHeadersAs(header, anotherHeader), matchedOn = aRequestWithHeaders(yetAnotherHeader, header)) shouldBe
        s"Request header is not identical, missing headers from request: [${anotherHeader._1}], request contained extra headers: [${yetAnotherHeader._1}]."
    }

    "header name compare should be case insensitive" in new ctx {
      aRequestWithHeaders(header) should haveAnyHeadersOf(header.copy(_1 = header._1.toUpperCase))
      aRequestWithHeaders(header) should not( haveAnyHeadersOf(header.copy(_2 = header._2.toUpperCase)) )

      aRequestWithHeaders(header) should haveAllHeadersOf(header.copy(_1 = header._1.toUpperCase))
      aRequestWithHeaders(header) should not( haveAllHeadersOf(header.copy(_2 = header._2.toUpperCase)) )

      aRequestWithHeaders(header) should haveTheSameHeadersAs(header.copy(_1 = header._1.toUpperCase))
      aRequestWithHeaders(header) should not( haveTheSameHeadersAs(header.copy(_2 = header._2.toUpperCase)) )
    }

    "request with no headers will show a 'no headers' message" in new ctx {
      failureMessageFor(haveAnyHeadersOf(header), matchedOn = aRequestWithNoHeaders ) shouldBe
        "Request did not contain any headers."

      failureMessageFor(haveAllHeadersOf(header), matchedOn = aRequestWithNoHeaders ) shouldBe
        "Request did not contain any headers."

      failureMessageFor(haveTheSameHeadersAs(header), matchedOn = aRequestWithNoHeaders ) shouldBe
        "Request did not contain any headers."
    }

    "ignore cookies and set cookies from headers comparison" in new ctx {
      aRequestWithCookies(cookiePair) should not( haveAnyHeadersOf("Cookie" -> s"${cookiePair._1}=${cookiePair._2}") )
      aRequestWithCookies(cookiePair) should not( haveAllHeadersOf("Cookie" -> s"${cookiePair._1}=${cookiePair._2}") )
      aRequestWithCookies(cookiePair) should not( haveTheSameHeadersAs("Cookie" -> s"${cookiePair._1}=${cookiePair._2}") )
      aRequestWithCookies(cookiePair) should not( haveAnyHeaderThat(must = be(s"${cookiePair._1}=${cookiePair._2}"), withHeaderName = "Cookie") )
    }

    "match if any header satisfy the composed matcher" in new ctx {
      aRequestWithHeaders(header) should haveAnyHeaderThat(must = be(header._2), withHeaderName = header._1)
      aRequestWithHeaders(header) should not( haveAnyHeaderThat(must = be(anotherHeader._2), withHeaderName = header._1) )
    }

    "return informative error messages" in new ctx {
      failureMessageFor(haveAnyHeaderThat(must = AlwaysMatcher(), withHeaderName = nonExistingHeaderName), matchedOn = aRequestWithHeaders(header)) shouldBe
        s"Request contain header names: [${header._1}] which did not contain: [$nonExistingHeaderName]"
      failureMessageFor(haveAnyHeaderThat(must = AlwaysMatcher(), withHeaderName = nonExistingHeaderName), matchedOn = aRequestWithNoHeaders) shouldBe
        "Request did not contain any headers."
      failureMessageFor(haveAnyHeaderThat(must = be(anotherHeader._2), withHeaderName = header._1), matchedOn = aRequestWithHeaders(header)) shouldBe
        s"Request header [${header._1}], did not match { ${be(anotherHeader._2).apply(header._2).failureMessage} }"
    }
  }
} 
Example 162
Source File: ResponseContentTypeMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class ResponseContentTypeMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport


  "ResponseContentTypeMatchers" should {

    "support matching against json content type" in new ctx {
      aResponseWithContentType("application/json") should beJsonResponse
      aResponseWithContentType("text/plain") should not( beJsonResponse )
    }

    "support matching against text plain content type" in new ctx {
      aResponseWithContentType("text/plain") should beTextPlainResponse
      aResponseWithContentType("application/json") should not( beTextPlainResponse )
    }

    "support matching against form url encoded content type" in new ctx {
      aResponseWithContentType("application/x-www-form-urlencoded") should beFormUrlEncodedResponse
      aResponseWithContentType("application/json") should not( beFormUrlEncodedResponse )
    }

    "show proper error in case matching against a malformed content type" in new ctx {
      failureMessageFor(haveContentType(malformedContentType), matchedOn = aResponseWithContentType(anotherContentType)) should
        include(s"Cannot match against a malformed content type: $malformedContentType")
    }

    "support matching against content type" in new ctx {
      aResponseWithContentType(contentType) should haveContentType(contentType)
    }

    "failure message should describe what was the expected content type and what was found" in new ctx {
      failureMessageFor(haveContentType(contentType), matchedOn = aResponseWithContentType(anotherContentType)) shouldBe
        s"Expected content type [$contentType] does not match actual content type [$anotherContentType]"
    }

    "failure message in case no content type for body should be handled" in new ctx {
      failureMessageFor(haveContentType(contentType), matchedOn = aResponseWithoutBody) shouldBe
        "Request body does not have a set content type"
    }

    "failure message if someone tries to match content-type in headers matchers" in new ctx {
      failureMessageFor(haveAllHeadersOf(contentTypeHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Type` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentType` matcher instead (or `beJsonResponse`, `beTextPlainResponse`, `beFormUrlEncodedResponse`).""".stripMargin
      failureMessageFor(haveAnyHeadersOf(contentTypeHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Type` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentType` matcher instead (or `beJsonResponse`, `beTextPlainResponse`, `beFormUrlEncodedResponse`).""".stripMargin
      failureMessageFor(haveTheSameHeadersAs(contentTypeHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Type` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentType` matcher instead (or `beJsonResponse`, `beTextPlainResponse`, `beFormUrlEncodedResponse`).""".stripMargin
    }
  }
} 
Example 163
Source File: ResponseStatusAndHeaderMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import akka.http.scaladsl.model.StatusCodes.{Found, MovedPermanently}
import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class ResponseStatusAndHeaderMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseStatusAndHeaderMatchers" should {

    "match against a response that is temporarily redirected to url" in new ctx {
      aRedirectResponseTo(url) should beRedirectedTo(url)
      aRedirectResponseTo(url) should not( beRedirectedTo(anotherUrl) )
      aRedirectResponseTo(url).copy(status = randomStatusThatIsNot(Found)) should not( beRedirectedTo(url) )
    }

    "match against a response that is permanently redirected to url" in new ctx {
      aPermanentlyRedirectResponseTo(url) should bePermanentlyRedirectedTo(url)
      aPermanentlyRedirectResponseTo(url) should not( bePermanentlyRedirectedTo(anotherUrl) )
      aPermanentlyRedirectResponseTo(url).copy(status = randomStatusThatIsNot(MovedPermanently)) should not( bePermanentlyRedirectedTo(url) )
    }

    "match against url params even if params has a different order" in new ctx {
      aRedirectResponseTo(s"$url?param1=val1&param2=val2") should beRedirectedTo(s"$url?param2=val2&param1=val1")
      aPermanentlyRedirectResponseTo(s"$url?param1=val1&param2=val2") should bePermanentlyRedirectedTo(s"$url?param2=val2&param1=val1")
    }

    "match will fail for different protocol" in new ctx {
      aRedirectResponseTo(s"http://example.com") should not( beRedirectedTo(s"https://example.com") )
      aPermanentlyRedirectResponseTo(s"http://example.com") should not( bePermanentlyRedirectedTo(s"https://example.com") )
    }

    "match will fail for different host and port" in new ctx {
      aRedirectResponseTo(s"http://example.com") should not( beRedirectedTo(s"http://example.org") )
      aRedirectResponseTo(s"http://example.com:99") should not( beRedirectedTo(s"http://example.com:81") )
      aPermanentlyRedirectResponseTo(s"http://example.com") should not( bePermanentlyRedirectedTo(s"http://example.org") )
      aPermanentlyRedirectResponseTo(s"http://example.com:99") should not( bePermanentlyRedirectedTo(s"http://example.com:81") )
    }

    "port 80 is removed by akka http" in new ctx {
      aRedirectResponseTo(s"http://example.com:80") should beRedirectedTo(s"http://example.com")
      aPermanentlyRedirectResponseTo(s"http://example.com:80") should bePermanentlyRedirectedTo(s"http://example.com")
    }

    "match will fail for different path" in new ctx {
      aRedirectResponseTo(s"http://example.com/path1") should not( beRedirectedTo(s"http://example.com/path2") )
      aPermanentlyRedirectResponseTo(s"http://example.com/path1") should not( bePermanentlyRedirectedTo(s"http://example.org/path2") )
    }

    "match will fail for different hash fragment" in new ctx {
      aRedirectResponseTo(s"http://example.com/path#fragment") should not( beRedirectedTo(s"http://example.com/path#anotherFxragment") )
      aPermanentlyRedirectResponseTo(s"http://example.com/path#fragment") should not( bePermanentlyRedirectedTo(s"http://example.com/path#anotherFxragment") )
    }

    "failure message in case response does not have location header" in new ctx {
      failureMessageFor(beRedirectedTo(url), matchedOn = aRedirectResponseWithoutLocationHeader) shouldBe
        "Response does not contain Location header."
      failureMessageFor(bePermanentlyRedirectedTo(url), matchedOn = aPermanentlyRedirectResponseWithoutLocationHeader) shouldBe
        "Response does not contain Location header."
    }

    "failure message in case trying to match against a malformed url" in new ctx {
      failureMessageFor(beRedirectedTo(malformedUrl), matchedOn = aRedirectResponseTo(url)) shouldBe
        s"Matching against a malformed url: [$malformedUrl]."
      failureMessageFor(bePermanentlyRedirectedTo(malformedUrl), matchedOn = aPermanentlyRedirectResponseTo(url)) shouldBe
        s"Matching against a malformed url: [$malformedUrl]."
    }

    "failure message in case response have different urls should show the actual url and the expected url" in new ctx {
      failureMessageFor(beRedirectedTo(url), matchedOn = aRedirectResponseTo(s"$url?param1=val1")) shouldBe
        s"""Response is redirected to a different url:
           |actual:   $url?param1=val1
           |expected: $url
           |""".stripMargin
      failureMessageFor(bePermanentlyRedirectedTo(url), matchedOn = aPermanentlyRedirectResponseTo(s"$url?param1=val1")) shouldBe
        s"""Response is redirected to a different url:
           |actual:   $url?param1=val1
           |expected: $url
           |""".stripMargin
    }
  }
} 
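The redirect assertions above compare urls structurally, so query parameter order does not matter while scheme, host, port, path and fragment all do. A rough sketch of such a comparison with java.net.URI (a hypothetical helper, not the testkit's actual code; note it does not replicate akka-http's removal of the default port 80):

import java.net.URI

object RedirectUrlSketch {
  // compares two urls treating query parameters as an unordered set
  def sameRedirectTarget(a: String, b: String): Boolean = {
    def parts(raw: String) = {
      val uri    = new URI(raw)
      val params = Option(uri.getQuery).map(_.split('&').toSet).getOrElse(Set.empty[String])
      (uri.getScheme, uri.getHost, uri.getPort, uri.getPath, uri.getFragment, params)
    }
    parts(a) == parts(b)
  }
  // sameRedirectTarget("http://e.com?a=1&b=2", "http://e.com?b=2&a=1")  // true
}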
Example 164
Source File: RequestCookiesMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.RequestMatchers._
import com.wix.e2e.http.matchers.drivers.CommonTestMatchers._
import com.wix.e2e.http.matchers.drivers.HttpRequestFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec

class RequestCookiesMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseCookiesMatchers" should {

    "match if cookiePair with name is found" in new ctx {
      aRequestWithCookies(cookiePair) should receivedCookieWith(cookiePair._1)
    }

    "failure message should describe which cookies are present and which did not match" in new ctx {
      failureMessageFor( receivedCookieWith(cookiePair._1), matchedOn = aRequestWithCookies(anotherCookiePair, yetAnotherCookiePair)) should
        include(s"Could not find cookie that matches for request contained cookies with names: ['${anotherCookiePair._1}', '${yetAnotherCookiePair._1}'")
      failureMessageFor( not( receivedCookieThat(be(cookiePair._1)) ), matchedOn = aRequestWithCookies(cookiePair, anotherCookiePair)) shouldBe
        s"Request contained a cookie that matched, request has the following cookies: ['${cookiePair._1}', '${anotherCookiePair._1}'"
    }

    "failure message for response withoout cookies will print that the response did not contain any cookies" in new ctx {
      failureMessageFor( receivedCookieWith(cookiePair._1), matchedOn = aRequestWithNoCookies) shouldBe
        "Request did not contain any Cookie headers."
      failureMessageFor( not( receivedCookieWith(cookiePair._1) ), matchedOn = aRequestWithNoCookies) shouldBe
        "Request did not contain any Cookie headers."
    }

    "allow to compose matcher with custom cookiePair matcher" in new ctx {
      aRequestWithCookies(cookiePair) should receivedCookieThat(must = cookieWith(cookiePair._2))
    }
  }
} 
Example 165
Source File: ResponseContentLengthMatchersTest.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.internal

import com.wix.e2e.http.matchers.ResponseMatchers._
import com.wix.e2e.http.matchers.drivers.HttpResponseFactory._
import com.wix.e2e.http.matchers.drivers.{HttpMessageTestSupport, MatchersTestSupport}
import org.scalatest.matchers.should.Matchers._
import org.scalatest.wordspec.AnyWordSpec


class ResponseContentLengthMatchersTest extends AnyWordSpec with MatchersTestSupport {

  trait ctx extends HttpMessageTestSupport

  "ResponseContentLengthMatchers" should {

    "support matching against specific content length" in new ctx {
      aResponseWith(contentWith(length = length)) should haveContentLength(length = length)
      aResponseWith(contentWith(length = anotherLength)) should not( haveContentLength(length = length) )
    }

    "support matching content length against response without content length" in new ctx {
      aResponseWithoutContentLength should not( haveContentLength(length = length) )
    }

    "support matching against response without content length" in new ctx {
      aResponseWithoutContentLength should haveNoContentLength
      aResponseWith(contentWith(length = length)) should not( haveNoContentLength )
    }

    "failure message should describe what was the expected content length and what was found" in new ctx {
      failureMessageFor(haveContentLength(length = length), matchedOn = aResponseWith(contentWith(length = anotherLength))) shouldBe
        s"Expected content length [$length] does not match actual content length [$anotherLength]"
    }

    "failure message should reflect that content length header was not found" in new ctx {
      failureMessageFor(haveContentLength(length = length), matchedOn = aResponseWithoutContentLength) shouldBe
        s"Expected content length [$length] but response did not contain `content-length` header."
    }

    "failure message should reflect that content length header exists while trying to match against a content length that doesn't exists" in new ctx {
      failureMessageFor(haveNoContentLength, matchedOn = aResponseWith(contentWith(length = length))) shouldBe
        s"Expected no `content-length` header but response did contain `content-length` header with size [$length]."
    }

    "failure message if someone tries to match content-length in headers matchers" in new ctx {
      failureMessageFor(haveAllHeadersOf(contentLengthHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Length` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentLength` matcher instead.""".stripMargin
      failureMessageFor(haveAnyHeadersOf(contentLengthHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Length` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentLength` matcher instead.""".stripMargin
      failureMessageFor(haveTheSameHeadersAs(contentLengthHeader), matchedOn = aResponseWithContentType(contentType)) shouldBe
        """`Content-Length` is a special header and cannot be used in `haveAnyHeadersOf`, `haveAllHeadersOf`, `haveTheSameHeadersAs` matchers.
          |Use `haveContentLength` matcher instead.""".stripMargin
    }
  }
} 
Example 166
Source File: CirceCodecSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock
package circe

import io.circe.generic.auto._

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CirceCodecSpec extends AnyWordSpec with Matchers {
  case class Dummy(a: Int, b: String)
  import implicits._

  val dummyValue = Dummy(1, "patata")
  val json       = Entity.StringEntity("""{"a":1,"b":"patata"}""", ContentType.`application/json`)

  "Codec.encode" should {
    "return the string representation of a type" in {
      Codec[Dummy].encode(dummyValue) shouldEqual json
    }
  }

  "Codec.decode" should {
    "parse a valid value" in {
      Codec[Dummy].decode(json) shouldEqual Right(dummyValue)
    }

    "fail to parse an invalid value" in {
      Codec[Dummy].decode(Entity.StringEntity("this is of course not valid")) shouldBe a[Left[_, _]]
    }
  }

} 
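Codec[Dummy] here is a typeclass pairing an encoder with a decoder; the circe module derives it from circe's own codecs. A stripped-down sketch of the general shape, using plain Strings instead of hammock's Entity and CodecException:

import scala.util.Try

// a minimal sketch of a Codec-style typeclass (simplified, not hammock's real interface)
trait SimpleCodec[A] {
  def encode(a: A): String
  def decode(s: String): Either[String, A]
}

object SimpleCodec {
  def apply[A](implicit c: SimpleCodec[A]): SimpleCodec[A] = c

  implicit val intCodec: SimpleCodec[Int] = new SimpleCodec[Int] {
    def encode(a: Int): String = a.toString
    def decode(s: String): Either[String, Int] =
      Try(s.toInt).toOption.toRight(s"not an Int: '$s'")
  }
}

// SimpleCodec[Int].decode(SimpleCodec[Int].encode(42))  // Right(42)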
Example 167
Source File: StatusSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StatusSpec extends AnyWordSpec with Matchers {
  val status: Int => Status = (code: Int) => Status(code, "", "")

  "Status.isInformational" when {
    "status code is 1xx" should {
      "return true" in {
        (100 to 102).foreach(code => assert(status(code).isInformational))
      }
    }

    "status code is not 1xx" should {
      "return false" in {
        assert(!status(200).isInformational)
        assert(!status(300).isInformational)
        assert(!status(400).isInformational)
        assert(!status(500).isInformational)
      }
    }
  }

  "Status.isSuccess" when {
    "status code is 2xx" should {
      "return true" in {
        (200 to 208).foreach(code => assert(status(code).isSuccess))
      }
    }

    "status code is not 2xx" should {
      "return false" in {
        assert(!status(100).isSuccess)
        assert(!status(300).isSuccess)
        assert(!status(400).isSuccess)
        assert(!status(500).isSuccess)
      }
    }
  }

  "Status.isRedirection" when {
    "status code is 3xx" should {
      "return true" in {
        (300 to 308).foreach(code => assert(status(code).isRedirection))
      }
    }

    "status code is not 3xx" should {
      "return false" in {
        assert(!status(100).isRedirection)
        assert(!status(200).isRedirection)
        assert(!status(400).isRedirection)
        assert(!status(500).isRedirection)
      }
    }
  }

  "Status.isClientError" when {
    "status code is 4xx" should {
      "return true" in {
        (400 to 451).foreach(code => assert(status(code).isClientError))
      }
    }

    "status code is not 4xx" should {
      "return false" in {
        assert(!status(100).isClientError)
        assert(!status(200).isClientError)
        assert(!status(300).isClientError)
        assert(!status(500).isClientError)
      }
    }
  }

  "Status.isServerError" when {
    "status code is 5xx" should {
      "return true" in {
        (500 to 599).foreach(code => assert(status(code).isServerError))
      }
    }

    "status code is not 5xx" should {
      "return false" in {
        assert(!status(100).isServerError)
        assert(!status(200).isServerError)
        assert(!status(300).isServerError)
        assert(!status(400).isServerError)
      }
    }
  }
} 
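Each predicate tested above is just a range check on the status code's leading digit. A sketch of a type that would satisfy this spec, inferred from the Status(code, "", "") constructor used here (hammock's actual Status may differ):

// StatusLike is inferred from the spec above, not hammock's actual source
final case class StatusLike(code: Int, text: String, reason: String) {
  def isInformational: Boolean = code / 100 == 1
  def isSuccess: Boolean       = code / 100 == 2
  def isRedirection: Boolean   = code / 100 == 3
  def isClientError: Boolean   = code / 100 == 4
  def isServerError: Boolean   = code / 100 == 5
}
// StatusLike(204, "", "").isSuccess  // true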
Example 168
Source File: CookieSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock
package hi

import java.time.ZonedDateTime

import cats._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CookieSpec extends AnyWordSpec with Matchers {

  "Show[Cookie].show" should {
    "render a simple cookie in the correct format" in {
      val cookie = Cookie("name", "value")

      Show[Cookie].show(cookie) shouldEqual "name=value"
    }

    "render a complex cookie in the correct format" in {
      val cookie = Cookie(
        "name",
        "value",
        Some(ZonedDateTime.parse("2020-01-04T17:03:54.000Z")),
        Some(123),
        Some("pepegar.com"),
        Some("/blog"),
        Some(false),
        Some(true),
        Some(Cookie.SameSite.Strict)
      )

      Show[Cookie].show(cookie) shouldEqual "name=value; Expires=Sat, 04 Jan 2020 17:03:54 GMT; MaxAge=123; Domain=pepegar.com; Path=/blog; Secure=false; HttpOnly=true; SameSite=Strict"
    }

    "render a cookie with custom values in the correct format" in {
      val cookie = Cookie("hello", "dolly", custom = Some(Map("potatoes" -> "22")))

      Show[Cookie].show(cookie) shouldEqual "hello=dolly; potatoes=22"
    }
  }

} 
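Show[Cookie].show resolves a cats Show instance for Cookie. A minimal sketch of defining and using such an instance for a hypothetical pair type:

import cats.Show
import cats.syntax.show._

object ShowSketch {
  // Pair is a hypothetical stand-in; its Show instance renders the
  // name=value form asserted above
  final case class Pair(name: String, value: String)
  implicit val showPair: Show[Pair] = Show.show(p => s"${p.name}=${p.value}")

  def demo: String = Pair("name", "value").show // "name=value"
}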
Example 169
Source File: DslSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock
package hi

import cats.implicits._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class DslSpec extends AnyWordSpec with Matchers {

  "`cookies`" should {
    "work when there were no cookies before" in {
      val opts = cookies(List(Cookie("a", "b"), Cookie("c", "d")))(Opts.empty)

      opts shouldEqual Opts(None, Map(), Some(List(Cookie("a", "b"), Cookie("c", "d"))))
    }

    "preppend cookies when there were cookies before" in {
      val opts = cookies(List(Cookie("c", "d"), Cookie("e", "f")))(Opts(None, Map(), Some(List(Cookie("a", "b")))))

      opts shouldEqual Opts(None, Map(), Some(List(Cookie("c", "d"), Cookie("e", "f"), Cookie("a", "b"))))
    }
  }

  "high level dsl" should {
    "allow concatenation of operations" in {
      val req = (auth(Auth.BasicAuth("pepegar", "h4rdp4ssw0rd")) >>>
        header("X-Forwarded-Proto" -> "https") >>>
        cookie(Cookie("track", "A lot")))(Opts.empty)

      req shouldEqual Opts(
        Some(Auth.BasicAuth("pepegar", "h4rdp4ssw0rd")),
        Map("X-Forwarded-Proto" -> "https"),
        Some(List(Cookie("track", "A lot"))))
    }
  }

} 
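The dsl combinators composed above are plain Opts => Opts functions; >>> is cats' left-to-right Function1 composition. The same idea sketched with a stand-in config type:

import cats.implicits._

object DslCompositionSketch {
  // Config is a hypothetical stand-in for hammock's Opts
  final case class Config(headers: Map[String, String])

  val addAuth: Config => Config  = c => c.copy(headers = c.headers + ("Authorization" -> "Basic ..."))
  val addProto: Config => Config = c => c.copy(headers = c.headers + ("X-Forwarded-Proto" -> "https"))

  // >>> applies addAuth first, then addProto
  val build: Config => Config = addAuth >>> addProto

  def demo: Config = build(Config(Map.empty)) // contains both headers
}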
Example 170
Source File: HammockSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import cats._
import hi.{Auth, Cookie, Opts}

class HammockSpec extends AnyWordSpec with Matchers {
  val methods =
    Seq(Method.OPTIONS, Method.GET, Method.HEAD, Method.POST, Method.PUT, Method.DELETE, Method.TRACE, Method.PATCH)

  implicit val stringCodec = new Codec[String] {
    def decode(a: hammock.Entity): Either[hammock.CodecException, String] = a match {
      case Entity.StringEntity(str, _) => Right(str)
      case _                           => Left(CodecException.withMessage("expected string entity"))
    }
    def encode(a: String): hammock.Entity = Entity.StringEntity(a)
  }

  
  def test(assertions: HttpF[_] => Any) = new (HttpF ~> Id) {
    def apply[A](h: HttpF[A]): A = {
      assertions(h)

      null.asInstanceOf[A]
    }
  }

  val uri = Uri.fromString("http://pepegar.com").right.get

  "Hammock.request" should {

    methods.foreach { method =>
      s"create a valid $method request without a body" in {
        Hammock.request(method, uri, Map()) foldMap test { r =>
          r.req.uri shouldEqual Uri.fromString("http://pepegar.com").right.get
          r.req.headers shouldEqual Map()
        }
      }

      s"create a valid $method request with a body" in {
        val body = None
        Hammock.request(method, uri, Map(), body) foldMap test { r =>
          r.req.uri shouldEqual Uri.fromString("http://pepegar.com").right.get
          r.req.headers shouldEqual Map()
          r.req.entity shouldEqual None
        }
      }
    }

    "work with the options variant" in {
      val opts = Opts(None, Map("header" -> "3"), Some(List(Cookie("thisisacookie", "thisisthevalue"))))

      Uri.fromString("http://pepegar.com") match {
        case Right(uri) =>
          Hammock.getWithOpts(uri, opts) foldMap test { r =>
            r.req.uri shouldEqual Uri.fromString("http://pepegar.com").right.get
            r.req.headers shouldEqual Map("header" -> "3", "Cookie" -> "thisisacookie=thisisthevalue")
          }
        case Left(err) => fail(s"failed with $err")
      }
    }

    "construct the correct headers" in {
      val basicAuth = Auth.BasicAuth("user", "p4ssw0rd")
      val opts      = Opts(Option(basicAuth), Map.empty, Option.empty)
      val shown     = Show[Auth].show(basicAuth)

      Uri.fromString("http://pepegar.com") match {
        case Right(uri) =>
          Hammock.getWithOpts(uri, opts) foldMap test { r =>
            r.req.uri shouldEqual Uri.fromString("http://pepegar.com").right.get
            r.req.headers shouldEqual Map("Authorization" -> shown)
          }
        case Left(err) => fail(s"failed with $err")
      }
    }
  }
} 
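The test helper above is the interpreter-for-testing pattern: the free program built by Hammock.request is folded with a natural transformation HttpF ~> Id that asserts on each operation instead of performing IO. The same pattern sketched with cats' Free and a toy algebra (ConsoleF is illustrative, not hammock's algebra):

import cats.{~>, Id}
import cats.free.Free
import scala.collection.mutable.ListBuffer

object FreeTestSketch {
  // toy algebra standing in for HttpF
  sealed trait ConsoleF[A]
  final case class PrintLine(text: String) extends ConsoleF[Unit]

  def printLine(text: String): Free[ConsoleF, Unit] = Free.liftF[ConsoleF, Unit](PrintLine(text))

  // a test interpreter that records each operation instead of performing IO
  def recording(out: ListBuffer[String]): ConsoleF ~> Id = new (ConsoleF ~> Id) {
    def apply[A](fa: ConsoleF[A]): Id[A] = fa match {
      case PrintLine(text) => out += text; ()
    }
  }

  def demo: List[String] = {
    val out = ListBuffer.empty[String]
    printLine("hello").flatMap(_ => printLine("world")).foldMap(recording(out))
    out.toList // List("hello", "world")
  }
}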
Example 171
Source File: ContentTypeSpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ContentTypeSpec extends AnyWordSpec with Matchers {

  "Eq[ContentType]" should {

    "equal ContentType instances" in {
      val applicationJsonOne = ContentType.`application/json`
      val applicationJsonTwo = ContentType.`application/json`

      assert(applicationJsonOne == applicationJsonTwo)
    }

    "not equal ContentType instances" in {
      val applicationJson = ContentType.`application/json`
      val textPlain       = ContentType.`text/plain`

      assert(applicationJson != textPlain)
    }

    "equal ContentType from string instances" in {
      val applicationJsonOne = ContentType.fromName("application/json")
      val applicationJsonTwo = ContentType.fromName("application/json")

      applicationJsonOne.name shouldEqual applicationJsonTwo.name
    }
  }
} 
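The last test compares content types obtained via fromName by their name field, which suggests equality keyed on the name. A sketch of a cats Eq instance built that way (ContentTypeLike is hypothetical, not hammock's type):

import cats.Eq
import cats.implicits._

object EqSketch {
  final case class ContentTypeLike(name: String)

  // equality delegated to the name, as the fromName test above effectively checks
  implicit val eqContentType: Eq[ContentTypeLike] = Eq.by[ContentTypeLike, String](_.name)

  def demo: Boolean =
    ContentTypeLike("application/json") === ContentTypeLike("application/json") // true
}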
Example 172
Source File: EntitySpec.scala    From hammock   with MIT License 5 votes vote down vote up
package hammock

import hammock.Entity._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class EntitySpec extends AnyWordSpec with Matchers {

  "EmptyEntity" should {

    "valid instance" in {
      assert(EmptyEntity.contentLength == 0)
      assert(EmptyEntity.contentType == ContentType.notUsed)
      assert(!EmptyEntity.chunked)
      assert(EmptyEntity.repeatable)
      assert(!EmptyEntity.streaming)
    }
  }

  "StringEntity" should {

    "valid instance" in {
      val instance = StringEntity("body")
      assert(instance.contentLength == 4)
      assert(!instance.chunked)
      assert(instance.repeatable)
      assert(!instance.streaming)
    }
  }

  "ByteArrayEntity" should {

    "valid instance" in {
      val instance = ByteArrayEntity("body".getBytes)
      assert(instance.contentLength == 4)
      assert(!instance.chunked)
      assert(instance.repeatable)
      assert(!instance.streaming)
    }
  }

  "Eq[Entity]" should {

    "equal instance" in {
      val body        = "body".getBytes
      val instanceOne = ByteArrayEntity(body)
      val instanceTwo = ByteArrayEntity(body)
      assert(instanceOne == instanceTwo)
    }
  }
} 
Example 173
Source File: PrintableSpec.scala    From tutorial-cats   with MIT License 5 votes vote down vote up
package example

import example.fixtures.GatoFixture
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.must.Matchers

class PrintableSpec extends AnyWordSpec with Matchers with GatoFixture {

  "Printable" must {

    "handle Int" in {
      // TODO 02: Define the mandatory type class to make this work
      import PrintableInstances._
      //format(123) must be("value=123")
      fail("WIP")
    }

    "handle String" in {
      // TODO 02: Define the mandatory type class to make this work
      import PrintableInstances._
      //format("a") must be("value=a")
      fail("WIP")
    }

    "allow a printable" in {
      // TODO 02: Implement the printable syntax to make this work
      import PrintableInstances._
      import PrintableSyntax._

      //michin.format must be("name=michin, age=3, color=black")
      fail("WIP")
    }
  }

} 
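The TODOs above ask for a Printable typeclass, instances for Int and String, and extension syntax. One possible solution sketch consistent with the commented expectations such as format(123) must be("value=123") (the tutorial's intended solution may differ, and the Gato instance for michin is omitted):

object PrintableSketch {
  trait Printable[A] {
    def format(a: A): String
  }

  object PrintableInstances {
    implicit val printableInt: Printable[Int]       = (a: Int) => s"value=$a"
    implicit val printableString: Printable[String] = (a: String) => s"value=$a"
  }

  object PrintableSyntax {
    // enables michin.format-style calls for any A with a Printable instance
    implicit class PrintableOps[A](private val self: A) extends AnyVal {
      def format(implicit p: Printable[A]): String = p.format(self)
    }
  }

  def format[A](a: A)(implicit p: Printable[A]): String = p.format(a)
}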
Example 174
Source File: ExampleTest.scala    From scala-steward   with Apache License 2.0 5 votes vote down vote up
package org.scalasteward.core.data

import org.scalasteward.core.TestSyntax._
import org.scalasteward.core.data.Update.Single
import org.scalasteward.core.edit.UpdateHeuristicTest.UpdateOps
import org.scalasteward.core.util.Nel
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExampleTest extends AnyWordSpec with Matchers {
  "Good examples of dependency definitions".which {
    "will be identified by scala-steward without any problems are".that {
      val goodExample1 = """val scalajsJqueryVersion = "0.9.3""""
      s"$goodExample1" in {
        val expectedResult = Some("""val scalajsJqueryVersion = "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(goodExample1)
          ._1 shouldBe expectedResult
      }

      val goodExample2 = """val SCALAJSJQUERYVERSION = "0.9.3""""
      s"$goodExample2" in {
        val expectedResult = Some("""val SCALAJSJQUERYVERSION = "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(goodExample2)
          ._1 shouldBe expectedResult
      }

      val goodExample3 = """val scalajsjquery = "0.9.3""""
      s"$goodExample3" in {
        val expectedResult = Some("""val scalajsjquery = "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(goodExample3)
          ._1 shouldBe expectedResult
      }

      val goodExample4 = """addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.24")"""
      s"$goodExample4" in {
        val expectedResult = Some("""addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.25")""")
        Single("org.scala-js" % "sbt-scalajs" % "0.6.24", Nel.of("0.6.25"))
          .replaceVersionIn(goodExample4)
          ._1 shouldBe expectedResult
      }

      val goodExample5 = """"be.doeraene" %% "scalajs-jquery"  % "0.9.3""""
      s"$goodExample5" in {
        val expectedResult = Some(""""be.doeraene" %% "scalajs-jquery"  % "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(goodExample5)
          ._1 shouldBe expectedResult
      }

      val goodExample6 = """val `scalajs-jquery-version` = "0.9.3""""
      s"$goodExample6" in {
        val expectedResult = Some("""val `scalajs-jquery-version` = "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(goodExample6)
          ._1 shouldBe expectedResult
      }
    }
  }

  "Bad examples of dependency definitions".which {
    "won't be identified by scala-steward are".that {
      val badExample1 =
        """val scalajsJqueryVersion =
          |  "0.9.3"""".stripMargin
      s"$badExample1" in {
        val expectedResult = None
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(badExample1)
          ._1 shouldBe expectedResult
      }

      val badExample2 =
        """val scalajsJqueryVersion = "0.9.3" // val scalajsJqueryVersion = "0.9.3""""
      s"$badExample2" in {
        val expectedResult =
          Some("""val scalajsJqueryVersion = "0.9.3" // val scalajsJqueryVersion = "0.9.4"""")
        Single("be.doeraene" % "scalajs-jquery" % "0.9.3", Nel.of("0.9.4"))
          .replaceVersionIn(badExample2)
          ._1 shouldBe expectedResult
      }
    }
  }
} 
Example 175
Source File: SLF4JSpec.scala    From scribe   with MIT License 5 votes vote down vote up
package spec

import java.util.TimeZone

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.slf4j.{LoggerFactory, MDC}
import scribe.handler.LogHandler
import scribe.output.LogOutput
import scribe.util.Time
import scribe.writer.Writer
import scribe.{Level, LogRecord, Logger}

class SLF4JSpec extends AnyWordSpec with Matchers {
  TimeZone.setDefault(TimeZone.getTimeZone("UTC"))

  private var logs: List[LogRecord[_]] = Nil
  private var logOutput: List[String] = Nil
  private val recordHolder = LogHandler.default.withMinimumLevel(Level.Info).withWriter(new Writer {
    override def write[M](record: LogRecord[M], output: LogOutput): Unit = {
      logs = record :: logs
      logOutput = output.plainText :: logOutput
    }
  })

  "SLF4J" should {
    "set the time to an arbitrary value" in {
      Time.function = () => 1542376191920L
    }
    "remove existing handlers from Root" in {
      Logger.root.clearHandlers().replace()
    }
    "add a testing handler" in {
      Logger.root.withHandler(recordHolder).replace()
    }
    "verify not records are in the RecordHolder" in {
      logs.isEmpty should be(true)
    }
    "log to Scribe" in {
      val logger = LoggerFactory.getLogger(getClass)
      logger.info("Hello World!")
    }
    "verify Scribe received the record" in {
      logs.size should be(1)
      val r = logs.head
      r.level should be(Level.Info)
      r.message.plainText should be("Hello World!")
      r.className should be("spec.SLF4JSpec")
      logs = Nil
    }
    "verify Scribe wrote value" in {
      logOutput.size should be(1)
      val s = logOutput.head
      s should be("2018.11.16 13:49:51 [INFO] spec.SLF4JSpec - Hello World!")
    }
    "use MDC" in {
      MDC.put("name", "John Doe")
      val logger = LoggerFactory.getLogger(getClass)
      logger.info("A generic name")
      logOutput.head should be("2018.11.16 13:49:51 [INFO] spec.SLF4JSpec - A generic name (name: John Doe)")
    }
    "clear MDC" in {
      MDC.clear()
      val logger = LoggerFactory.getLogger(getClass)
      logger.info("MDC cleared")
      logOutput.head should be("2018.11.16 13:49:51 [INFO] spec.SLF4JSpec - MDC cleared")
    }
    "make sure logging nulls doesn't error" in {
      val logger = LoggerFactory.getLogger(getClass)
      logger.error(null)
      logs.length should be(3)
      logOutput.head should be("2018.11.16 13:49:51 [ERROR] spec.SLF4JSpec - null")
    }
  }
} 
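Pinning Time.function to a constant is what makes the timestamp assertions above deterministic. The underlying swappable-clock idea, sketched independently of scribe's actual Time object:

object ClockSketch {
  // production code calls timestamp(); tests swap the function for a fixed value
  @volatile var now: () => Long = () => System.currentTimeMillis()

  def timestamp(): Long = now()
}

// in a test: ClockSketch.now = () => 1542376191920L
// everything that formats timestamps through ClockSketch.timestamp() becomes reproducible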
Example 177
Source File: FutureTracingSpec.scala    From scribe   with MIT License 5 votes vote down vote up
package spec

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.duration.Duration
import scala.concurrent.Await

class FutureTracingSpec extends AnyWordSpec with Matchers {
  "Future tracing" when {
    "using scribe implicits" should {
      "future trace back" in {
        val exception = intercept[RuntimeException](Await.result(FutureTesting.position(), Duration.Inf))
        val trace = exception.getStackTrace
        trace(0).getFileName should be("FutureTesting.scala")
        trace(0).getLineNumber should be(27)

        trace(1).getFileName should be("FutureTesting.scala")
        trace(1).getMethodName should be("three")
        trace(1).getLineNumber should be(26)

        trace(2).getFileName should be("FutureTesting.scala")
        trace(2).getMethodName should be("two")
        trace(2).getLineNumber should be(20)

        trace(3).getFileName should be("FutureTesting.scala")
        trace(3).getMethodName should be("one")
        trace(3).getLineNumber should be(14)

        trace(4).getFileName should be("FutureTesting.scala")
        trace(4).getMethodName should be("position")
        trace(4).getLineNumber should be(9)
      }
      "async trace back" in {
        val exception = intercept[RuntimeException](Await.result(AsyncTesting.position(), Duration.Inf))
        val trace = exception.getStackTrace

        var i = 0

        trace(i).getFileName should be("AsyncTesting.scala")
        trace(i).getLineNumber should be(34)
        i += 1

        trace(i).getFileName should be("AsyncTesting.scala")
        trace(i).getMethodName should be("three")
        trace(i).getLineNumber should be(32)
        i += 1

        if (trace(i).getMethodName == "three") {
          trace(i).getFileName should be("AsyncTesting.scala")
          trace(i).getMethodName should be("three")
          trace(i).getLineNumber should be(33)
          i += 1
        }

        trace(i).getFileName should be("AsyncTesting.scala")
        trace(i).getMethodName should be("two")
        trace(i).getLineNumber should be(25)
        i += 1

        trace(i).getFileName should be("AsyncTesting.scala")
        trace(i).getMethodName should be("one")
        trace(i).getLineNumber should be(17)
        i += 1

        trace(i).getFileName should be("AsyncTesting.scala")
        trace(i).getMethodName should be("position")
        trace(i).getLineNumber should be(10)
      }
    }
  }
} 
Example 178
Source File: ImplicitLoggingSpec.scala    From scribe   with MIT License 5 votes vote down vote up
package specs

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ImplicitLoggingSpec extends AnyWordSpec with Matchers {
  "implicit logger" should {
    "config properly" in {
      ImplicitLoggingTestObject.initialize()
    }
    "properly log a simple message" in {
      val line = Some(19)

      ImplicitLoggingTestObject.doSomething()
      ImplicitLoggingTestObject.testingModifier.records.length should be(1)
      val record = ImplicitLoggingTestObject.testingModifier.records.head
      record.className should be("specs.ImplicitLoggingTestObject")
      record.methodName should be(Some("doSomething"))
      record.line should be(line)
    }
  }
} 
Example 179
Source File: AbbreviatorSpec.scala    From scribe   with MIT License 5 votes vote down vote up
package specs

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scribe.util.Abbreviator

class AbbreviatorSpec extends AnyWordSpec with Matchers {
  "Abbreviator" should {
    val className1 = "mainPackage.sub.sample.Bar"
    val className2 = "mainPackage.sub.sample.FooBar"

    "properly abbreviate 26 length" in {
      val s = Abbreviator(className1, 26)
      s should be(className1)
    }
    "properly abbreviate 16 length" in {
      val s = Abbreviator(className1, 16)
      s should be("m.sub.sample.Bar")
    }
    "properly abbreviate 15 length" in {
      val s = Abbreviator(className1, 15)
      s should be("m.s.sample.Bar")
    }
    "properly abbreviate 10 length" in {
      val s = Abbreviator(className1, 10)
      s should be("m.s.s.Bar")
    }
    "properly abbreviate 5 length" in {
      val s = Abbreviator(className1, 5)
      s should be("Bar")
    }
    "properly abbreviate 0 length" in {
      val s = Abbreviator(className1, 0)
      s should be("Bar")
    }
    "properly abbreviate longer class name at 5" in {
      val s = Abbreviator(className2, 5, abbreviateName = true)
      s should be("Fo...")
    }
  }
} 
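The expectations above pin down the behaviour: leading package segments collapse to single letters, one at a time, until the name fits, and past that only the class name survives. A sketch that satisfies exactly those cases (not scribe's actual Abbreviator, and it ignores the abbreviateName truncation exercised in the last test):

object AbbreviatorSketch {
  def abbreviate(className: String, max: Int): String = {
    val parts = className.split('.')
    // collapse 0, 1, 2, ... leading segments to their first letter until it fits
    val candidates = (0 until parts.length).iterator.map { n =>
      (parts.take(n).map(_.take(1)) ++ parts.drop(n)).mkString(".")
    }
    candidates.find(_.length <= max).getOrElse(parts.last)
  }

  // abbreviate("mainPackage.sub.sample.Bar", 16)  // "m.sub.sample.Bar"
  // abbreviate("mainPackage.sub.sample.Bar", 5)   // "Bar"
}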
Example 180
Source File: ProjectionStateSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.projection

import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.ProjectionName
import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.WorkerCoordinates
import com.lightbend.lagom.projection.Projection
import com.lightbend.lagom.projection.Started
import com.lightbend.lagom.projection.State
import com.lightbend.lagom.projection.Status
import com.lightbend.lagom.projection.Stopped
import com.lightbend.lagom.projection.Worker
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ProjectionStateSpec extends AnyWordSpec with Matchers {
  private val prj001   = "prj001"
  private val prj002   = "prj002"
  val p1w1             = prj001 + "-workers-1"
  val p1w2             = prj001 + "-workers-2"
  val p1w3             = prj001 + "-workers-3"
  val p2w1             = s"$prj002-workers-1"
  val coordinates001_1 = WorkerCoordinates(prj001, p1w1)
  val coordinates001_2 = WorkerCoordinates(prj001, p1w2)
  val coordinates001_3 = WorkerCoordinates(prj001, p1w3)
  val coordinates002_1 = WorkerCoordinates(prj002, p2w1)

  val nameIndex: Map[ProjectionName, Set[WorkerCoordinates]] = Map(
    prj001 -> Set(coordinates001_1, coordinates001_2, coordinates001_3),
    prj002 -> Set(coordinates002_1)
  )

  val requestedStatus: Map[WorkerCoordinates, Status] = Map(
    coordinates001_1 -> Stopped,
    coordinates001_2 -> Started,
    coordinates001_3 -> Stopped,
    coordinates002_1 -> Started
  )
  val observedStatus: Map[WorkerCoordinates, Status] = Map(
    coordinates001_1 -> Stopped,
    coordinates001_2 -> Stopped,
    coordinates001_3 -> Started,
    coordinates002_1 -> Started
  )

  def findProjection(state: State)(projectionName: String): Option[Projection] =
    state.projections.find(_.name == projectionName)

  def findWorker(state: State)(workerKey: String): Option[Worker] =
    state.projections.flatMap(_.workers).find(_.key == workerKey)

  "ProjectionStateSpec" should {
    "be build from a replicatedData" in {
      val state = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      state.projections.size should equal(2)
      state.projections.flatMap(_.workers).size should equal(4)
      state.projections.flatMap(_.workers).find(_.key == coordinates001_3.asKey) shouldBe Some(
        Worker(p1w3, coordinates001_3.asKey, Stopped, Started)
      )
    }

    "find projection by name" in {
      val state = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      findProjection(state)(prj001) should not be None
    }

    "find worker by key" in {
      val state       = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      val maybeWorker = findWorker(state)("prj001-prj001-workers-3")
      maybeWorker shouldBe Some(
        Worker(p1w3, coordinates001_3.asKey, Stopped, Started)
      )
    }

    "build from default values when workers in nameIndex don't have request or observed values" in {
      val newProjectionName = "new-projection"
      val newWorkerName     = "new-worker-001"
      val newCoordinates    = WorkerCoordinates(newProjectionName, newWorkerName)
      val richIndex = nameIndex ++ Map(
        newProjectionName -> Set(newCoordinates)
      )

      val defaultRequested = Stopped
      val defaultObserved  = Started

      val state =
        State.fromReplicatedData(richIndex, requestedStatus, observedStatus, defaultRequested, defaultObserved)
      val maybeWorker = findWorker(state)(newCoordinates.asKey)
      maybeWorker shouldBe Some(
        Worker(newWorkerName, newCoordinates.asKey, defaultRequested, defaultObserved)
      )
    }
  }
} 
Example 181
Source File: ScalaSupportSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.api

import org.scalatest.Inside
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ScalaSupportSpec extends AnyWordSpec with Matchers with Inside {
  "scala support" should {
    "resolve a function" in {
      val method: ScalaServiceSupport.ScalaMethodCall[String] = testMethod _
      method.method.getDeclaringClass should ===(this.getClass)
      method.method.getName should ===("testMethod")
    }
  }

  def testMethod(s: String): String = s
} 
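
ScalaServiceSupport.ScalaMethodCall is Lagom-internal; what the assertions actually check is ordinary java.lang.reflect.Method metadata captured from the eta-expanded method reference testMethod _. The same metadata is reachable without Lagom (names here are illustrative):

class Greeter {
  def greet(name: String): String = s"hello, $name"
}

val method = classOf[Greeter].getMethod("greet", classOf[String])
method.getName                      // "greet"
method.getDeclaringClass            // class Greeter
method.invoke(new Greeter, "lagom") // "hello, lagom"
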
Example 182
Source File: ConfigurationServiceLocatorSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.api

import java.net.URI
import java.util.concurrent.TimeUnit

import com.typesafe.config.ConfigFactory

import scala.compat.java8.OptionConverters._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ConfigurationServiceLocatorSpec extends AnyWordSpec with Matchers {
  val serviceLocator = new ConfigurationServiceLocator(
    ConfigFactory.parseString(
      """
        |lagom.services {
        |  foo = "http://localhost:10001"
        |  bar = "http://localhost:10002"
        |}
    """.stripMargin
    )
  )

  def locate(serviceName: String) =
    serviceLocator.locate(serviceName).toCompletableFuture.get(10, TimeUnit.SECONDS).asScala

  "ConfigurationServiceLocator" should {
    "return a found service" in {
      locate("foo") should contain(URI.create("http://localhost:10001"))
      locate("bar") should contain(URI.create("http://localhost:10002"))
    }
    "return none for not found service" in {
      locate("none") shouldBe None
    }
  }
} 
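
The locator is backed by a plain Typesafe Config object, so its found/not-found behaviour mirrors the underlying config lookups. A standalone sketch of those lookups, using the same paths as the spec above:

import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  """
    |lagom.services {
    |  foo = "http://localhost:10001"
    |}
  """.stripMargin)

config.getString("lagom.services.foo") // "http://localhost:10001"
config.hasPath("lagom.services.none")  // false -- the locator surfaces this as None
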
Example 183
Source File: TransportExceptionSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.api.transport

import java.util
import java.util.Optional

import com.lightbend.lagom.javadsl.api.deser.DeserializationException
import com.lightbend.lagom.javadsl.api.deser.SerializationException

import scala.collection.immutable
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TransportExceptionSpec extends AnyWordSpec with Matchers {
  val protocolTextPlain = new MessageProtocol(Optional.of("text/plain"), Optional.of("utf-8"), Optional.empty[String])
  val protocolJson      = new MessageProtocol(Optional.of("application/json"), Optional.of("utf-8"), Optional.empty[String])
  val protocolHtml      = new MessageProtocol(Optional.of("text/html"), Optional.of("utf-8"), Optional.empty[String])

  val supportedExceptions: immutable.Seq[TransportException] = List(
    new DeserializationException("some msg - DeserializationException"),
    new BadRequest("some msg - BadRequest"),
    new Forbidden("some msg - Forbidden"),
    new PolicyViolation("some msg - PolicyViolation"),
    new NotFound("some msg - NotFound"),
    new NotAcceptable(util.Arrays.asList(protocolJson, protocolTextPlain), protocolHtml),
    new PayloadTooLarge("some msg - PayloadTooLarge"),
    new UnsupportedMediaType(protocolTextPlain, protocolJson),
    new SerializationException("some msg - SerializationException")
  )

  "Lagom-provided TransportExceptions" should {
    supportedExceptions.foreach { ex =>
      s"be buildable from code and message (${ex.getClass.getName})" in {
        val reconstructed = TransportException.fromCodeAndMessage(ex.errorCode(), ex.exceptionMessage())
        reconstructed.getClass.getName should ===(ex.getClass.getName)
        reconstructed.exceptionMessage() should ===(ex.exceptionMessage())
      }
    }

    // TODO: implement roundtrip de/ser tests like in com.lightbend.lagom.scaladsl.api.ExceptionsSpec
  }
} 
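
Note the registration pattern: supportedExceptions.foreach runs while the class body is constructed, so each exception registers its own named test rather than one test looping internally, and a failure names the exact exception class involved. A minimal sketch of the same technique (the spec and data are illustrative):

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class GeneratedCasesSpec extends AnyWordSpec with Matchers {
  private val cases = Seq("foo" -> 3, "quux" -> 4)

  "String.length" should {
    // One named test is registered per case at construction time.
    cases.foreach { case (input, expected) =>
      s"be $expected for '$input'" in {
        input.length shouldBe expected
      }
    }
  }
}
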
Example 184
Source File: ConfigurationServiceLocatorSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.client

import java.net.URI
import java.util.concurrent.CompletionStage
import java.util.concurrent.TimeUnit
import java.util.function.Supplier
import com.typesafe.config.ConfigFactory

import scala.compat.java8.OptionConverters._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ConfigurationServiceLocatorSpec extends AnyWordSpec with Matchers {
  val serviceLocator = new ConfigurationServiceLocator(
    ConfigFactory.parseString(
      """
        |lagom.services {
        |  foo = "http://localhost:10001"
        |  bar = "http://localhost:10002"
        |}
    """.stripMargin
    ),
    new CircuitBreakersPanel {
      override def withCircuitBreaker[T](id: String, body: Supplier[CompletionStage[T]]): CompletionStage[T] =
        body.get()
    }
  )

  def locate(serviceName: String) =
    serviceLocator.locate(serviceName).toCompletableFuture.get(10, TimeUnit.SECONDS).asScala

  "ConfigurationServiceLocator" should {
    "return a found service" in {
      locate("foo") should contain(URI.create("http://localhost:10001"))
      locate("bar") should contain(URI.create("http://localhost:10002"))
    }
    "return none for not found service" in {
      locate("none") shouldBe None
    }
  }
} 
Example 185
Source File: AdditionalRoutersSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.it.routers

import akka.NotUsed
import com.lightbend.lagom.scaladsl.api.ServiceLocator
import com.lightbend.lagom.scaladsl.server.LagomApplication
import com.lightbend.lagom.scaladsl.server.LagomApplicationContext
import com.lightbend.lagom.scaladsl.server.LagomServer
import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.ServiceTest
import com.lightbend.lagom.scaladsl.testkit.ServiceTest.TestServer
import org.scalatest.concurrent.ScalaFutures
import play.api.http.DefaultWriteables
import play.api.http.HeaderNames
import play.api.libs.ws.WSClient
import play.api.libs.ws.ahc.AhcWSComponents
import play.api.mvc
import play.api.mvc._
import play.api.routing.SimpleRouterImpl
import play.api.test.FakeHeaders
import play.api.test.FakeRequest
import play.api.test.Helpers
import play.core.j.JavaRouterAdapter
import play.api.test.Helpers._

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AdditionalRoutersSpec extends AnyWordSpec with Matchers with ScalaFutures {
  "A LagomServer " should {
    "be extensible with a Play Router" in withServer { server =>
      val request = FakeRequest(GET, "/hello/")
      val result  = Helpers.route(server.application.application, request).get.futureValue

      result.header.status shouldBe OK
      val body = result.body.consumeData(server.materializer).futureValue.utf8String
      body shouldBe "hello"
    }
  }

  def withServer(block: TestServer[TestApp] => Unit): Unit = {
    ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra(false).withCluster(false)) { ctx =>
      new TestApp(ctx)
    } { server =>
      block(server)
    }
  }

  class TestApp(context: LagomApplicationContext)
      extends LagomApplication(context)
      with AhcWSComponents
      with LocalServiceLocator {
    override def lagomServer: LagomServer =
      serverFor[AdditionalRoutersService](new AdditionalRoutersServiceImpl)
        .additionalRouter(FixedResponseRouter("hello").withPrefix("/hello"))
  }
}

object FixedResponseRouter {
  def apply(msg: String) =
    new SimpleRouterImpl({
      case _ =>
        new Action[Unit] {
          override def parser: BodyParser[Unit] = mvc.BodyParsers.utils.empty

          override def apply(request: Request[Unit]): Future[Result] =
            Future.successful(Results.Ok(msg))

          override def executionContext: ExecutionContext =
            scala.concurrent.ExecutionContext.global
        }
    })
} 
Example 186
Source File: MessageSerializerSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.api.deser

import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer._
import com.lightbend.lagom.scaladsl.api.transport.DeserializationException
import com.lightbend.lagom.scaladsl.api.transport.MessageProtocol
import play.api.libs.json._
import scala.collection.immutable.Seq
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MessageSerializerSpec extends AnyWordSpec with Matchers {
  case class Dummy(prop: Option[String])

  "ByteString-to-PlayJson (via JsValueMessageSerializer)" should {
    "deserialize empty ByteString as JSON null" in {
      val deserializer = JsValueMessageSerializer.deserializer(MessageProtocol.empty)
      deserializer.deserialize(ByteString.empty) shouldBe JsNull
    }
  }

  implicit def optionFormat[T: Format]: Format[Option[T]] = new Format[Option[T]] {
    override def reads(json: JsValue): JsResult[Option[T]] = json.validateOpt[T]
    override def writes(o: Option[T]): JsValue = o match {
      case Some(t) => implicitly[Writes[T]].writes(t)
      case None    => JsNull
    }
  }

  "PlayJson-to-RequestPayload formatters" should {
    implicit val format: Format[Dummy] = Json.format

    "fail when converting JSNull into T." in {
      intercept[JsResultException] {
        JsNull.as[Dummy]
      }
    }

    "convert JS null to None by default" in {
      val dummy = JsNull.as[Option[Dummy]]
      dummy shouldBe None
    }
  }

  "ByteString-to-RequestPayload (for JSON payloads, using jsValueFormatMessageSerializer)" should {
    "deserialize empty ByteString's to Option[T] as None" in {
      val serializer = jsValueFormatMessageSerializer(JsValueMessageSerializer, optionFormat[String])
      val out        = serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      out shouldBe None
    }

    "fail to deserialize empty ByteString to Dummy(prop: Option[T])" in {
      val format: Format[Dummy] = Json.format
      val serializer            = jsValueFormatMessageSerializer(JsValueMessageSerializer, format)

      intercept[DeserializationException] {
        serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      }
    }
  }

  "ByteString-to-ByteString" should {
    "serialize any request of type ByteString to the same ByteSting" in {
      val serializer = NoopMessageSerializer.serializerForRequest
      val out        = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "serialize any response of type ByteString to the same ByteSting" in {
      val serializer = NoopMessageSerializer.serializerForResponse(Seq(MessageProtocol.empty))
      val out        = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "deserialize any ByteString's to the same ByteSting" in {
      val deserializer = NoopMessageSerializer.deserializer(MessageProtocol.empty)
      val out          = deserializer.deserialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }
  }
} 
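
The optionFormat defined mid-spec gives top-level Options an explicit mapping: JsNull reads as None (via validateOpt) and None writes back as JsNull. The case-class behaviours it sits beside are standard play-json, shown standalone here (Dummy as in the spec):

import play.api.libs.json._

case class Dummy(prop: Option[String])
implicit val dummyFormat: Format[Dummy] = Json.format[Dummy]

Json.parse("""{"prop": "x"}""").as[Dummy] // Dummy(Some("x"))
Json.parse("{}").as[Dummy]                // Dummy(None): an absent field reads as None
JsNull.validate[Dummy]                    // JsError -- matches the intercepted JsResultException above
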
Example 187
Source File: ServiceAclResolverSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.server

import akka.NotUsed
import akka.stream.scaladsl.Source
import com.lightbend.lagom.internal.scaladsl.client.ScaladslServiceResolver
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceAcl
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.deser.DefaultExceptionSerializer
import com.lightbend.lagom.scaladsl.api.transport.Method

import scala.concurrent.Future
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceAclResolverSpec extends AnyWordSpec with Matchers {
  class SomeService extends Service {
    private def echo[A]                                                                 = ServiceCall[A, A](Future.successful)
    def callString: ServiceCall[String, String]                                         = echo
    def callStreamed: ServiceCall[Source[String, NotUsed], Source[String, NotUsed]]     = echo
    def callNotUsed: ServiceCall[NotUsed, NotUsed]                                      = echo
    def restCallString: ServiceCall[String, String]                                     = echo
    def restCallStreamed: ServiceCall[Source[String, NotUsed], Source[String, NotUsed]] = echo
    def restCallNotUsed: ServiceCall[NotUsed, NotUsed]                                  = echo
    def withAutoAclTrue: ServiceCall[String, String]                                    = echo
    def withAutoAclFalse: ServiceCall[String, String]                                   = echo

    override def descriptor = {
      import Service._

      named("some-service").withCalls(
        call(callString),
        call(callStreamed),
        call(callNotUsed),
        restCall(Method.PUT, "/restcallstring", restCallString),
        restCall(Method.PUT, "/restcallstreamed", restCallStreamed),
        restCall(Method.PUT, "/restcallnotused", restCallNotUsed),
        call(withAutoAclTrue).withAutoAcl(true),
        call(withAutoAclFalse).withAutoAcl(false)
      )
    }
  }

  val resolver = new ScaladslServiceResolver(DefaultExceptionSerializer.Unresolved)

  "ScaladslServiceResolver" when {
    "when auto acl is true" should {
      val acls = resolver.resolve(new SomeService().descriptor.withAutoAcl(true)).acls

      "default to POST for service calls with used request messages" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/callString\\E"))
      }

      "default to GET for streamed service calls" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.GET, "\\Q/callStreamed\\E"))
      }

      "default to GET for service calls with not used request messages" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.GET, "\\Q/callNotUsed\\E"))
      }

      "use the specified method and path for rest calls" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallstring\\E"))
      }

      "use the specified method for rest calls when the request is streamed" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallstreamed\\E"))
      }

      "use the specified method and path for rest calls even when the request is unused" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallnotused\\E"))
      }

      "create an acl when an individual method has auto acl set to true" in {
        acls should contain(ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclTrue\\E"))
      }

      "not create an acl when an individual method has auto acl set to false" in {
        acls should not contain ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclFalse\\E")
      }

      "generate the right number of acls" in {
        acls should have size 7
      }
    }

    "auto acl is false" should {
      val acls = resolver.resolve(new SomeService().descriptor.withAutoAcl(false)).acls

      "create an acl when an individual method has auto acl set to true" in {
        acls should contain only ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclTrue\\E")
      }
    }
  }
} 
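
The \Q...\E pairs in the expected path regexes are standard java.util.regex literal quoting: everything between the markers is matched verbatim, so path characters such as '/' need no escaping. Pattern.quote produces exactly this form:

import java.util.regex.Pattern

val quoted = Pattern.quote("/restcallstring") // the string \Q/restcallstring\E
"/restcallstring".matches(quoted)             // true
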
Example 188
Source File: ServiceLocatorHolderSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.persistence.cassandra

import akka.actor.ActorSystem
import akka.testkit.TestKit
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.Await
import scala.concurrent.duration._
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceLocatorHolderSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
  val system = ActorSystem("test")

  protected override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(actorSystem = system, verifySystemShutdown = true)
  }

  "ServiceLocatorHolder" should {
    "timeout when no service locator is found" in {
      val eventually = ServiceLocatorHolder(system).serviceLocatorEventually
      assertThrows[NoServiceLocatorException](
        Await.result(eventually, ServiceLocatorHolder.TIMEOUT + 2.seconds)
      )
    }
  }
} 
Example 189
Source File: ServiceLocatorSessionProviderSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.persistence.cassandra

import java.net.InetSocketAddress
import java.net.URI

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.Await
import scala.concurrent.Future
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceLocatorSessionProviderSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
  val system         = ActorSystem("test")
  val config: Config = ConfigFactory.load()
  val uri            = new URI("http://localhost:8080")

  protected override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(actorSystem = system, verifySystemShutdown = true)
  }

  val locator = new ServiceLocatorAdapter {
    override def locateAll(name: String): Future[List[URI]] = {
      name match {
        case "existing" => Future.successful(List(uri))
        case "absent"   => Future.successful(Nil)
      }
    }
  }

  val providerConfig: Config = config.getConfig("lagom.persistence.read-side.cassandra")
  val provider               = new ServiceLocatorSessionProvider(system, providerConfig)
  ServiceLocatorHolder(system).setServiceLocator(locator)

  "ServiceLocatorSessionProvider" should {
    "Get the address when the contact points exist" in {
      val future = provider.lookupContactPoints("existing")

      Await.result(future, 3.seconds) mustBe Seq(new InetSocketAddress(uri.getHost, uri.getPort))
    }

    "Fail the future when the contact points do not exist" in {
      val future = provider.lookupContactPoints("absent")

      intercept[NoContactPointsException] {
        Await.result(future, 3.seconds)
      }
    }
  }
} 
Example 190
Source File: ServiceTestSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import javax.inject.Inject
import akka.japi.function.Procedure
import com.google.inject.AbstractModule
import com.lightbend.lagom.javadsl.api.Descriptor
import com.lightbend.lagom.javadsl.api.Service
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.javadsl.server.ServiceGuiceSupport
import com.lightbend.lagom.javadsl.testkit.ServiceTest.Setup
import com.lightbend.lagom.javadsl.testkit.ServiceTest.TestServer
import play.inject.guice.GuiceApplicationBuilder

import scala.collection.JavaConverters._
import scala.compat.java8.FunctionConverters._
import scala.util.Properties
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        withServer(ServiceTest.defaultSetup.withJdbc()) { _ =>
          ()
        }
      }
    }
  }

  def withServer(setup: Setup)(block: TestServer => Unit): Unit = {
    ServiceTest.withServer(setup.configureBuilder((registerService _).asJava), block(_))
  }

  def registerService(builder: GuiceApplicationBuilder): GuiceApplicationBuilder =
    builder.bindings(new TestServiceModule)

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl @Inject() (persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestServiceModule extends AbstractModule with ServiceGuiceSupport {
  override def configure(): Unit = bindService(classOf[TestService], classOf[TestServiceImpl])
} 
Example 191
Source File: ServiceTestSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.scaladsl.playjson.EmptyJsonSerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.server._
import play.api.db.HikariCPComponents
import play.api.libs.ws.ahc.AhcWSComponents

import scala.collection.JavaConverters._
import scala.util.Properties
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        ServiceTest.withServer(ServiceTest.defaultSetup.withJdbc())(new JdbcTestApplication(_)) { _ =>
          ()
        }
      }
    }
  }

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl(persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with LocalServiceLocator
    with AhcWSComponents { self: PersistenceComponents =>

  override lazy val jsonSerializerRegistry: JsonSerializerRegistry = EmptyJsonSerializerRegistry

  override lazy val lagomServer: LagomServer = serverFor[TestService](new TestServiceImpl(persistentEntityRegistry))
}

class CassandraTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with CassandraPersistenceComponents

class JdbcTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with JdbcPersistenceComponents
    with HikariCPComponents 
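
TestApplication declares a self-type (self: PersistenceComponents =>) instead of extending the trait, which keeps the persistence backend pluggable: each concrete subclass picks Cassandra or JDBC components at mix-in time. A minimal sketch of the self-type pattern on its own (names are illustrative):

trait Database { def url: String }

trait H2Database extends Database { override val url = "jdbc:h2:mem:test" }

// The self-type obliges every concrete subclass to mix in some Database,
// without BaseApp committing to a particular one.
abstract class BaseApp { self: Database =>
  def describe: String = s"app backed by $url"
}

class H2App extends BaseApp with H2Database
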
Example 192
Source File: ReleaseTwoIntegrationSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package support

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.Application
import play.api.http.{HeaderNames, MimeTypes, Status}
import play.api.inject.guice.GuiceApplicationBuilder
import support.functional.FunctionalSyntax
import support.wiremock.WireMockSupport

trait ReleaseTwoIntegrationSpec extends AnyWordSpec
  with GuiceOneServerPerSuite
  with WireMockSupport
  with Matchers
  with Status
  with HeaderNames
  with MimeTypes
  with FakeApplicationConfig
  with FunctionalSyntax {

  override implicit lazy val app: Application = new GuiceApplicationBuilder()
    .configure(fakeApplicationConfig + ("feature-switch.release-2.enabled" -> true))
    .build()

} 
Example 193
Source File: IntegrationSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package support

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.Application
import play.api.http.{HeaderNames, MimeTypes, Status}
import play.api.inject.guice.GuiceApplicationBuilder
import support.functional.FunctionalSyntax
import support.wiremock.WireMockSupport

trait IntegrationSpec extends AnyWordSpec
  with GuiceOneServerPerSuite
  with WireMockSupport
  with Matchers
  with Status
  with HeaderNames
  with MimeTypes
  with FakeApplicationConfig
  with FunctionalSyntax {

  override implicit lazy val app: Application = new GuiceApplicationBuilder()
    .configure(fakeApplicationConfig)
    .build()

} 
Example 194
Source File: ScalarLeafsSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class ScalarLeafsSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new ScalarLeafs)

  "Validate: Scalar leafs" should {
    "valid scalar selection" in expectPasses(
      """
        fragment scalarSelection on Dog {
          barks
        }
      """)

    "object type missing selection" in expectFails(
      """
        query directQueryOnObjectWithoutSubFields {
          human
        }
      """,
      List(
        "Field 'human' of type 'Human' must have a sub selection." -> Some(Pos(3, 11))
      ))

    "interface type missing selection" in expectFails(
      """
        {
          human { pets }
        }
      """,
      List(
        "Field 'pets' of type '[Pet]' must have a sub selection." -> Some(Pos(3, 19))
      ))

    "valid scalar selection with args" in expectPasses(
      """
        fragment scalarSelectionWithArgs on Dog {
          doesKnowCommand(dogCommand: SIT)
        }
      """)

    "scalar selection not allowed on Boolean" in expectFails(
      """
        fragment scalarSelectionsNotAllowedOnBoolean on Dog {
          barks { sinceWhen }
        }
      """,
      List(
        "Field 'barks' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11))
      ))

    "scalar selection not allowed on Enum" in expectFails(
      """
        fragment scalarSelectionsNotAllowedOnEnum on Cat {
          furColor { inHexdec }
        }
      """,
      List(
        "Field 'furColor' of type 'FurColor' must not have a sub selection." -> Some(Pos(3, 11))
      ))

    "scalar selection not allowed with args" in expectFails(
      """
        fragment scalarSelectionsNotAllowedWithArgs on Dog {
          doesKnowCommand(dogCommand: SIT) { sinceWhen }
        }
      """,
      List(
        "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11))
      ))

    "Scalar selection not allowed with directives" in expectFails(
      """
        fragment scalarSelectionsNotAllowedWithDirectives on Dog {
          name @include(if: true) { isAlsoHumanName }
        }
      """,
      List(
        "Field 'name' of type 'String' must not have a sub selection." -> Some(Pos(3, 11))
      ))

    "Scalar selection not allowed with directives and args" in expectFails(
      """
        fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog {
          doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen }
        }
      """,
      List(
        "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11))
      ))
  }
} 
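
This and the remaining sangria examples share one shape: override defaultRule with the validation rule under test, then drive GraphQL documents through expectPasses, expectFails and expectFailsPosList, helpers from sangria's test-only ValidationSupport trait (not part of the published API). Reduced to its moving parts, a spec for such a rule looks like this (the rule is a stand-in; query, message, and position are taken from the example above):

import sangria.util.{Pos, ValidationSupport}
import sangria.validation.rules.ScalarLeafs
import org.scalatest.wordspec.AnyWordSpec

class MyRuleSpec extends AnyWordSpec with ValidationSupport {

  // Substitute the rule under test.
  override val defaultRule = Some(new ScalarLeafs)

  "Validate: my rule" should {
    "accept a conforming document" in expectPasses(
      """
        fragment scalarSelection on Dog {
          barks
        }
      """)

    "reject a violating document, reporting its position" in expectFails(
      """
        {
          human
        }
      """,
      List(
        "Field 'human' of type 'Human' must have a sub selection." -> Some(Pos(3, 11))
      ))
  }
}
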
Example 195
Source File: KnownFragmentNamesSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class KnownFragmentNamesSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new KnownFragmentNames)

  "Validate: Known fragment names" should {
    "known fragment names are valid" in expectPasses(
      """
        {
          human(id: 4) {
            ...HumanFields1
            ... on Human {
              ...HumanFields2
            }
            ... {
              name
            }
          }
        }
        fragment HumanFields1 on Human {
          name
          ...HumanFields3
        }
        fragment HumanFields2 on Human {
          name
        }
        fragment HumanFields3 on Human {
          name
        }
      """)

    "unknown fragment names are invalid" in expectFails(
      """
        {
          human(id: 4) {
            ...UnknownFragment1
            ... on Human {
              ...UnknownFragment2
            }
          }
        }
        fragment HumanFields on Human {
          name
          ...UnknownFragment3
        }
      """,
      List(
        "Unknown fragment 'UnknownFragment1'." -> Some(Pos(4, 13)),
        "Unknown fragment 'UnknownFragment2'." -> Some(Pos(6, 15)),
        "Unknown fragment 'UnknownFragment3'." -> Some(Pos(12, 11))
      ))
  }
} 
Example 196
Source File: FragmentsOnCompositeTypesSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class FragmentsOnCompositeTypesSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new FragmentsOnCompositeTypes)

  "Validate: Fragments on composite types" should {
    "object is valid fragment type" in expectPasses(
      """
        fragment validFragment on Dog {
          barks
        }
      """)

    "interface is valid fragment type" in expectPasses(
      """
        fragment validFragment on Pet {
          name
        }
      """)

    "object is valid inline fragment type" in expectPasses(
      """
        fragment validFragment on Pet {
          ... on Dog {
            barks
          }
        }
      """)

    "union is valid fragment type" in expectPasses(
      """
        fragment validFragment on CatOrDog {
          __typename
        }
      """)

    "scalar is invalid fragment type" in expectFails(
      """
        fragment scalarFragment on Boolean {
          bad
        }
      """,
      List(
        "Fragment 'scalarFragment' cannot condition on non composite type 'Boolean'." -> Some(Pos(2, 36))
      ))

    "enum is invalid fragment type" in expectFails(
      """
        fragment scalarFragment on FurColor {
          bad
        }
      """,
      List(
        "Fragment 'scalarFragment' cannot condition on non composite type 'FurColor'." -> Some(Pos(2, 36))
      ))

    "input object is invalid fragment type" in expectFails(
      """
        fragment inputFragment on ComplexInput {
          stringField
        }
      """,
      List(
        "Fragment 'inputFragment' cannot condition on non composite type 'ComplexInput'." -> Some(Pos(2, 35))
      ))

    "scalar is invalid inline fragment type" in expectFails(
      """
        fragment invalidFragment on Pet {
          ... on String {
            barks
          }
        }
      """,
      List(
        "Fragment cannot condition on non composite type 'String'." -> Some(Pos(3, 18))
      ))

    "inline fragment without type is valid" in expectPasses(
      """
        fragment validFragment on Pet {
          ... {
            name
          }
        }
      """)
  }
} 
Example 197
Source File: SingleFieldSubscriptionsSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class SingleFieldSubscriptionsSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new SingleFieldSubscriptions)

  "Validate: Subscriptions with single field" should {
    "valid subscription" in expectPasses(
      """
        subscription ImportantEmails {
          importantEmails
        }
      """)

    "fails with more than one root field" in expectFails(
      """
        subscription ImportantEmails {
          importantEmails
          notImportantEmails
        }
      """,
      List(
        "Subscription 'ImportantEmails' must select only one top level field." -> Some(Pos(4, 11))
      ))

    "fails with more than one root field including introspection" in expectFails(
      """
        subscription ImportantEmails {
          importantEmails
          __typename
        }
      """,
      List(
        "Subscription 'ImportantEmails' must select only one top level field." -> Some(Pos(4, 11))
      ))

    "fails with many more than one root field" in expectFailsPosList(
      """
        subscription ImportantEmails {
          importantEmails
          notImportantEmails
          spamEmails
        }
      """,
      List(
        "Subscription 'ImportantEmails' must select only one top level field." -> List(Pos(4, 11), Pos(5, 11))
      ))

    "fails with more than one root field in anonymous subscriptions" in expectFailsPosList(
      """
        subscription {
          importantEmails
          notImportantEmails
        }
      """,
      List(
        "Anonymous Subscription must select only one top level field." -> List(Pos(4, 11))
      ))
  }
} 
Example 198
Source File: KnownTypeNamesSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class KnownTypeNamesSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new KnownTypeNames)

  "Validate: Known type names" should {
    "known type names are valid" in expectPasses(
      """
        query Foo($var: String, $required: [String!]!) {
          user(id: 4) {
            pets { ... on Pet { name }, ...PetFields, ... { name }}
          }
        }
        fragment PetFields on Pet {
          name
        }
      """)

    "unknown type names are invalid" in expectFails(
      """
        query Foo($var: JumbledUpLetters) {
          user(id: 4) {
            name
            pets { ... on Badger { name }, ...PetFields }
          }
        }
        fragment PetFields on Peettt {
          name
        }
      """,
      List(
        "Unknown type 'JumbledUpLetters'." -> Some(Pos(2, 25)),
        "Unknown type 'Badger'." -> Some(Pos(5, 27)),
        "Unknown type 'Peettt'. Did you mean 'Pet'?" -> Some(Pos(8, 31))
      ))
  }
} 
Example 199
Source File: LoneAnonymousOperationSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class LoneAnonymousOperationSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new LoneAnonymousOperation)

  "Validate: Anonymous operation must be alone" should {
    "no operations" in expectPasses(
      """
        fragment fragA on Type {
          field
        }
      """)

    "one anon operation" in expectPasses(
      """
        {
          field
        }
      """)

    "multiple named operations" in expectPasses(
      """
        query Foo {
          field
        }

        query Bar {
          field
        }
      """)

    "anon operation with fragment" in expectPasses(
      """
        {
          ...Foo
        }
        fragment Foo on Type {
          field
        }
      """)

    "multiple anon operations" in expectFails(
      """
        {
          fieldA
        }
        {
          fieldB
        }
      """,
      List(
        "This anonymous operation must be the only defined operation." -> Some(Pos(2, 9)),
        "This anonymous operation must be the only defined operation." -> Some(Pos(5, 9))
      ))

    "anon operation with another operation" in expectFails(
      """
        {
          fieldA
        }
        mutation Foo {
          fieldB
        }
      """,
      List(
        "This anonymous operation must be the only defined operation." -> Some(Pos(2, 9))
      ))

    "anon operation with another operation with subscription" in expectFails(
      """
        {
          fieldA
        }
        subscription Foo {
          fieldB
        }
      """,
      List(
        "This anonymous operation must be the only defined operation." -> Some(Pos(2, 9))
      ))
  }
} 
Example 200
Source File: UniqueDirectivesPerLocationSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.validation.rules

import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec

class UniqueDirectivesPerLocationSpec extends AnyWordSpec with ValidationSupport {

  override val defaultRule = Some(new UniqueDirectivesPerLocation)

  "Validate: Directives Are Unique Per Location" should {
    "no directives" in expectPasses(
      """
        fragment Test on Type {
          field
        }
      """)

    "unique directives in different locations" in expectPasses(
      """
        fragment Test on Type @directiveA {
          field @directiveB
        }
      """)

    "unique directives in same locations" in expectPasses(
      """
        fragment Test on Type @directiveA @directiveB {
          field @directiveA @directiveB
        }
      """)

    "same directives in different locations" in expectPasses(
      """
        fragment Test on Type @directiveA {
          field @directiveA
        }
      """)

    "same directives in similar locations" in expectPasses(
      """
        fragment Test on Type {
          field @directive
          field @directive
        }
      """)

    "duplicate directives in one location" in expectFailsPosList(
      """
        fragment Test on Type {
          field @directive @directive
        }
      """,
      List(
        "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28))
      ))

    "many duplicate directives in one location" in expectFailsPosList(
      """
        fragment Test on Type {
          field @directive @directive @directive
        }
      """,
      List(
        "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28)),
        "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 39))
      ))

    "different duplicate directives in one location" in expectFailsPosList(
      """
        fragment Test on Type {
          field @directiveA @directiveB @directiveA @directiveB
        }
      """,
      List(
        "The directive 'directiveA' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 41)),
        "The directive 'directiveB' can only be used once at this location." -> List(Pos(3, 29), Pos(3, 53))
      ))

    "duplicate directives in many locations" in expectFailsPosList(
      """
        fragment Test on Type @directive @directive {
          field @directive @directive
        }
      """,
      List(
        "The directive 'directive' can only be used once at this location." -> List(Pos(2, 31), Pos(2, 42)),
        "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28))
      ))
  }
}