monix.execution.Scheduler.Implicits.global Scala Examples

The following examples show how to use monix.execution.Scheduler.Implicits.global. They are drawn from open-source projects; the project and source file for each example are noted in its header.
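
For context, here is a minimal, self-contained sketch (not taken from any of the projects below) of what the import does: monix.execution.Scheduler.Implicits.global puts Monix's default Scheduler into implicit scope, which a Task needs in order to actually run.

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

object MinimalExample extends App {
  val task = Task { 21 * 2 }
  // runToFuture and runSyncUnsafe both require an implicit Scheduler in scope
  println(task.runSyncUnsafe()) // prints 42
}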
Example 1
Source File: ParEnvSuite.scala    From tofu   with Apache License 2.0
package tofu.env

import cats.Parallel
import cats.instances.list._
import monix.execution.Scheduler.Implicits.global
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class ParEnvSuite extends AnyFlatSpec with Matchers {
  "parSequence" should "not stack overflow on large collection" in {

    Parallel
      .parSequence(
        List
          .range(1, 10000)
          .map(i => Env.fromFunc((j: Int) => i * j))
      )
      .run(3)
      .runSyncUnsafe(Duration.Inf) shouldBe List.range(3, 30000, 3)
  }
} 
Example 2
Source File: Server.scala    From core   with Apache License 2.0
package com.smartbackpackerapp

import cats.Parallel
import cats.effect.Effect
import com.smartbackpackerapp.http.auth.JwtTokenAuthMiddleware
import fs2.StreamApp.ExitCode
import fs2.{Scheduler, Stream, StreamApp}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.http4s.client.blaze.Http1Client
import org.http4s.server.blaze.BlazeBuilder

object Server extends HttpServer[Task, Task.Par]

class HttpServer[F[_], G[_]](implicit F: Effect[F], P: Parallel[F, G]) extends StreamApp[F] {

  // Workaround until something like mirror comes out: https://github.com/typelevel/cats/pull/2019
  implicit val parallel: Parallel[F, F] = P.asInstanceOf[Parallel[F, F]]

  private lazy val ApiToken: F[Option[String]] = F.delay(sys.env.get("SB_API_TOKEN"))

  override def stream(args: List[String], requestShutdown: F[Unit]): Stream[F, ExitCode] =
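    // fs2.Scheduler, made implicit for components that need timed execution; distinct from the monix Scheduler imported above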
    Scheduler(corePoolSize = 2).flatMap { implicit scheduler =>
      for {
        httpClient      <- Http1Client.stream[F]()
        ctx             = new Module[F](httpClient)
        _               <- Stream.eval(ctx.migrateDb)
        _               <- Stream.eval(ctx.startMetricsReporter)
        apiToken        <- Stream.eval(ApiToken)
        authMiddleware  <- Stream.eval(JwtTokenAuthMiddleware[F](apiToken))
        exitCode        <- BlazeBuilder[F]
                            .bindHttp(sys.env.getOrElse("PORT", "8080").toInt, "0.0.0.0")
                            .mountService(authMiddleware(ctx.httpEndpointsWithMetrics))
                            .serve
      } yield exitCode
    }

} 
Example 3
Source File: OkHttpHighLevelMonixWebsocketTest.scala    From sttp   with Apache License 2.0
package sttp.client.okhttp.monix

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.Assertion
import sttp.client._
import sttp.client.impl.monix.{TaskMonadAsyncError, convertMonixTaskToFuture}
import sttp.client.monad.MonadError
import sttp.client.monad.syntax._
import sttp.client.okhttp.WebSocketHandler
import sttp.client.okhttp.monix.internal.SendMessageException
import sttp.client.testing.ConvertToFuture
import sttp.client.testing.websocket.HighLevelWebsocketTest
import sttp.client.ws.WebSocket
import sttp.client.testing.HttpTest.wsEndpoint

import scala.concurrent.duration._

class OkHttpHighLevelMonixWebsocketTest extends HighLevelWebsocketTest[Task, WebSocketHandler] {
  override implicit val backend: SttpBackend[Task, Nothing, WebSocketHandler] =
    OkHttpMonixBackend().runSyncUnsafe()
  override implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture
  override implicit val monad: MonadError[Task] = TaskMonadAsyncError

  override def createHandler: Option[Int] => Task[WebSocketHandler[WebSocket[Task]]] = MonixWebSocketHandler(_)

  it should "error if the endpoint is not a websocket" in {
    monad
      .handleError {
        basicRequest
          .get(uri"$wsEndpoint/echo")
          .openWebsocketF(createHandler(None))
          .map(_ => fail: Assertion)
      } {
        case e: Exception => (e shouldBe a[SttpClientException.ReadException]).unit
      }
      .toFuture()
  }

  it should "error if incoming messages overflow the buffer" in {
    basicRequest
      .get(uri"$wsEndpoint/ws/echo")
      .openWebsocketF(createHandler(Some(3)))
      .flatMap { response =>
        val ws = response.result
        send(ws, 1000) >> eventually(10.millis, 400)(ws.isOpen.map(_ shouldBe false))
      }
      .onErrorRecover {
        case _: SendMessageException => succeed
      }
      .toFuture()
  }

  override def eventually[T](interval: FiniteDuration, attempts: Int)(f: => Task[T]): Task[T] = {
    (Task.sleep(interval) >> f).onErrorRestart(attempts)
  }
} 
Example 4
Source File: HttpClientHighLevelMonixWebsocketTest.scala    From sttp   with Apache License 2.0
package sttp.client.httpclient.monix

import java.nio.ByteBuffer

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable
import sttp.client._
import sttp.client.httpclient.WebSocketHandler
import sttp.client.impl.monix.{TaskMonadAsyncError, convertMonixTaskToFuture}
import sttp.client.monad.MonadError
import sttp.client.testing.ConvertToFuture
import sttp.client.testing.websocket.HighLevelWebsocketTest
import sttp.client.ws.WebSocket
import sttp.client.testing.HttpTest.wsEndpoint

import scala.concurrent.duration._

class HttpClientHighLevelMonixWebsocketTest extends HighLevelWebsocketTest[Task, WebSocketHandler] {
  implicit val backend: SttpBackend[Task, Observable[ByteBuffer], WebSocketHandler] =
    HttpClientMonixBackend().runSyncUnsafe()
  implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture
  implicit val monad: MonadError[Task] = TaskMonadAsyncError

  override def createHandler: Option[Int] => Task[WebSocketHandler[WebSocket[Task]]] = _ => MonixWebSocketHandler()

  it should "handle backpressure correctly" in {
    basicRequest
      .get(uri"$wsEndpoint/ws/echo")
      .openWebsocketF(createHandler(None))
      .flatMap { response =>
        val ws = response.result
        send(ws, 1000) >> eventually(10.millis, 500) { ws.isOpen.map(_ shouldBe true) }
      }
      .toFuture()
  }

  override def eventually[T](interval: FiniteDuration, attempts: Int)(f: => Task[T]): Task[T] = {
    (Task.sleep(interval) >> f).onErrorRestart(attempts.toLong)
  }
} 
Example 5
Source File: WebsocketMonix.scala    From sttp   with Apache License 2.0
package sttp.client.examples

import monix.eval.Task
import sttp.client._
import sttp.client.ws.{WebSocket, WebSocketResponse}
import sttp.model.ws.WebSocketFrame
import sttp.client.asynchttpclient.monix.{AsyncHttpClientMonixBackend, MonixWebSocketHandler}

object WebsocketMonix extends App {
  import monix.execution.Scheduler.Implicits.global

  def useWebsocket(ws: WebSocket[Task]): Task[Unit] = {
    def send(i: Int) = ws.send(WebSocketFrame.text(s"Hello $i!"))
    val receive = ws.receiveText().flatMap(t => Task(println(s"RECEIVED: $t")))
    send(1) *> send(2) *> receive *> receive *> ws.close
  }

  val websocketTask: Task[Unit] = AsyncHttpClientMonixBackend().flatMap { implicit backend =>
    val response: Task[WebSocketResponse[WebSocket[Task]]] = basicRequest
      .get(uri"wss://echo.websocket.org")
      .openWebsocketF(MonixWebSocketHandler())

    response
      .flatMap(r => useWebsocket(r.result))
      .guarantee(backend.close())
  }

  websocketTask.runSyncUnsafe()
} 
Example 6
Source File: PostSerializeJsonMonixAsyncHttpClientCirce.scala    From sttp   with Apache License 2.0
package sttp.client.examples

object PostSerializeJsonMonixAsyncHttpClientCirce extends App {
  import sttp.client._
  import sttp.client.circe._
  import sttp.client.asynchttpclient.monix._
  import io.circe.generic.auto._
  import monix.eval.Task

  case class Info(x: Int, y: String)

  val postTask = AsyncHttpClientMonixBackend().flatMap { implicit backend =>
    val r = basicRequest
      .body(Info(91, "abc"))
      .post(uri"https://httpbin.org/post")

    r.send()
      .flatMap { response => Task(println(s"""Got ${response.code} response, body:\n${response.body}""")) }
      .guarantee(backend.close())
  }

  import monix.execution.Scheduler.Implicits.global
  postTask.runSyncUnsafe()
} 
Example 7
Source File: AsyncHttpClientHighLevelMonixWebsocketTest.scala    From sttp   with Apache License 2.0
package sttp.client.asynchttpclient.monix

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import sttp.client._
import sttp.client.asynchttpclient.{AsyncHttpClientHighLevelWebsocketTest, WebSocketHandler}
import sttp.client.impl.monix.{TaskMonadAsyncError, convertMonixTaskToFuture}
import sttp.client.monad.MonadError
import sttp.client.testing.ConvertToFuture
import sttp.client.ws.WebSocket

import scala.concurrent.duration._

class AsyncHttpClientHighLevelMonixWebsocketTest extends AsyncHttpClientHighLevelWebsocketTest[Task] {
  override implicit val backend: SttpBackend[Task, Nothing, WebSocketHandler] =
    AsyncHttpClientMonixBackend().runSyncUnsafe()
  override implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture
  override implicit val monad: MonadError[Task] = TaskMonadAsyncError

  override def createHandler: Option[Int] => Task[WebSocketHandler[WebSocket[Task]]] = MonixWebSocketHandler(_)

  override def eventually[T](interval: FiniteDuration, attempts: Int)(f: => Task[T]): Task[T] = {
    (Task.sleep(interval) >> f).onErrorRestart(attempts.toLong)
  }
} 
Example 8
Source File: AsyncHttpClientMonixHttpTest.scala    From sttp   with Apache License 2.0
package sttp.client.asynchttpclient.monix

import java.util.concurrent.TimeoutException

import monix.eval.Task
import sttp.client._
import sttp.client.impl.monix.convertMonixTaskToFuture
import sttp.client.testing.{CancelTest, ConvertToFuture, HttpTest}
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.duration._

class AsyncHttpClientMonixHttpTest extends HttpTest[Task] with CancelTest[Task, Nothing] {
  override implicit val backend: SttpBackend[Task, Nothing, NothingT] = AsyncHttpClientMonixBackend().runSyncUnsafe()
  override implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture

  override def timeoutToNone[T](t: Task[T], timeoutMillis: Int): Task[Option[T]] =
    t.map(Some(_))
      .timeout(timeoutMillis.milliseconds)
      .onErrorRecover {
        case _: TimeoutException => None
      }

  override def throwsExceptionOnUnsupportedEncoding = false
} 
Example 9
Source File: AsyncHttpClientLowLevelMonixWebsocketTest.scala    From sttp   with Apache License 2.0
package sttp.client.asynchttpclient.monix

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.asynchttpclient.ws.{WebSocketListener, WebSocket => AHCWebSocket}
import sttp.client._
import sttp.client.asynchttpclient.WebSocketHandler
import sttp.client.impl.monix.convertMonixTaskToFuture
import sttp.client.testing.ConvertToFuture
import sttp.client.testing.websocket.LowLevelListenerWebSocketTest

class AsyncHttpClientLowLevelMonixWebsocketTest
    extends LowLevelListenerWebSocketTest[Task, AHCWebSocket, WebSocketHandler] {
  override implicit val backend: SttpBackend[Task, Nothing, WebSocketHandler] =
    AsyncHttpClientMonixBackend().runSyncUnsafe()
  override implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture

  override def createHandler(_onTextFrame: String => Unit): WebSocketHandler[AHCWebSocket] =
    WebSocketHandler.fromListener(new WebSocketListener {
      override def onOpen(websocket: AHCWebSocket): Unit = {}
      override def onClose(websocket: AHCWebSocket, code: Int, reason: String): Unit = {}
      override def onError(t: Throwable): Unit = {}
      override def onTextFrame(payload: String, finalFragment: Boolean, rsv: Int): Unit = {
        _onTextFrame(payload)
      }
    })

  override def sendText(ws: AHCWebSocket, t: String): Unit = ws.sendTextFrame(t).await()

  override def sendCloseFrame(ws: AHCWebSocket): Unit = ws.sendCloseFrame()
} 
Example 10
Source File: MonixAsyncHandlerTest.scala    From pulsar4s   with Apache License 2.0
package com.sksamuel.pulsar4s.monixs

import java.util.UUID

import com.sksamuel.pulsar4s._
import org.apache.pulsar.client.api.Schema
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class MonixAsyncHandlerTest extends AnyFunSuite with Matchers with BeforeAndAfterAll {

  import monix.execution.Scheduler.Implicits.global
  import MonixAsyncHandler._

  implicit val schema: Schema[String] = Schema.STRING

  val client = PulsarClient("pulsar://localhost:6650")
  val topic = Topic("persistent://sample/standalone/ns1/monix_" + UUID.randomUUID())

  override def afterAll(): Unit = {
    client.close()
  }

  test("async producer should use monix") {
    val producer = client.producer(ProducerConfig(topic))
    val t = producer.sendAsync("wibble")
    val f = t.runToFuture
    Await.result(f, Duration.Inf) should not be null
    producer.close()
  }

  test("async consumer should use monix") {
    val consumer = client.consumer(ConsumerConfig(topics = Seq(topic), subscriptionName = Subscription("mysub_" + UUID.randomUUID())))
    consumer.seekEarliest()
    val t = consumer.receiveAsync
    val f = t.runToFuture
    new String(Await.result(f, Duration.Inf).data) shouldBe "wibble"
    consumer.close()
  }

  test("async consumer getMessageById should use monix") {
    val consumer = client.consumer(ConsumerConfig(topics = Seq(topic), subscriptionName = Subscription("mysub_" + UUID.randomUUID())))
    consumer.seekEarliest()
    val receive = consumer.receiveAsync
    val valueFuture = receive.runToFuture
    val value = Await.result(valueFuture, Duration.Inf)
    val t = consumer.getLastMessageIdAsync
    val rFuture = t.runToFuture
    val r = Await.result(rFuture, Duration.Inf)
    val zipped = r.toString.split(":") zip value.messageId.toString.split(":")
    zipped.foreach(t => t._1 shouldBe t._2)
    consumer.close()
  }
} 
Example 11
Source File: TakeWhileInclusiveSuite.scala    From tofu   with Apache License 2.0
package tofu.observable

import monix.catnap.MVar
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class TakeWhileInclusiveSuite extends AnyFlatSpec with Matchers {

  private def writeElement[A](mvar: MVar[Task, Vector[A]])(a: A): Task[Unit]                        =
    mvar.take.flatMap(v => mvar.put(v :+ a))
  private def inclusiveElements[A](obs: Observable[A])(p: A => Boolean): Task[(Vector[A], List[A])] =
    for {
      mvar     <- MVar[Task].of(Vector.empty[A])
      produced <- obs.doOnNext(writeElement(mvar)).takeWhileInclusive(p).toListL
      written  <- mvar.read
    } yield (written, produced)

  "Observable.takeWhileInclusibe" should "produce and generate same elements" in {
    inclusiveElements(Observable.range(1, 100))(_ <= 10).runSyncUnsafe(Duration.Inf) shouldEqual
      ((Vector.range(1, 12), List.range(1, 12)))
  }
} 
Example 12
Source File: Http4sUtils.scala    From core   with Apache License 2.0
package com.smartbackpackerapp.http

import cats.{Applicative, Monad}
import cats.data.{Kleisli, OptionT}
import cats.effect.IO
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.http4s.server.AuthMiddleware
import org.http4s.{EntityBody, Request}

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object Http4sUtils {

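  // authUser accepts every request with a constant principal; OptionT[F, ?] uses kind-projector placeholder syntax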
  private def authUser[F[_]](implicit F: Applicative[F]): Kleisli[OptionT[F, ?], Request[F], String] =
    Kleisli(_ => OptionT.liftF(F.pure("access_token")))

  def middleware[F[_]: Monad]: AuthMiddleware[F, String] = AuthMiddleware.apply[F, String](authUser)

  val taskMiddleware: AuthMiddleware[Task, String] = middleware[Task]
  val ioMiddleware: AuthMiddleware[IO, String] = middleware[IO]

  implicit class ByteVector2String(body: EntityBody[IO]) {
    def asString: String = {
      val array = body.compile.toVector.unsafeRunSync().toArray
      new String(array.map(_.toChar))
    }
  }

  implicit class ByteVector2StringTask(body: EntityBody[Task]) {
    def asString: String = {
      val array = Await.result(body.compile.toVector.runAsync, Duration.Inf).toArray
      new String(array.map(_.toChar))
    }
  }

} 
Example 13
Source File: EnvSuite.scala    From tofu   with Apache License 2.0
package tofu.env

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import tofu.{HasContextRun, RunContext, WithRun}
import tofu.lift.{Lift, Unlift, UnliftIO}

import scala.concurrent.duration._

class EnvSuite extends AnyFlatSpec with Matchers {
  "flatMap" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.flatMap(Env.pure)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }

  "map" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.map(identity)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }

  "map2" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.map2(Env.unit[Unit])((a, _) => a)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }

  "map3" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.map3(Env.unit[Unit], Env.unit[Unit])((a, _, _) => a)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }

  "parMap2" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.parMap2(Env.unit[Unit])((a, _) => a)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }

  "parMap3" should "not stack overflow on folding large collection" in {
    (0 to 100000).toList
      .foldLeft(Env.pure[Unit, Int](3)) {
        case (acc, _) => acc.parMap3(Env.unit[Unit], Env.unit[Unit])((a, _, _) => a)
      }
      .run(())
      .runSyncUnsafe(Duration.Inf) shouldBe 3
  }
}

object EnvSuite {
  def summonEnvInstances[E](): Unit = {
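    // compile-time sanity check: summoning each instance proves it can be derived for Env[E, *] (kind-projector syntax)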
    implicitly[Lift[Task, Env[E, *]]]
    implicitly[Unlift[Task, Env[E, *]]]

    implicitly[UnliftIO[Env[E, *]]]

    implicitly[RunContext[Env[E, *]]]
    implicitly[HasContextRun[Env[E, *], Task, E]]
    implicitly[WithRun[Env[E, *], Task, E]]
    ()
  }
} 
Example 14
Source File: Http4sClientSpec.scala    From cornichon   with Apache License 2.0
package com.github.agourlay.cornichon.http

import com.github.agourlay.cornichon.http.client.Http4sClient
import monix.execution.Scheduler.Implicits.global
import utest._

object Http4sClientSpec extends TestSuite {

  private val client = new Http4sClient(true, true, true)

  override def utestAfterAll(): Unit = {
    client.shutdown().runSyncUnsafe()
  }

  def tests = Tests {
    test("conserves duplicates http params") {
      val uri = client.parseUri("http://web.com").valueUnsafe
      val finalUri = client.addQueryParams(uri, List("p1" -> "v1", "p1" -> "v1'", "p2" -> "v2"))
      assert(finalUri.query.multiParams("p1") == "v1" :: "v1'" :: Nil)
      assert(finalUri.query.multiParams("p2") == "v2" :: Nil)
    }
  }
} 
Example 15
Source File: MatchersProperties.scala    From cornichon   with Apache License 2.0
package com.github.agourlay.cornichon.matchers

import java.time.Instant
import java.time.format.DateTimeFormatter

import com.github.agourlay.cornichon.matchers.Matchers._
import io.circe.Json
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalacheck._
import org.scalacheck.Prop._
import org.typelevel.claimant.Claim

object MatchersProperties extends Properties("Matchers") {

  val reasonablyRandomInstantGen: Gen[Instant] = for {
    randomOffset <- Arbitrary.arbLong.arbitrary
  } yield Instant.now().plusMillis(randomOffset % 1000000000000L)

  val instantGen: Gen[Instant] = for {
    randomOffset <- Arbitrary.arbLong.arbitrary
  } yield Instant.now().plusMillis(randomOffset)

  property("any-integer correct for any int") =
    forAll(Gen.size) { int =>
      Claim {
        anyInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-positive-integer correct for any positive int") =
    forAll(Gen.choose(1, Int.MaxValue)) { int =>
      Claim {
        anyPositiveInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-positive-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyPositiveInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-negative-integer correct for any negative int") =
    forAll(Gen.negNum[Int]) { int =>
      Claim {
        anyNegativeInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-negative-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyNegativeInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-uuid correct for any valid UUID") =
    forAll(Gen.uuid) { uuid =>
      Claim {
        anyUUID.predicate(Json.fromString(uuid.toString))
      }
    }

  property("any-uuid incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyUUID.predicate(Json.fromString(alphanum))
      }
    }

  property("any-date-time correct for all ISO-compliant values, including Y10K+ dates") =
    forAll(instantGen) { instant =>
      Claim {
        anyDateTime.predicate(Json.fromString(DateTimeFormatter.ISO_INSTANT.format(instant)))
      }
    }

  property("any-date-time correct in parallel") = {
    forAll(reasonablyRandomInstantGen) { instant =>
      val booleans = 1.to(64).map { _ =>
        Task.delay {
          anyDateTime.predicate(Json.fromString(DateTimeFormatter.ISO_INSTANT.format(instant)))
        }
      }

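      // parSequenceUnordered runs the 64 tasks concurrently (results arrive in completion order) on the global scheduler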
      val res = Task.parSequenceUnordered(booleans).runSyncUnsafe().foldLeft(List.empty[Boolean]) { case (acc, e) => e :: acc }

      Claim(res.forall(_ == true))
    }
  }
} 
Example 16
Source File: CornichonFeatureSbtTask.scala    From cornichon   with Apache License 2.0
package com.github.agourlay.cornichon.framework

import com.github.agourlay.cornichon.framework.CornichonFeatureRunner._
import monix.execution.Scheduler.Implicits.global
import sbt.testing._

import scala.concurrent.duration.Duration
import scala.concurrent.Await

class CornichonFeatureSbtTask(task: TaskDef, scenarioNameFilter: Set[String], explicitSeed: Option[Long]) extends Task {

  override def tags(): Array[String] = Array.empty
  override def taskDef(): TaskDef = task

  override def execute(eventHandler: EventHandler, loggers: Array[Logger]): Array[Task] = {
    val fqn = task.fullyQualifiedName()
    val featureInfo = FeatureInfo(fqn, Class.forName(fqn), task.fingerprint(), task.selectors().head)
    val featureTask = loadAndExecute(featureInfo, eventHandler, explicitSeed, scenarioNameFilter)
    Await.result(featureTask.runToFuture, Duration.Inf)
    Array.empty
  }
} 
Example 17
Source File: MainRunner.scala    From cornichon   with Apache License 2.0
package com.github.agourlay.cornichon.framework

import java.util

import cats.syntax.apply._
import com.github.agourlay.cornichon.CornichonFeature
import com.github.agourlay.cornichon.core.CornichonError
import com.github.agourlay.cornichon.framework.CornichonFeatureRunner._
import com.monovore.decline._
import com.openpojo.reflection.PojoClass
import com.openpojo.reflection.impl.PojoClassFactory
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable
import sbt.testing.TestSelector

import scala.jdk.CollectionConverters._
import scala.concurrent.Await
import scala.concurrent.duration.Duration

object MainRunner {

  private val packageToScanOpts = Opts.option[String]("packageToScan", help = "Package containing the feature files.")

  private val reportsOutputDirOpts = Opts.option[String]("reportsOutputDir", help = "Output directory for junit.xml files (default to current).").withDefault(".")

  private val featureParallelismOpts = Opts.option[Int]("featureParallelism", help = "Number of feature running in parallel (default=1).")
    .validate("must be positive")(_ > 0).withDefault(1)

  private val seedOpts = Opts.option[Long]("seed", help = "Seed to use for starting random processes.").orNone

  private val scenarioNameFilterOpts = Opts.option[String]("scenarioNameFilter", help = "Filter scenario to run by name.").orNone

  private val mainRunnerCommand = Command(
    name = "cornichon-test-framework",
    header = "Run your cornichon features without SBT."
  )((packageToScanOpts, reportsOutputDirOpts, featureParallelismOpts, seedOpts, scenarioNameFilterOpts).tupled)

  def main(args: Array[String]): Unit = mainRunnerCommand.parse(args.toSeq, sys.env) match {
    case Left(help) =>
      System.err.println(help)
      sys.exit(1)
    case Right((packageToScan, reportsOutputDir, featureParallelism, explicitSeed, scenarioNameFilter)) =>
      JUnitXmlReporter.checkReportsFolder(reportsOutputDir)
      println("Starting feature classes discovery")
      val classes = discoverFeatureClasses(packageToScan)
      println(s"Found ${classes.size} feature classes")
      val scenarioNameFilterSet = scenarioNameFilter.toSet
      val f = Observable.fromIterable(classes)
        .mapParallelUnordered(featureParallelism) { featureClass =>
          val startedAt = System.currentTimeMillis()
          val featureTypeName = featureClass.getTypeName
          val featureInfo = FeatureInfo(featureTypeName, featureClass, CornichonFingerprint, new TestSelector(featureTypeName))
          val eventHandler = new RecordEventHandler()
          loadAndExecute(featureInfo, eventHandler, explicitSeed, scenarioNameFilterSet)
            .timed
            .map {
              case (duration, res) =>
                JUnitXmlReporter.writeJunitReport(reportsOutputDir, featureTypeName, duration, startedAt, eventHandler.recorded) match {
                  case Left(e) =>
                    println(s"ERROR: Could not generate JUnit xml report for $featureTypeName due to\n${CornichonError.genStacktrace(e)}")
                  case Right(_) =>
                    ()
                }
                res
            }
        }
        .foldLeftL(true)(_ && _)
        .runToFuture

      if (Await.result(f, Duration.Inf))
        System.exit(0)
      else
        System.exit(1)
  }

  // https://stackoverflow.com/questions/492184/how-do-you-find-all-subclasses-of-a-given-class-in-java
  def discoverFeatureClasses(packageToExplore: String): List[Class[_]] = {
    val classes: util.List[PojoClass] = PojoClassFactory.enumerateClassesByExtendingType(packageToExplore, classOf[CornichonFeature], null)
    classes.iterator().asScala.collect { case pojo if pojo.isConcrete => pojo.getClazz }.toList
  }
} 
Example 18
Source File: IdentitiesRoutes.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.routes

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.directives.AuthDirectives.authenticator
import ch.epfl.bluebrain.nexus.iam.marshallers.instances._
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.Caller
import ch.epfl.bluebrain.nexus.iam.types.Caller.JsonLd._
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global


class IdentitiesRoutes(realms: Realms[Task])(implicit http: HttpConfig) {

  def routes: Route = {
    (pathPrefix("identities") & pathEndOrSingleSlash) {
      operationName(s"/${http.prefix}/identities") {
        authenticateOAuth2Async("*", authenticator(realms)).withAnonymousUser(Caller.anonymous) { implicit caller =>
          get {
            complete(caller)
          }
        }
      }
    }
  }
} 
Example 19
Source File: PermissionsRoutes.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.routes

import akka.http.javadsl.server.Rejections._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.directives.AuthDirectives.authenticator
import ch.epfl.bluebrain.nexus.iam.marshallers.instances._
import ch.epfl.bluebrain.nexus.iam.permissions.Permissions
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.routes.PermissionsRoutes.PatchPermissions
import ch.epfl.bluebrain.nexus.iam.routes.PermissionsRoutes.PatchPermissions.{Append, Replace, Subtract}
import ch.epfl.bluebrain.nexus.iam.types.ResourceF._
import ch.epfl.bluebrain.nexus.iam.types.{Caller, Permission}
import io.circe.{Decoder, DecodingFailure}
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global


class PermissionsRoutes(permissions: Permissions[Task], realms: Realms[Task])(implicit http: HttpConfig) {

  def routes: Route =
    (pathPrefix("permissions") & pathEndOrSingleSlash) {
      operationName(s"/${http.prefix}/permissions") {
        authenticateOAuth2Async("*", authenticator(realms)).withAnonymousUser(Caller.anonymous) { implicit caller =>
          concat(
            get {
              parameter("rev".as[Long].?) {
                case Some(rev) => complete(permissions.fetchAt(rev).runNotFound)
                case None      => complete(permissions.fetch.runToFuture)
              }
            },
            (put & parameter("rev" ? 0L)) { rev =>
              entity(as[PatchPermissions]) {
                case Replace(set) =>
                  complete(permissions.replace(set, rev).runToFuture)
                case _ => reject(validationRejection("Only @type 'Replace' is permitted when using 'put'."))
              }
            },
            delete {
              parameter("rev".as[Long]) { rev =>
                complete(permissions.delete(rev).runToFuture)
              }
            },
            (patch & parameter("rev" ? 0L)) { rev =>
              entity(as[PatchPermissions]) {
                case Append(set) =>
                  complete(permissions.append(set, rev).runToFuture)
                case Subtract(set) =>
                  complete(permissions.subtract(set, rev).runToFuture)
                case _ =>
                  reject(validationRejection("Only @type 'Append' or 'Subtract' is permitted when using 'patch'."))
              }
            }
          )
        }
      }
    }
}

object PermissionsRoutes {

  private[routes] sealed trait PatchPermissions extends Product with Serializable

  private[routes] object PatchPermissions {

    final case class Append(permissions: Set[Permission])   extends PatchPermissions
    final case class Subtract(permissions: Set[Permission]) extends PatchPermissions
    final case class Replace(permissions: Set[Permission])  extends PatchPermissions

    implicit val patchPermissionsDecoder: Decoder[PatchPermissions] =
      Decoder.instance { hc =>
        for {
          permissions <- hc.get[Set[Permission]]("permissions")
          tpe = hc.get[String]("@type").getOrElse("Replace")
          patch <- tpe match {
            case "Replace"  => Right(Replace(permissions))
            case "Append"   => Right(Append(permissions))
            case "Subtract" => Right(Subtract(permissions))
            case _          => Left(DecodingFailure("@type field must be one of 'Replace', 'Append' or 'Subtract'", hc.history))
          }
        } yield patch
      }
  }

} 
Example 20
Source File: DecodeNullSpec.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra.streaming

import io.getquill._
import monix.reactive.Observable

class DecodeNullSpec extends Spec {

  "no default values when reading null" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testStreamDB._
      val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity"))

      val result =
        for {
          _ <- testStreamDB.run(writeEntities.delete)
          _ <- Observable.fromTask(testStreamDB.run(writeEntities.insert(lift(insertValue))).countL)
          result <- testStreamDB.run(query[DecodeNullTestEntity])
        } yield {
          result
        }
      intercept[IllegalStateException] {
        await {
          result.headL.runToFuture
        }
      }
    }
  }

  case class DecodeNullTestEntity(id: Int, value: Int)

  case class DecodeNullTestWriteEntity(id: Int, value: Option[Int])

  val insertValue = DecodeNullTestWriteEntity(0, None)
} 
Example 21
Source File: QueryResultTypeCassandraStreamSpec.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra.streaming

import io.getquill.context.cassandra.QueryResultTypeCassandraSpec
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

class QueryResultTypeCassandraStreamSpec extends QueryResultTypeCassandraSpec {

  val context = testStreamDB

  import context._

  def result[T](t: Observable[T]) =
    await(t.foldLeftL(List.empty[T])(_ :+ _).runToFuture)

  override def beforeAll = {
    result(context.run(deleteAll))
    result(context.run(liftQuery(entries).foreach(e => insert(e))))
    ()
  }

  "query" in {
    result(context.run(selectAll)) mustEqual entries
  }

  "querySingle" - {
    "size" in {
      result(context.run(entitySize)) mustEqual List(3)
    }
    "parametrized size" in {
      result(context.run(parametrizedSize(lift(10000)))) mustEqual List(0)
    }
  }
} 
Example 22
Source File: EncodingSpec.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra.streaming

import io.getquill.context.cassandra.EncodingSpecHelper
import monix.reactive.Observable
import io.getquill.Query

class EncodingSpec extends EncodingSpecHelper {
  "encodes and decodes types" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testStreamDB._
      val result =
        for {
          _ <- testStreamDB.run(query[EncodingTestEntity].delete)
          inserts = Observable(insertValues: _*)
          _ <- Observable.fromTask(testStreamDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e))).countL)
          result <- testStreamDB.run(query[EncodingTestEntity])
        } yield {
          result
        }
      val f = result.foldLeftL(List.empty[EncodingTestEntity])(_ :+ _).runToFuture
      verify(await(f))
    }
  }

  "encodes collections" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testStreamDB._
      val q = quote {
        (list: Query[Int]) =>
          query[EncodingTestEntity].filter(t => list.contains(t.id))
      }
      val result =
        for {
          _ <- testStreamDB.run(query[EncodingTestEntity].delete)
          inserts = Observable(insertValues: _*)
          _ <- Observable.fromTask(testStreamDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e))).countL)
          result <- testStreamDB.run(q(liftQuery(insertValues.map(_.id))))
        } yield {
          result
        }
      val f = result.foldLeftL(List.empty[EncodingTestEntity])(_ :+ _).runToFuture
      verify(await(f))
    }
  }
} 
Example 23
Source File: MonixTaskMain.scala    From advanced-scala-code   with Apache License 2.0
object MonixTaskMain {

  def main(args: Array[String]): Unit = {
    import org.asynchttpclient.DefaultAsyncHttpClient
    val asyncHttpClient = new DefaultAsyncHttpClient()
    arm.ArmUtils.using(asyncHttpClient) {
      import java.nio.charset.Charset
      import monix.eval.Task
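      // Task.create bridges a callback-based API: signal the callback exactly once and return a Cancelable for cleanup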
      val result6T = Task.create[String]( (_, callback) => {
        val lf = asyncHttpClient.prepareGet("https://httpbin.org/get").execute()
        val javaFuture = lf.toCompletableFuture

        javaFuture.whenComplete { (response, exc) => {
          if (exc == null) {
            callback.onSuccess(response.getResponseBody(Charset.forName("UTF-8")))
          } else callback.onError(exc)
        }}

        import monix.execution.Cancelable
        Cancelable.apply { () =>
          javaFuture.cancel(true)
        }
      })

      import monix.execution.Scheduler.Implicits.global
      val resultCF = result6T.runToFuture

      import scala.concurrent.Await
      import scala.concurrent.duration._
      val result = Await.result(resultCF, 5.seconds)
      println(result)
    }
  }
} 
Example 24
Source File: MonixKafkaTopicRegexTest.scala    From monix-kafka   with Apache License 2.0
package monix.kafka

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.FunSuite

import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration._

class MonixKafkaTopicRegexTest extends FunSuite with KafkaTestKit {
  val topicsRegex = "monix-kafka-tests-.*".r
  val topicMatchingRegex = "monix-kafka-tests-anything"

  val producerCfg = KafkaProducerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    clientId = "monix-kafka-1-0-producer-test"
  )

  val consumerCfg = KafkaConsumerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    groupId = "kafka-tests",
    clientId = "monix-kafka-1-0-consumer-test",
    autoOffsetReset = AutoOffsetReset.Earliest
  )

  test("publish one message when subscribed to topics regex") {
    withRunningKafka {

      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumerTask = KafkaConsumerObservable.createConsumer[String, String](consumerCfg, topicsRegex).executeOn(io)
      val consumer = Await.result(consumerTask.runToFuture, 60.seconds)

      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "my-message")
        Await.result(send.runToFuture, 30.seconds)

        val records = consumer.poll(10.seconds.toMillis).asScala.map(_.value()).toList
        assert(records === List("my-message"))
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
        consumer.close()
      }
    }
  }

  test("listen for one message when subscribed to topics regex") {

    withRunningKafka {
      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io)
      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "test-message")
        Await.result(send.runToFuture, 30.seconds)

        val first = consumer.take(1).map(_.value()).firstL
        val result = Await.result(first.runToFuture, 30.seconds)
        assert(result === "test-message")
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
      }
    }
  }

  test("full producer/consumer test when subscribed to topics regex") {
    withRunningKafka {
      val count = 10000

      val producer = KafkaProducerSink[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io).take(count)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicMatchingRegex, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = consumer
        .map(_.value())
        .toListL

      val (result, _) = Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
      assert(result.map(_.toInt).sum === (0 until count).sum)
    }
  }
} 
Example 25
Source File: MergeByCommitCallbackTest.scala    From monix-kafka   with Apache License 2.0
package monix.kafka

import monix.eval.Task
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.{FunSuite, Matchers}

import scala.concurrent.duration._
import scala.concurrent.Await
import monix.execution.Scheduler.Implicits.global
import org.apache.kafka.clients.consumer.OffsetCommitCallback
import org.apache.kafka.common.TopicPartition
import org.scalacheck.Gen
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class MergeByCommitCallbackTest extends FunSuite with KafkaTestKit with ScalaCheckDrivenPropertyChecks with Matchers {

  val commitCallbacks: List[Commit] = List.fill(4)(new Commit {
    override def commitBatchSync(batch: Map[TopicPartition, Long]): Task[Unit] = Task.unit

    override def commitBatchAsync(batch: Map[TopicPartition, Long], callback: OffsetCommitCallback): Task[Unit] =
      Task.unit
  })

  val committableOffsetsGen: Gen[CommittableOffset] = for {
    partition <- Gen.posNum[Int]
    offset <- Gen.posNum[Long]
    commit <- Gen.oneOf(commitCallbacks)
  } yield CommittableOffset(new TopicPartition("topic", partition), offset, commit)

  test("merge by commit callback works") {
    forAll(Gen.nonEmptyListOf(committableOffsetsGen)) { offsets =>
      val partitions = offsets.map(_.topicPartition)
      val received: List[CommittableOffsetBatch] = CommittableOffsetBatch.mergeByCommitCallback(offsets)

      received.foreach { batch => partitions should contain allElementsOf batch.offsets.keys }

      received.size should be <= 4
    }
  }

  test("merge by commit callback for multiple consumers") {
    withRunningKafka {
      val count = 10000
      val topicName = "monix-kafka-merge-by-commit"

      val producerCfg = KafkaProducerConfig.default.copy(
        bootstrapServers = List("127.0.0.1:6001"),
        clientId = "monix-kafka-1-0-producer-test"
      )

      val producer = KafkaProducerSink[String, String](producerCfg, io)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicName, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = Observable
        .range(0, 4)
        .mergeMap(i => createConsumer(i.toInt, topicName).take(500))
        .bufferTumbling(2000)
        .map(CommittableOffsetBatch.mergeByCommitCallback)
        .map { offsetBatches => assert(offsetBatches.length == 4) }
        .completedL

      Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
    }
  }

  private def createConsumer(i: Int, topicName: String): Observable[CommittableOffset] = {
    val cfg = KafkaConsumerConfig.default.copy(
      bootstrapServers = List("127.0.0.1:6001"),
      groupId = s"kafka-tests-$i",
      autoOffsetReset = AutoOffsetReset.Earliest
    )

    KafkaConsumerObservable
      .manualCommit[String, String](cfg, List(topicName))
      .executeOn(io)
      .map(_.committableOffset)
  }
} 
Example 26
Source File: MonixKafkaTopicRegexTest.scala    From monix-kafka   with Apache License 2.0 5 votes vote down vote up
package monix.kafka

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.FunSuite

import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration._

class MonixKafkaTopicRegexTest extends FunSuite with KafkaTestKit {
  val topicsRegex = "monix-kafka-tests-.*".r
  val topicMatchingRegex = "monix-kafka-tests-anything"

  val producerCfg = KafkaProducerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    clientId = "monix-kafka-1-0-producer-test"
  )

  val consumerCfg = KafkaConsumerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    groupId = "kafka-tests",
    clientId = "monix-kafka-1-0-consumer-test",
    autoOffsetReset = AutoOffsetReset.Earliest
  )

  test("publish one message when subscribed to topics regex") {

    withRunningKafka {
      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumerTask = KafkaConsumerObservable.createConsumer[String, String](consumerCfg, topicsRegex).executeOn(io)
      val consumer = Await.result(consumerTask.runToFuture, 60.seconds)

      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "my-message")
        Await.result(send.runToFuture, 30.seconds)

        val records = consumer.poll(10.seconds.toMillis).asScala.map(_.value()).toList
        assert(records === List("my-message"))
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
        consumer.close()
      }
    }
  }

  test("listen for one message when subscribed to topics regex") {
    withRunningKafka {
      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io)
      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "test-message")
        Await.result(send.runToFuture, 30.seconds)

        val first = consumer.take(1).map(_.value()).firstL
        val result = Await.result(first.runToFuture, 30.seconds)
        assert(result === "test-message")
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
      }
    }
  }

  test("full producer/consumer test when subscribed to topics regex") {
    withRunningKafka {
      val count = 10000

      val producer = KafkaProducerSink[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io).take(count)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicMatchingRegex, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = consumer
        .map(_.value())
        .toListL

      val (result, _) = Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
      assert(result.map(_.toInt).sum === (0 until count).sum)
    }
  }
} 
Example 27
Source File: MergeByCommitCallbackTest.scala    From monix-kafka   with Apache License 2.0 5 votes vote down vote up
package monix.kafka

import monix.eval.Task
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.{FunSuite, Matchers}

import scala.concurrent.duration._
import scala.concurrent.Await
import monix.execution.Scheduler.Implicits.global
import org.apache.kafka.clients.consumer.OffsetCommitCallback
import org.apache.kafka.common.TopicPartition
import org.scalacheck.Gen
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class MergeByCommitCallbackTest extends FunSuite with KafkaTestKit with ScalaCheckDrivenPropertyChecks with Matchers {

  val commitCallbacks: List[Commit] = List.fill(4)(new Commit {
    override def commitBatchSync(batch: Map[TopicPartition, Long]): Task[Unit] = Task.unit

    override def commitBatchAsync(batch: Map[TopicPartition, Long], callback: OffsetCommitCallback): Task[Unit] =
      Task.unit
  })

  val committableOffsetsGen: Gen[CommittableOffset] = for {
    partition <- Gen.posNum[Int]
    offset <- Gen.posNum[Long]
    commit <- Gen.oneOf(commitCallbacks)
  } yield CommittableOffset(new TopicPartition("topic", partition), offset, commit)

  test("merge by commit callback works") {
    forAll(Gen.nonEmptyListOf(committableOffsetsGen)) { offsets =>
      val partitions = offsets.map(_.topicPartition)
      val received: List[CommittableOffsetBatch] = CommittableOffsetBatch.mergeByCommitCallback(offsets)

      received.foreach { batch => partitions should contain allElementsOf batch.offsets.keys }

      received.size should be <= 4
    }
  }

  test("merge by commit callback for multiple consumers") {
    withRunningKafka {
      val count = 10000
      val topicName = "monix-kafka-merge-by-commit"

      val producerCfg = KafkaProducerConfig.default.copy(
        bootstrapServers = List("127.0.0.1:6001"),
        clientId = "monix-kafka-1-0-producer-test"
      )

      val producer = KafkaProducerSink[String, String](producerCfg, io)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicName, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = Observable
        .range(0, 4)
        .mergeMap(i => createConsumer(i.toInt, topicName).take(500))
        .bufferTumbling(2000)
        .map(CommittableOffsetBatch.mergeByCommitCallback)
        .map { offsetBatches => assert(offsetBatches.length == 4) }
        .completedL

      Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
    }
  }

  private def createConsumer(i: Int, topicName: String): Observable[CommittableOffset] = {
    val cfg = KafkaConsumerConfig.default.copy(
      bootstrapServers = List("127.0.0.1:6001"),
      groupId = s"kafka-tests-$i",
      autoOffsetReset = AutoOffsetReset.Earliest
    )

    KafkaConsumerObservable
      .manualCommit[String, String](cfg, List(topicName))
      .executeOn(io)
      .map(_.committableOffset)
  }
} 
Example 28
Source File: MonixKafkaTopicRegexTest.scala    From monix-kafka   with Apache License 2.0 5 votes vote down vote up
package monix.kafka

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.FunSuite

import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration._

class MonixKafkaTopicRegexTest extends FunSuite with KafkaTestKit {
  val topicsRegex = "monix-kafka-tests-.*".r
  val topicMatchingRegex = "monix-kafka-tests-anything"

  val producerCfg = KafkaProducerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    clientId = "monix-kafka-1-0-producer-test"
  )

  val consumerCfg = KafkaConsumerConfig.default.copy(
    bootstrapServers = List("127.0.0.1:6001"),
    groupId = "kafka-tests",
    clientId = "monix-kafka-1-0-consumer-test",
    autoOffsetReset = AutoOffsetReset.Earliest
  )

  test("publish one message when subscribed to topics regex") {

    withRunningKafka {
      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumerTask = KafkaConsumerObservable.createConsumer[String, String](consumerCfg, topicsRegex).executeOn(io)
      val consumer = Await.result(consumerTask.runToFuture, 60.seconds)

      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "my-message")
        Await.result(send.runToFuture, 30.seconds)

        val records = consumer.poll(10.seconds.toMillis).asScala.map(_.value()).toList
        assert(records === List("my-message"))
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
        consumer.close()
      }
    }
  }

  test("listen for one message when subscribed to topics regex") {

    withRunningKafka {
      val producer = KafkaProducer[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io)
      try {
        // Publishing one message
        val send = producer.send(topicMatchingRegex, "test-message")
        Await.result(send.runToFuture, 30.seconds)

        val first = consumer.take(1).map(_.value()).firstL
        val result = Await.result(first.runToFuture, 30.seconds)
        assert(result === "test-message")
      } finally {
        Await.result(producer.close().runToFuture, Duration.Inf)
      }
    }
  }

  test("full producer/consumer test when subscribed to topics regex") {
    withRunningKafka {
      val count = 10000

      val producer = KafkaProducerSink[String, String](producerCfg, io)
      val consumer = KafkaConsumerObservable[String, String](consumerCfg, topicsRegex).executeOn(io).take(count)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicMatchingRegex, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = consumer
        .map(_.value())
        .toListL

      val (result, _) = Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
      assert(result.map(_.toInt).sum === (0 until count).sum)
    }
  }
} 
Example 29
Source File: MergeByCommitCallbackTest.scala    From monix-kafka   with Apache License 2.0 5 votes vote down vote up
package monix.kafka

import monix.eval.Task
import monix.kafka.config.AutoOffsetReset
import monix.reactive.Observable
import org.apache.kafka.clients.producer.ProducerRecord
import org.scalatest.{FunSuite, Matchers}

import scala.concurrent.duration._
import scala.concurrent.Await
import monix.execution.Scheduler.Implicits.global
import org.apache.kafka.clients.consumer.OffsetCommitCallback
import org.apache.kafka.common.TopicPartition
import org.scalacheck.Gen
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class MergeByCommitCallbackTest extends FunSuite with KafkaTestKit with ScalaCheckDrivenPropertyChecks with Matchers {

  val commitCallbacks: List[Commit] = List.fill(4)(new Commit {
    override def commitBatchSync(batch: Map[TopicPartition, Long]): Task[Unit] = Task.unit

    override def commitBatchAsync(batch: Map[TopicPartition, Long], callback: OffsetCommitCallback): Task[Unit] =
      Task.unit
  })

  val committableOffsetsGen: Gen[CommittableOffset] = for {
    partition <- Gen.posNum[Int]
    offset <- Gen.posNum[Long]
    commit <- Gen.oneOf(commitCallbacks)
  } yield CommittableOffset(new TopicPartition("topic", partition), offset, commit)

  test("merge by commit callback works") {
    forAll(Gen.nonEmptyListOf(committableOffsetsGen)) { offsets =>
      val partitions = offsets.map(_.topicPartition)
      val received: List[CommittableOffsetBatch] = CommittableOffsetBatch.mergeByCommitCallback(offsets)

      received.foreach { batch => partitions should contain allElementsOf batch.offsets.keys }

      received.size should be <= 4
    }
  }

  test("merge by commit callback for multiple consumers") {
    withRunningKafka {
      val count = 10000
      val topicName = "monix-kafka-merge-by-commit"

      val producerCfg = KafkaProducerConfig.default.copy(
        bootstrapServers = List("127.0.0.1:6001"),
        clientId = "monix-kafka-1-0-producer-test"
      )

      val producer = KafkaProducerSink[String, String](producerCfg, io)

      val pushT = Observable
        .range(0, count)
        .map(msg => new ProducerRecord(topicName, "obs", msg.toString))
        .bufferIntrospective(1024)
        .consumeWith(producer)

      val listT = Observable
        .range(0, 4)
        .mergeMap(i => createConsumer(i.toInt, topicName).take(500))
        .bufferTumbling(2000)
        .map(CommittableOffsetBatch.mergeByCommitCallback)
        .map { offsetBatches => assert(offsetBatches.length == 4) }
        .completedL

      Await.result(Task.parZip2(listT, pushT).runToFuture, 60.seconds)
    }
  }

  private def createConsumer(i: Int, topicName: String): Observable[CommittableOffset] = {
    val cfg = KafkaConsumerConfig.default.copy(
      bootstrapServers = List("127.0.0.1:6001"),
      groupId = s"kafka-tests-$i",
      autoOffsetReset = AutoOffsetReset.Earliest
    )

    KafkaConsumerObservable
      .manualCommit[String, String](cfg, List(topicName))
      .executeOn(io)
      .map(_.committableOffset)
  }
} 
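Both Kafka tests above lean on the same concurrency idiom: Task.parZip2 starts the consumer and the producer concurrently, so the consumer is already subscribed while records are being pushed, and the combined task completes once both sides finish. A minimal sketch of that idiom, assuming monix 3.x (names are illustrative):

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.Await
import scala.concurrent.duration._

object ParZipSketch extends App {
  val consume = Task { List(1, 2, 3) } // stand-in for consumer.toListL
  val produce = Task.unit              // stand-in for the producer sink
  // Both tasks run concurrently; the pair completes when both are done.
  val (consumed, _) = Await.result(Task.parZip2(consume, produce).runToFuture, 10.seconds)
  println(consumed) // List(1, 2, 3)
}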
Example 30
Source File: Hello_PIDE.scala    From libisabelle   with Apache License 2.0 5 votes vote down vote up
package info.hupel.isabelle.examples.scala

import scala.concurrent._
import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

import info.hupel.isabelle._
import info.hupel.isabelle.api._
import info.hupel.isabelle.setup._

object Hello_PIDE extends App {

  val setup = Setup.default(Version.Stable("2017"), false).right.get // yolo
  val resources = Resources.dumpIsabelleResources().right.get // yolo
  val config = Configuration.simple("Protocol")

  val transaction =
    for {
      env <- setup.makeEnvironment(resources, Nil)
      sys <- System.create(env, config)
      response <- sys.invoke(Operation.Hello)("world")
      _ = println(response.unsafeGet)
      () <- sys.dispose
    } yield ()

  Await.result(transaction, Duration.Inf)

} 
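A detail worth noting in the example above: the for-comprehension composes plain scala.concurrent Futures, yet the single monix import suffices, because a monix Scheduler is also a scala.concurrent.ExecutionContext. A minimal sketch of that fact:

import monix.execution.Scheduler.Implicits.global

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._

object SchedulerAsEc extends App {
  // The global Scheduler serves as the implicit ExecutionContext here.
  val sum = for {
    a <- Future(1)
    b <- Future(2)
  } yield a + b
  println(Await.result(sum, 1.second)) // 3
}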
Example 31
Source File: AdserverApp.scala    From scala-openrtb   with Apache License 2.0 5 votes vote down vote up
package com.powerspace.openrtb.examples.rtb.http4s.adserver

import cats.effect.Resource
import com.google.openrtb.{BidRequest, BidResponse}
import com.powerspace.openrtb.examples.rtb.http4s.common.ExampleSerdeModule
import io.circe.{Encoder, Json}
import monix.eval.Task
import org.http4s.client.Client
import org.http4s.client.blaze.BlazeClientBuilder

import scala.concurrent.duration.Duration

object AdserverApp extends App {

  import monix.execution.Scheduler.Implicits.global

  val httpClient: Resource[Task, Client[Task]] = buildHttpClient()
  val potentialBidResponse = httpBid(httpClient)
  private val bidRequest = Adserver.buildBidRequest()

  potentialBidResponse
    .map(bidResponse => {
      bidResponse.foreach(br => println(buildAuctionString(br)))
    })
    .runSyncUnsafe(Duration.Inf)

  private def buildHttpClient(): Resource[Task, Client[Task]] = {
    BlazeClientBuilder[Task](global).resource
  }

  private def httpBid(httpClient: Resource[Task, Client[Task]]) =
    httpClient.use(AdserverHttpClientBuilder.bid(_, bidRequest))

  private def buildAuctionString(bidResponse: BidResponse) = {
    case class Auction(bidRequest: BidRequest, bidResponse: BidResponse)

    val auctionEncoder = new Encoder[Auction] {
      override def apply(auction: Auction): Json = Json.obj(
        ("request", ExampleSerdeModule.bidRequestEncoder.apply(auction.bidRequest)),
        ("response", ExampleSerdeModule.bidResponseEncoder.apply(auction.bidResponse))
      )
    }

    auctionEncoder(Auction(bidRequest, bidResponse)).toString()
  }
} 
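The httpClient value above is a cats-effect Resource, so buildHttpClient only describes acquisition; the client is actually opened by the use call in httpBid and closed afterwards, even on failure. A minimal sketch of that bracketing behaviour with Task (assumes cats-effect 2.x instances from Task's companion; names are illustrative):

import cats.effect.Resource
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.duration._

object ResourceSketch extends App {
  val clientR: Resource[Task, String] =
    Resource.make(Task { println("acquired"); "client-handle" })(_ => Task(println("released")))

  // `use` guarantees the release action runs even if the body fails.
  clientR.use(handle => Task(println(s"using $handle"))).runSyncUnsafe(5.seconds)
}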
Example 32
Source File: IterateeMain.scala    From advanced-scala-code   with Apache License 2.0 5 votes vote down vote up
package iteratee

import scala.util.{Failure, Success}

object IterateeMain {
  def fileExample(): Unit = {
    import io.iteratee.monix.task._
    import java.io.File

    val wordsE = readLines(new File("license.txt")).flatMap { line =>
      enumIndexedSeq(line.split("\\W"))
    }
    val noEmptyLinesEE = filter[String](str => str.trim.length > 0)
    val toLowerEE = map[String, String](_.toLowerCase)
    val countWordsI = fold[String, Map[String, Int]](Map.empty) { (acc, next) =>
      acc.get(next) match {
        case None => acc + (next -> 1)
        case Some(num) => acc + (next -> (1 + num))
      }
    }
    val dataT = wordsE.through(noEmptyLinesEE).
      through(toLowerEE).into(countWordsI).map { dataMap =>
      dataMap.toList.sortWith( _._2 > _._2).take(5).map(_._1)
    }
    import monix.execution.Scheduler.Implicits.global
    dataT.runOnComplete {
      case Success(data) => println(data)
      case Failure(th) => th.printStackTrace()
    }

    
  }


  def main(args: Array[String]): Unit = {
    import io.iteratee.modules.id._

    // Just one Int
    val singleNumE = enumOne(42)
    val singleNumI = takeI[Int](1)
    val singleNumResult = singleNumE.into(singleNumI)
    println(singleNumResult)

    // Incrementing one Int
    val incrementNumEE = map[Int, Int](_ + 1)
    val incrementedNumResult = singleNumE.through(incrementNumEE).into(singleNumI)
    println(incrementedNumResult)

    // First 10 even numbers
    val naturalsE = iterate(1)(_ + 1)
    val moreThan100EE = filter[Int](_ >= 100)
    val evenFilterEE = filter[Int](_ % 2 == 0)
    val first10I = takeI[Int](10)
    println(naturalsE.through(moreThan100EE).through(evenFilterEE).into(first10I))

    {
      import io.iteratee.modules.eval._
      // Summing N first numbers
      val naturalsE = iterate(1)(_ + 1)
      val limit30kEE = take[Int](30000)
      val sumI = fold[Int, Int](0) { (acc, next) => acc + next }
      println(naturalsE.through(limit30kEE).into(sumI).value)
    }

    fileExample()

  }
} 
Example 33
Source File: ObservableMain.scala    From advanced-scala-code   with Apache License 2.0 5 votes vote down vote up
object ObservableMain {

  def main(args: Array[String]): Unit = {

    import monix.reactive.Observable
    val linesO = Observable.defer {
      import java.io.{BufferedReader, FileReader}
      val br = new BufferedReader(new FileReader("license.txt"))
      Observable.fromLinesReaderUnsafe(br)
    }

    printStatistics(linesO)
    printStatistics(linesO)

    def printStatistics(linesO: Observable[String]): Unit = {
      val wordsO = linesO.flatMap { line =>
        val arr = line.split("\\W").map(_.toLowerCase)
          .map(_.trim).filter(!_.isEmpty)
        Observable.fromIterable(arr.toIterable)
      }

      val rawResultO = wordsO.foldLeft(Map.empty[String, Int]) { (acc, next) =>
        acc.get(next) match {
          case None => acc + (next -> 1)
          case Some(num) => acc + (next -> (1 + num))
        }
      }

      import monix.reactive.Consumer
      val finalResultT = rawResultO.map { map =>
        map.toList.sortWith( _._2 > _._2).take(5).map(_._1)
      }.consumeWith(Consumer.head)

      import monix.execution.Scheduler.Implicits.global
      val resultCF = finalResultT.runToFuture

      import scala.concurrent.Await
      import scala.concurrent.duration._
      val result = Await.result(resultCF, 30.seconds)
      println(result)
      // List(the, or, of, and, to)
    }

  }

  import cats.kernel.Monoid
  import monix.reactive.Observable
  def alternativeMonoid(wordsO: Observable[String]): Unit = {
    import cats.instances.int.catsKernelStdGroupForInt
    import cats.instances.map.catsKernelStdMonoidForMap

    val listT = wordsO.map(word => Map(word -> 1)).toListL
    val totals = listT.map { data =>
      Monoid[Map[String, Int]].combineAll(data)
    }
    // totalsT: Task[Map[String, Int]]

    val finalResultT = totals.map { data =>
      data.toList.sortWith( _._2 > _._2).take(5).map(_._1)
    }

    import monix.execution.Scheduler.Implicits.global
    import scala.concurrent.Await
    import scala.concurrent.duration._
    val result = Await.result(finalResultT.runToFuture, 30.seconds)
    println(result)
  }
} 
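The alternativeMonoid variant works because the Map monoid combines entries key-by-key using the Int monoid (addition), so folding the per-word singleton maps together yields the total counts. A minimal sketch:

import cats.kernel.Monoid
import cats.instances.int.catsKernelStdGroupForInt
import cats.instances.map.catsKernelStdMonoidForMap

object MonoidSketch extends App {
  val counts = Monoid[Map[String, Int]]
    .combineAll(List(Map("a" -> 1), Map("a" -> 1), Map("b" -> 1)))
  println(counts) // Map(a -> 2, b -> 1)
}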
Example 34
Source File: DecodeNullSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra.monix

import io.getquill._

class DecodeNullSpec extends Spec {

  "no default values when reading null" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testMonixDB._
      val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity"))

      val result =
        for {
          _ <- testMonixDB.run(writeEntities.delete)
          _ <- testMonixDB.run(writeEntities.insert(lift(insertValue)))
          result <- testMonixDB.run(query[DecodeNullTestEntity])
        } yield {
          result
        }
      intercept[IllegalStateException] {
        await {
          result.runToFuture
        }
      }
    }
  }

  case class DecodeNullTestEntity(id: Int, value: Int)

  case class DecodeNullTestWriteEntity(id: Int, value: Option[Int])

  val insertValue = DecodeNullTestWriteEntity(0, None)
} 
Example 35
Source File: EventsHelpers.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.events

import com.wavesplatform.block.{Block, MicroBlock}
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.mining.MiningConstraint
import com.wavesplatform.state.diffs.BlockDiffer
import com.wavesplatform.state.diffs.BlockDiffer.DetailedDiff
import monix.execution.Scheduler.Implicits.global
import monix.reactive.subjects.ReplaySubject
import org.scalatest.Suite

import scala.concurrent.duration._

private[events] trait EventsHelpers extends WithBlockchain { _: Suite =>
  protected def produceEvent(useTrigger: BlockchainUpdateTriggers => Unit): BlockchainUpdated = {
    val evts = ReplaySubject[BlockchainUpdated]()
    val t    = new BlockchainUpdateTriggersImpl(evts)
    useTrigger(t)
    evts.onComplete()
    evts.toListL.runSyncUnsafe(500.milliseconds).head
  }

  protected def detailedDiffFromBlock(b: Block): DetailedDiff =
    BlockDiffer.fromBlock(blockchain, None, b, MiningConstraint.Unlimited, verify = false).explicitGet().detailedDiff

  protected def appendBlock(b: Block, minerReward: Option[Long] = None): BlockAppended =
    produceEvent(_.onProcessBlock(b, detailedDiffFromBlock(b), minerReward, blockchain)) match {
      case ba: BlockAppended => ba
      case _                 => fail()
    }

  protected def appendMicroBlock(mb: MicroBlock): MicroBlockAppended = {
    val dd = BlockDiffer.fromMicroBlock(blockchain, Some(0), mb, 1, MiningConstraint.Unlimited, verify = false).explicitGet().detailedDiff
    produceEvent(_.onProcessMicroBlock(mb, dd, blockchain, mb.totalResBlockSig)) match {
      case mba: MicroBlockAppended => mba
      case _                       => fail()
    }
  }
} 
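produceEvent above works because a ReplaySubject buffers every element pushed into it, so the trigger can emit and complete the stream before anything subscribes, and toListL still observes all events. A minimal sketch, assuming monix 3.x:

import monix.execution.Scheduler.Implicits.global
import monix.reactive.subjects.ReplaySubject

import scala.concurrent.duration._

object ReplaySketch extends App {
  val subject = ReplaySubject[Int]()
  subject.onNext(1)
  subject.onNext(2)
  subject.onComplete()
  // Late subscribers still see the buffered elements.
  println(subject.toListL.runSyncUnsafe(1.second)) // List(1, 2)
}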
Example 36
Source File: CommonAccountsApiSpec.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.http

import com.wavesplatform.api.common.CommonAccountsApi
import com.wavesplatform.common.utils._
import com.wavesplatform.db.WithDomain
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.settings.TestFunctionalitySettings
import com.wavesplatform.state.{DataEntry, Diff, StringDataEntry, diffs}
import com.wavesplatform.transaction.{DataTransaction, GenesisTransaction}
import com.wavesplatform.{BlocksTransactionsHelpers, TransactionGen, history}
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{FreeSpec, Matchers}
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class CommonAccountsApiSpec
    extends FreeSpec
    with Matchers
    with WithDomain
    with TransactionGen
    with BlocksTransactionsHelpers
    with ScalaCheckDrivenPropertyChecks {
  "Data stream" - {
    "handles non-existent address" in {
      val entry1 = StringDataEntry("test", "test")
      val entry2 = StringDataEntry("test1", "test")
      val entry3 = StringDataEntry("test2", "test")

      val preconditions = for {
        acc <- accountGen
        ts = System.currentTimeMillis()
        fee <- smallFeeGen
        genesis        = GenesisTransaction.create(acc.toAddress, diffs.ENOUGH_AMT, ts).explicitGet()
        data1          = DataTransaction.selfSigned(1.toByte, acc, Seq(entry1), fee, ts).explicitGet()
        data2          = DataTransaction.selfSigned(1.toByte, acc, Seq(entry2), fee, ts).explicitGet()
        data3          = DataTransaction.selfSigned(1.toByte, acc, Seq(entry3), fee, ts).explicitGet()
        (block1, mbs1) = UnsafeBlocks.unsafeChainBaseAndMicro(history.randomSig, Seq(genesis), Seq(Seq(data1)), acc, 3, ts)
        (block2, mbs2) = UnsafeBlocks.unsafeChainBaseAndMicro(mbs1.last.totalResBlockSig, Seq(data2), Seq(Seq(data3)), acc, 3, ts)
      } yield (acc, block1, mbs1.head, block2, mbs2.head)

      forAll(preconditions) {
        case (acc, block1, mb1, block2, mb2) =>
          withDomain(domainSettingsWithFS(TestFunctionalitySettings.withFeatures(BlockchainFeatures.NG, BlockchainFeatures.DataTransaction))) { d =>
            val commonAccountsApi             = CommonAccountsApi(d.blockchainUpdater.bestLiquidDiff.getOrElse(Diff.empty), d.db, d.blockchainUpdater)
            def dataList(): Set[DataEntry[_]] = commonAccountsApi.dataStream(acc.toAddress, None).toListL.runSyncUnsafe().toSet

            d.appendBlock(block1)
            dataList() shouldBe empty
            d.appendMicroBlock(mb1)
            dataList() shouldBe Set(entry1)
            d.appendBlock(block2)
            dataList() shouldBe Set(entry1, entry2)
            d.appendMicroBlock(mb2)
            dataList() shouldBe Set(entry1, entry2, entry3)
          }
      }
    }
  }
} 
Example 37
Source File: StorageCacheSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import java.nio.file.Paths
import java.time.Clock

import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectRef}
import ch.epfl.bluebrain.nexus.kg.storage.Storage.DiskStorage
import ch.epfl.bluebrain.nexus.rdf.implicits._
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class StorageCacheSpec
    extends ActorSystemFixture("StorageCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val clock: Clock         = Clock.systemUTC
  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val time   = clock.instant()
  val lastId = url"http://example.com/lastA"

  val tempStorage = DiskStorage(ref1, genIri, 1L, false, true, "alg", Paths.get("/tmp"), read, write, 1024L)

  val lastStorageProj1 = tempStorage.copy(id = lastId)
  val lastStorageProj2 = tempStorage.copy(ref = ref2, id = lastId)

  val storagesProj1: List[DiskStorage] = List.fill(5)(tempStorage.copy(id = genIri)) :+ lastStorageProj1
  val storagesProj2: List[DiskStorage] = List.fill(5)(tempStorage.copy(ref = ref2, id = genIri)) :+ lastStorageProj2

  private val cache = StorageCache[Task]

  "StorageCache" should {

    "index storages" in {
      forAll((storagesProj1 ++ storagesProj2).zipWithIndex) {
        case (storage, index) =>
          implicit val instant = time.plusSeconds(index.toLong)
          cache.put(storage).runToFuture.futureValue
          cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual Some(storage)
      }
    }

    "get latest default storage" in {
      cache.getDefault(ref1).runToFuture.futureValue shouldEqual Some(lastStorageProj1)
      cache.getDefault(ref2).runToFuture.futureValue shouldEqual Some(lastStorageProj2)
      cache.getDefault(ProjectRef(genUUID)).runToFuture.futureValue shouldEqual None
    }

    "list storages" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs storagesProj2
    }

    "deprecate storage" in {
      val storage          = storagesProj1.head
      implicit val instant = time.plusSeconds(30L)
      cache.put(storage.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1.filterNot(_ == storage)
    }
  }
} 
Example 38
Source File: ResolverCacheSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
Example 39
Source File: AuthDirectives.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.directives

import akka.http.scaladsl.model.headers.OAuth2BearerToken
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.directives.FutureDirectives.onComplete
import akka.http.scaladsl.server.{Directive0, Directive1}
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.iam.client.types._
import ch.epfl.bluebrain.nexus.iam.client.{IamClient, IamClientError}
import ch.epfl.bluebrain.nexus.kg.KgError.{AuthenticationFailed, AuthorizationFailed, InternalError}
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import com.typesafe.scalalogging.Logger
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.util.{Failure, Success}

object AuthDirectives {

  private val logger = Logger[this.type]

  
  def extractCaller(implicit iam: IamClient[Task], token: Option[AuthToken]): Directive1[Caller] =
    onComplete(iam.identities.runToFuture).flatMap {
      case Success(caller)                         => provide(caller)
      case Failure(_: IamClientError.Unauthorized) => failWith(AuthenticationFailed)
      case Failure(_: IamClientError.Forbidden)    => failWith(AuthorizationFailed)
      case Failure(err) =>
        val message = "Error when trying to extract the subject"
        logger.error(message, err)
        failWith(InternalError(message))
    }
} 
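The shape of extractCaller generalizes to any Task-backed lookup inside an akka-http route: run the task to a Future with runToFuture, then branch on the Try inside the directive. A minimal sketch of that shape, with the domain-specific error mapping left out (names are illustrative):

import akka.http.scaladsl.server.Directive1
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.directives.FutureDirectives.onComplete
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.util.{Failure, Success}

object TaskDirectives {
  // Lift a Task into a Directive1, failing the route on error.
  def onTask[A](task: Task[A]): Directive1[A] =
    onComplete(task.runToFuture).flatMap {
      case Success(a)   => provide(a)
      case Failure(err) => failWith(err)
    }
}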
Example 40
Source File: package.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg

import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.server.Directives.complete
import akka.http.scaladsl.server.{MalformedQueryParamRejection, Route}
import cats.Functor
import cats.data.{EitherT, OptionT}
import cats.instances.future._
import ch.epfl.bluebrain.nexus.iam.client.types._
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources.Rejection.NotFound.notFound
import ch.epfl.bluebrain.nexus.kg.resources.{Ref, Rejection, ResourceV}
import ch.epfl.bluebrain.nexus.kg.routes.OutputFormat.{DOT, Triples}
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.Future

package object routes {

  private[routes] def completeWithFormat(
      fetched: Future[Either[Rejection, (StatusCode, ResourceV)]]
  )(implicit format: NonBinaryOutputFormat): Route =
    completeWithFormat(EitherT(fetched))

  private def completeWithFormat(
      fetched: EitherT[Future, Rejection, (StatusCode, ResourceV)]
  )(implicit format: NonBinaryOutputFormat): Route =
    format match {
      case f: JsonLDOutputFormat =>
        implicit val format = f
        complete(fetched.value)
      case Triples =>
        implicit val format = Triples
        complete(fetched.map { case (status, resource) => status -> resource.value.graph.ntriples }.value)
      case DOT =>
        implicit val format = DOT
        complete(fetched.map { case (status, resource) => status -> resource.value.graph.dot() }.value)
    }

  private[routes] val read: Permission = Permission.unsafe("resources/read")

  private[routes] val schemaError =
    MalformedQueryParamRejection("schema", "The provided schema does not match the schema on the Uri")

  private[routes] implicit class FOptionSyntax[F[_], A](private val fOpt: F[Option[A]]) extends AnyVal {
    def toNotFound(id: AbsoluteIri)(implicit F: Functor[F]): EitherT[F, Rejection, A] =
      OptionT(fOpt).toRight(notFound(Ref(id)))
  }
} 
Example 41
Source File: ArchiveRoutes.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.http.scaladsl.model.StatusCodes.{Created, OK}
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.model.{HttpEntity, MediaTypes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.iam.client.types._
import ch.epfl.bluebrain.nexus.kg.KgError.{InvalidOutputFormat, UnacceptedResponseContentType}
import ch.epfl.bluebrain.nexus.kg.archives.Archive._
import ch.epfl.bluebrain.nexus.kg.config.AppConfig
import ch.epfl.bluebrain.nexus.kg.directives.AuthDirectives.hasPermission
import ch.epfl.bluebrain.nexus.kg.directives.PathDirectives._
import ch.epfl.bluebrain.nexus.kg.directives.ProjectDirectives._
import ch.epfl.bluebrain.nexus.kg.directives.QueryDirectives.outputFormat
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources._
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import ch.epfl.bluebrain.nexus.kg.routes.OutputFormat.Tar
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import io.circe.Json
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class ArchiveRoutes private[routes] (archives: Archives[Task])(
    implicit acls: AccessControlLists,
    project: Project,
    caller: Caller,
    config: AppConfig
) {

  private val responseType = MediaTypes.`application/x-tar`

  
  def routes(id: AbsoluteIri): Route = {
    val resId = Id(project.ref, id)
    concat(
      // Create archive
      (put & pathEndOrSingleSlash) {
        operationName(s"/${config.http.prefix}/archives/{org}/{project}/{id}") {
          (hasPermission(write) & projectNotDeprecated) {
            entity(as[Json]) { source =>
              complete(archives.create(resId, source).value.runWithStatus(Created))
            }
          }
        }
      },
      // Fetch archive
      (get & outputFormat(strict = true, Tar) & pathEndOrSingleSlash) {
        case Tar                           => getArchive(resId)
        case format: NonBinaryOutputFormat => getResource(resId)(format)
        case other                         => failWith(InvalidOutputFormat(other.toString))

      }
    )
  }

  private def getResource(resId: ResId)(implicit format: NonBinaryOutputFormat): Route =
    completeWithFormat(archives.fetch(resId).value.runWithStatus(OK))

  private def getArchive(resId: ResId): Route = {
    parameter("ignoreNotFound".as[Boolean] ? false) { ignoreNotFound =>
      onSuccess(archives.fetchArchive(resId, ignoreNotFound).value.runToFuture) {
        case Right(source) =>
          headerValueByType[Accept](()) { accept =>
            if (accept.mediaRanges.exists(_.matches(responseType)))
              complete(HttpEntity(responseType, source))
            else
              failWith(
                UnacceptedResponseContentType(
                  s"File Media Type '$responseType' does not match the Accept header value '${accept.mediaRanges.mkString(", ")}'"
                )
              )
          }
        case Left(err) => complete(err)
      }
    }
  }
}

object ArchiveRoutes {
  final def apply(archives: Archives[Task])(
      implicit acls: AccessControlLists,
      caller: Caller,
      project: Project,
      config: AppConfig
  ): ArchiveRoutes = new ArchiveRoutes(archives)
} 
Example 42
Source File: TagRoutes.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.http.scaladsl.model.StatusCodes.{Created, OK}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.iam.client.types._
import ch.epfl.bluebrain.nexus.kg.config.AppConfig
import ch.epfl.bluebrain.nexus.kg.config.Contexts.tagCtxUri
import ch.epfl.bluebrain.nexus.kg.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.kg.directives.AuthDirectives._
import ch.epfl.bluebrain.nexus.kg.directives.ProjectDirectives._
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources._
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.syntax._
import io.circe.{Encoder, Json}
import kamon.Kamon
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class TagRoutes private[routes] (resourceType: String, tags: Tags[Task], schema: Ref, write: Permission)(
    implicit acls: AccessControlLists,
    caller: Caller,
    project: Project,
    config: AppConfig
) {

  
  def routes(id: AbsoluteIri): Route =
    // Consume the tag segment
    pathPrefix("tags") {
      concat(
        // Create tag
        (post & parameter("rev".as[Long]) & pathEndOrSingleSlash) { rev =>
          operationName(opName) {
            (hasPermission(write) & projectNotDeprecated) {
              entity(as[Json]) { source =>
                Kamon.currentSpan().tag("resource.operation", "create")
                complete(tags.create(Id(project.ref, id), rev, source, schema).value.runWithStatus(Created))
              }
            }
          }
        },
        // Fetch a tag
        (get & projectNotDeprecated & pathEndOrSingleSlash) {
          operationName(opName) {
            hasPermission(read).apply {
              parameter("rev".as[Long].?) {
                case Some(rev) => complete(tags.fetch(Id(project.ref, id), rev, schema).value.runWithStatus(OK))
                case _         => complete(tags.fetch(Id(project.ref, id), schema).value.runWithStatus(OK))
              }
            }
          }
        }
      )
    }

  private implicit def tagsEncoder: Encoder[TagSet] =
    Encoder.instance(tags => Json.obj(nxv.tags.prefix -> Json.arr(tags.map(_.asJson).toSeq: _*)).addContext(tagCtxUri))

  private def opName: String = resourceType match {
    case "resources" => s"/${config.http.prefix}/resources/{org}/{project}/{schemaId}/{id}/tags"
    case _           => s"/${config.http.prefix}/$resourceType/{org}/{project}/{id}/tags"
  }
} 
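The tagsEncoder above is circe's standard Encoder.instance pattern: construct a Json value by hand from the fields of the type. A standalone sketch of the same pattern (the Tag case class here is illustrative, not the one from the routes):

import io.circe.syntax._
import io.circe.{Encoder, Json}

object EncoderSketch extends App {
  final case class Tag(rev: Long, value: String)

  implicit val tagEncoder: Encoder[Tag] = Encoder.instance { t =>
    Json.obj("rev" -> Json.fromLong(t.rev), "tag" -> Json.fromString(t.value))
  }

  println(Tag(1L, "v1.0").asJson.noSpaces) // {"rev":1,"tag":"v1.0"}
}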
Example 43
Source File: MonixExample.scala    From korolev   with Apache License 2.0 5 votes vote down vote up
import korolev.Context
import korolev.akka._
import korolev.server.{KorolevServiceConfig, StateLoader}
import korolev.state.javaSerialization._
import korolev.monix._

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

object MonixExample extends SimpleAkkaHttpKorolevApp {

  val ctx = Context[Task, Option[String], Any]

  import ctx._
  import levsha.dsl._
  import html._

  private val aInput = elementId()
  private val bInput = elementId()

  def service: AkkaHttpService = akkaHttpService {
    KorolevServiceConfig[Task, Option[String], Any](
      stateLoader = StateLoader.default(None),
      document = maybeResult => optimize {
        Html(
          body(
            form(
              input(aInput, `type` := "number", event("input")(onChange)),
              span("+"),
              input(bInput, `type` := "number", event("input")(onChange)),
              span("="),
              maybeResult.map(result => span(result)),
            )
          )
        )
      }
    )
  }

  private def onChange(access: Access) =
    for {
      a <- access.valueOf(aInput)
      b <- access.valueOf(bInput)
      _ <-
        if (a.trim.isEmpty || b.trim.isEmpty) Task.unit
        else access.transition(_ => Some((a.toInt + b.toInt).toString))
    } yield ()
} 
Example 44
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds)

  "file scan" ! {
    val actual = run(Scanner.pathScan(base), fs)
    val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

    actual.mustEqual(expected)
  }

  "Error from Filesystem" ! {
    val emptyFs: Filesystem = MockFilesystem(directories, Map.empty)

    val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs)
    val expected =  Left(new IOException().toString)

    actual.mustEqual(expected)
  }

  type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]
  def runE[T](program: Eff[E, T], fs: Filesystem) =
    //there are two nested Either in the stack, one from Exceptions and one from errors raised by the program
    //we convert to a common error type String then flatten
    program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten

  "Error - Report with non-numeric input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs)
    val expected = Left("Number of files must be numeric: not a number")

    actual.mustEqual(expected)
  }

  "Error - Report with non-positive input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "-1")), fs)
    val expected = Left("Invalid number of files -1")

    actual.mustEqual(expected)
  }
} 
Example 45
Source File: AdminRoutes.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.admin.routes

import akka.actor.ActorSystem
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.index.{OrganizationCache, ProjectCache}
import ch.epfl.bluebrain.nexus.admin.organizations.Organizations
import ch.epfl.bluebrain.nexus.admin.projects.Projects
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.routes.EventRoutes
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.{HttpConfig, PaginationConfig, PersistenceConfig}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

object AdminRoutes {

  
  final def apply(
      orgs: Organizations[Task],
      projects: Projects[Task],
      orgCache: OrganizationCache[Task],
      projCache: ProjectCache[Task],
      acls: Acls[Task],
      realms: Realms[Task]
  )(implicit
      as: ActorSystem,
      cfg: ServiceConfig
  ): Route = {
    implicit val hc: HttpConfig        = cfg.http
    implicit val pc: PersistenceConfig = cfg.persistence
    implicit val pgc: PaginationConfig = cfg.pagination

    val eventsRoutes  = new EventRoutes(acls, realms).routes
    val orgRoutes     = new OrganizationRoutes(orgs, orgCache, acls, realms).routes
    val projectRoutes = new ProjectRoutes(projects, orgCache, projCache, acls, realms).routes

    pathPrefix(cfg.http.prefix) {
      eventsRoutes ~ orgRoutes ~ projectRoutes
    }
  }
} 
Example 46
Source File: MonixParallelTests.scala    From freestyle   with Apache License 2.0 5 votes vote down vote up
package freestyle.free.tests

import freestyle.free.implicits._
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{Matchers, WordSpec}
import scala.concurrent.Await
import scala.concurrent.duration.Duration

class MonixParallelTests extends WordSpec with Matchers {
  "Applicative Parallel Support" should {
    "allow non deterministic execution when interpreting to monix.eval.Task" ignore {

      val test = new freestyle.NonDeterminismTestShared
      import test._

      implicit val interpreter = new freestyle.MixedFreeS.Handler[Task] {
        override def x: Task[Int] = Task(blocker(1, 1000L))
        override def y: Task[Int] = Task(blocker(2, 0L))
        override def z: Task[Int] = Task(blocker(3, 2000L))
      }

      Await.result(program.interpret[Task].runAsync, Duration.Inf) shouldBe List(3, 1, 2, 3)
      buf.toArray shouldBe Array(3, 2, 1, 3)
    }
  }
} 
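What the (ignored) test above is after: the applicative interpreter may run the three handlers concurrently, so their side effects interleave by completion time while the result list keeps positional order. A minimal illustration with plain monix, assuming a monix 3.x release that has Task.parSequence (earlier 3.x releases call it Task.gather):

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.Await
import scala.concurrent.duration._

object ParSequenceSketch extends App {
  val tasks = List(300L, 0L, 150L).map { d =>
    Task { Thread.sleep(d); println(s"done after ${d}ms"); d }
  }
  // Prints in completion order (0, 150, 300) but returns in input order.
  val result = Await.result(Task.parSequence(tasks).runToFuture, 5.seconds)
  println(result) // List(300, 0, 150)
}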
Example 47
Source File: PingPongSuite.scala    From lsp4s   with Apache License 2.0 5 votes vote down vote up
package tests

import java.util.concurrent.ConcurrentLinkedQueue
import minitest.SimpleTestSuite
import monix.execution.Scheduler.Implicits.global
import scala.collection.JavaConverters._
import scala.concurrent.Promise
import scala.meta.jsonrpc._
import scala.meta.jsonrpc.testkit._
import scribe.Logger


object PingPongSuite extends SimpleTestSuite {

  private val Ping = Endpoint.notification[String]("ping")
  private val Pong = Endpoint.notification[String]("pong")
  private val Hello = Endpoint.request[String, String]("hello")

  testAsync("ping pong") {
    val promise = Promise[Unit]()
    val pongs = new ConcurrentLinkedQueue[String]()
    val services = Services
      .empty(Logger.root)
      .request(Hello) { msg =>
        s"$msg, World!"
      }
      .notification(Pong) { message =>
        assert(pongs.add(message))
        if (pongs.size() == 2) {
          promise.complete(util.Success(()))
        }
      }
    val pongBack: LanguageClient => Services = { client =>
      services.notification(Ping) { message =>
        Pong.notify(message.replace("Ping", "Pong"))(client)
      }
    }
    val conn = TestConnection(pongBack, pongBack)
    for {
      _ <- Ping.notify("Ping from client")(conn.alice.client)
      _ <- Ping.notify("Ping from server")(conn.bob.client)
      Right(helloWorld) <- Hello.request("Hello")(conn.alice.client).runAsync
      _ <- promise.future
    } yield {
      assertEquals(helloWorld, "Hello, World!")
      val obtainedPongs = pongs.asScala.toList.sorted
      val expectedPongs = List("Pong from client", "Pong from server")
      assertEquals(obtainedPongs, expectedPongs)
      conn.cancel()
    }
  }

} 
Example 48
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds)

  "file scan" ! {
    val actual = run(Scanner.pathScan(base), fs)
    val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

    actual.mustEqual(expected)
  }

  "Error from Filesystem" ! {
    val emptyFs: Filesystem = MockFilesystem(directories, Map.empty)

    val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs)
    val expected = Left(new IOException().toString)

    actual.mustEqual(expected)
  }

  type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]
  def runE[T](program: Eff[E, T], fs: Filesystem) =
    //there are two nested Either in the stack, one from Exceptions and one from errors raised by the program
    //we convert to a common error type String then flatten
    program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten

  "Error - Report with non-numeric input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs)
    val expected = Left("Number of files must be numeric: not a number")

    actual.mustEqual(expected)
  }

  "Error - Report with non-positive input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "-1")), fs)
    val expected = Left("Invalid number of files -1")

    actual.mustEqual(expected)
  }
} 
Example 49
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
} 
Example 50
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val linkTarget = File(s"/somewhere/else/7.txt")
  val base1 = File(s"${base.path}/1.txt")
  val baseLink = Symlink(s"${base.path}/7.txt", linkTarget)
  val subdir = Directory(s"${base.path}/subdir")
  val sub2 = File(s"${subdir.path}/2.txt")
  val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget)
  val directories = Map(
    base -> List(subdir, base1, baseLink),
    subdir -> List(sub2, subLink)
  )
  val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3))

  val (actual, logs) = run(Scanner.pathScan[R](base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

} 
Example 51
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 52
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io._
import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(ScanConfig(2)).runReader(fs).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 53
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io._
import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  import EffOptics._

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(AppConfig(ScanConfig(2), fs)).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 54
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseTask"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2).runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 55
Source File: EventRoutes.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.client.types.{Organization, Project}
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.{Caller, Permission}
import ch.epfl.bluebrain.nexus.kg.resources.Event.JsonLd._
import ch.epfl.bluebrain.nexus.rdf.Iri.Path._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class EventRoutes(acls: Acls[Task], realms: Realms[Task], caller: Caller)(implicit
    override val as: ActorSystem,
    override val config: ServiceConfig
) extends AuthDirectives(acls, realms)
    with EventCommonRoutes {

  private val read: Permission = Permission.unsafe("resources/read")

  def routes(project: Project): Route = {

    lastEventId { offset =>
      operationName(s"/${config.http.prefix}/resources/{org}/{project}/events") {
        authorizeFor(project.organizationLabel / project.label, read)(caller) {
          complete(source(s"project=${project.uuid}", offset))
        }
      }
    }
  }

  def routes(org: Organization): Route =
    lastEventId { offset =>
      operationName(s"/${config.http.prefix}/resources/{org}/events") {
        authorizeFor(/ + org.label, read)(caller) {
          complete(source(s"org=${org.uuid}", offset))
        }
      }
    }
} 
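Note: the routes above can only respond once the underlying Monix Tasks are turned into Futures that Akka HTTP understands, which is what the imported global scheduler enables. A self-contained sketch of that conversion inside a route (the task and path are made up for illustration):

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

object TaskRouteSketch {
  // A Task only becomes something Akka HTTP can complete once it is
  // converted to a Future, and runToFuture needs an implicit Scheduler.
  val greeting: Task[String] = Task("hello")

  val route: Route =
    path("greeting") {
      get {
        onSuccess(greeting.runToFuture) { msg =>
          complete(msg)
        }
      }
    }
}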
Example 56
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
} 
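Note: taskAttempt surfaces Task failures as an Either value instead of a thrown exception, and runWriter collects the Log entries alongside the result, which is why run yields a pair. The failure-materializing step in isolation, using plain Monix Task#attempt:

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.duration._

object AttemptSketch {
  def main(args: Array[String]): Unit = {
    val failing: Task[Int] = Task.raiseError(new RuntimeException("boom"))
    // attempt materializes the failure as a Left instead of throwing,
    // mirroring what taskAttempt does inside the Eff stack.
    val outcome: Either[Throwable, Int] = failing.attempt.runSyncUnsafe(3.seconds)
    assert(outcome.isLeft)
  }
}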
Example 57
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val linkTarget = File(s"/somewhere/else/7.txt")
  val base1 = File(s"${base.path}/1.txt")
  val baseLink = Symlink(s"${base.path}/7.txt", linkTarget)
  val subdir = Directory(s"${base.path}/subdir")
  val sub2 = File(s"${subdir.path}/2.txt")
  val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget)
  val directories = Map(
    base -> List(subdir, base1, baseLink),
    subdir -> List(sub2, subLink)
  )
  val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3))

  val (actual, logs) = run(Scanner.pathScan[R](base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

} 
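Note: compared to the previous spec, this one adds a State[Set[FilePath], ?] effect, presumably so the scanner visits each symlink target only once (hence the expected 7 + 2 + 1 = 10 bytes over 3 distinct files). A minimal sketch of that visited-set idea with cats' State (the helper names are hypothetical):

import cats.data.State

object VisitedSketch {
  // Returns true only the first time a path is seen, updating the set.
  def firstVisit(path: String): State[Set[String], Boolean] =
    State(seen => if (seen(path)) (seen, false) else (seen + path, true))

  def main(args: Array[String]): Unit = {
    val program = for {
      a <- firstVisit("/somewhere/else/7.txt")
      b <- firstVisit("/somewhere/else/7.txt")
    } yield (a, b)
    val (first, second) = program.runA(Set.empty[String]).value
    assert(first && !second)
  }
}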
Example 58
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 59
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(ScanConfig(2)).runReader(fs).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 60
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  import EffOptics._

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(AppConfig(ScanConfig(2), fs)).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
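Note: this variant runs everything against a single AppConfig reader; the imported EffOptics presumably provides lens-based instances so interpreters that need only ScanConfig or Filesystem can zoom into the combined value. A sketch of such a zoom with the Monocle 2.x Lens API (the AppConfig fields here are assumptions for illustration):

import monocle.Lens

object OpticsSketch {
  final case class ScanConfig(topN: Int)
  final case class AppConfig(scan: ScanConfig, label: String)

  // A hand-written lens from the aggregate config to one component.
  val scanL: Lens[AppConfig, ScanConfig] =
    Lens[AppConfig, ScanConfig](_.scan)(sc => app => app.copy(scan = sc))

  def main(args: Array[String]): Unit = {
    val cfg = AppConfig(ScanConfig(2), "demo")
    assert(scanL.get(cfg).topN == 2)
    assert(scanL.set(ScanConfig(5))(cfg).scan.topN == 5)
  }
}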
Example 61
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseTask"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2).runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 62
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, FilesystemCmd, Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T]) =
    program.runReader(ScanConfig(2)).runFilesystemCmds(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base))

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
} 
Example 63
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, FilesystemCmd, Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T]) =
    program.runReader(ScanConfig(2)).runFilesystemCmds(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base))

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
} 
Example 64
Source File: Monix.scala    From arrows   with Apache License 2.0
package benchmarks

import monix.eval.Task
import monix.execution.Cancelable
import org.openjdk.jmh.annotations.Benchmark
import scala.util.Try
import scala.util.Success

trait MonixAsync {
  this: Benchmarks =>

  private[this] final val gen = MonixGen(dist)

  @Benchmark
  def monixTask = {
    import scala.concurrent._
    import scala.concurrent.duration._
    import monix.execution.Scheduler.Implicits.global
    Try(Await.result(gen(1).runAsync, Duration.Inf))
  }

}

trait MonixSync {
  this: Benchmarks =>

  private[this] final val gen = MonixGen(dist)

  @Benchmark
  def monixTask = {
    import monix.execution.Scheduler.Implicits.global
    Success(gen(1).runSyncMaybe.right.get)
  }
}

object MonixGen extends Gen[Int => Task[Int]] {

  def sync = Task.now _

  def async(schedule: Runnable => Unit) = {
    v =>
      Task.async[Int] {
        case (s, cb) =>
          schedule(() => cb.onSuccess(v))
          Cancelable.empty
      }
  }

  def failure(ex: Throwable) = _ => Task.raiseError(ex)

  def map(t: Int => Task[Int], f: Int => Int) =
    t.andThen(_.map(f))

  def flatMap(t: Int => Task[Int], f: Int => Task[Int]) =
    t.andThen(_.flatMap(f))

  def handle(t: Int => Task[Int], i: Int) =
    t.andThen(_.onErrorHandle(_ => i))
} 
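Note: the two benchmarks block in different ways: runAsync here is the Monix 2.x name for what Monix 3.x calls runToFuture (a CancelableFuture that Await then blocks on), while runSyncMaybe attempts to evaluate the task synchronously and returns Right(value) when no async boundary is hit. A small sketch against the Monix 3.x naming:

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.Await
import scala.concurrent.duration._

object RunModesSketch {
  def main(args: Array[String]): Unit = {
    val task = Task(1 + 1)
    // Start the task on the implicit global scheduler and obtain a
    // CancelableFuture; Await then blocks the calling thread on it.
    val n = Await.result(task.runToFuture, Duration.Inf)
    assert(n == 2)
  }
}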
Example 65
Source File: UserAccountRepositoryOnMemorySpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.memory

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.AggregateNotFoundException
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
import com.github.j5ik2o.dddbase.example.repository.util.ScalaFuturesSupportSpec
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ FreeSpec, Matchers }

import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }

class UserAccountRepositoryOnMemorySpec
    extends FreeSpec
    with ScalaFutures
    with ScalaFuturesSupportSpec
    with Matchers
    with SpecSupport {

  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("[email protected]"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryOnMemory" - {
    "store" in {
      val repository = UserAccountRepository.onMemory()
      val result: UserAccount = (for {
        _ <- repository.store(userAccount)
        r <- repository.resolveById(userAccount.id)
      } yield r).runToFuture.futureValue

      result shouldBe userAccount
    }
    "storeMulti" in {
      val repository = UserAccountRepository.onMemory()
      val result: Seq[UserAccount] = (for {
        _ <- repository.storeMulti(userAccounts)

        r <- repository.resolveMulti(userAccounts.map(_.id))
      } yield r).runToFuture.futureValue

      sameAs(result, userAccounts) shouldBe true
    }
    "store then expired" in {
      val repository = UserAccountRepository.onMemory(expireAfterWrite = Some(1 seconds))
      val resultFuture: Future[UserAccount] = (for {
        _ <- repository.store(userAccount)
        _ <- Task.pure(Thread.sleep(1000))
        r <- repository.resolveById(userAccount.id)
      } yield r).runToFuture

      an[AggregateNotFoundException] should be thrownBy {
        Await.result(resultFuture, Duration.Inf)
      }
    }
  }

} 
Example 66
Source File: QueryResultTypeCassandraMonixSpec.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra.monix

import io.getquill.context.cassandra.QueryResultTypeCassandraSpec
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

class QueryResultTypeCassandraMonixSpec extends QueryResultTypeCassandraSpec {

  val context = testMonixDB

  import context._

  def result[T](t: Task[T]) =
    await(t.runToFuture(global))

  def result[T](t: Observable[T]) =
    await(t.foldLeftL(List.empty[T])(_ :+ _).runToFuture)

  override def beforeAll = {
    result(context.run(deleteAll))
    result(context.run(liftQuery(entries).foreach(e => insert(e))))
    ()
  }

  "query" in {
    result(context.run(selectAll)) mustEqual entries
  }

  "stream" in {
    result(context.stream(selectAll)) mustEqual entries
  }

  "querySingle" - {
    "size" in {
      result(context.run(entitySize)) mustEqual 3
    }
    "parametrized size" in {
      result(context.run(parametrizedSize(lift(10000)))) mustEqual 0
    }
  }
} 
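Note: foldLeftL drains the streaming Observable into a single Task of a List, so streamed query results can be compared like ordinary ones. The collection step in isolation:

import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

import scala.concurrent.duration._

object DrainSketch {
  def main(args: Array[String]): Unit = {
    val source = Observable(1, 2, 3)
    // Fold every element into an accumulator; the result is a lazy Task.
    val folded: Task[List[Int]] = source.foldLeftL(List.empty[Int])(_ :+ _)
    assert(folded.runSyncUnsafe(3.seconds) == List(1, 2, 3))
  }
}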
Example 67
Source File: EncodingSpec.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra.monix

import io.getquill.context.cassandra.EncodingSpecHelper
import io.getquill.Query

class EncodingSpec extends EncodingSpecHelper {
  "encodes and decodes types" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testMonixDB._
      val result =
        for {
          _ <- testMonixDB.run(query[EncodingTestEntity].delete)
          _ <- testMonixDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e)))
          result <- testMonixDB.run(query[EncodingTestEntity])
        } yield {
          result
        }
      val f = result.runToFuture
      verify(await(f))
    }
  }

  "encodes collections" - {
    "stream" in {
      import monix.execution.Scheduler.Implicits.global
      import testMonixDB._
      val q = quote {
        (list: Query[Int]) =>
          query[EncodingTestEntity].filter(t => list.contains(t.id))
      }
      val result =
        for {
          _ <- testMonixDB.run(query[EncodingTestEntity].delete)
          _ <- testMonixDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e)))
          result <- testMonixDB.run(q(liftQuery(insertValues.map(_.id))))
        } yield {
          result
        }
      val f = result.runToFuture
      verify(await(f))
    }
  }
} 
Example 68
Source File: ApplicationController.scala    From monix-sample   with Apache License 2.0
package controllers

import akka.actor.ActorSystem
import akka.stream.Materializer
import engine.{BackPressuredWebSocketActor, DataProducer, SimpleWebSocketActor}
import monix.execution.Scheduler.Implicits.global
import play.api.Environment
import play.api.libs.json.JsValue
import play.api.libs.streams.ActorFlow
import play.api.mvc._
import scala.concurrent.duration._

class ApplicationController()
  (implicit env: Environment, as: ActorSystem, m: Materializer)
  extends Controller with JSONFormats {

  def index = Action {
    Ok(views.html.index(env))
  }

  def backPressuredStream(periodMillis: Int, seed: Long) =
    WebSocket.accept[JsValue, JsValue] { request =>
      val obs = new DataProducer(periodMillis.millis, seed)
      ActorFlow.actorRef(out => BackPressuredWebSocketActor.props(obs, out))
    }

  def simpleStream(periodMillis: Int, seed: Long) =
    WebSocket.accept[JsValue, JsValue] { request =>
      val obs = new DataProducer(periodMillis.millis, seed)
      ActorFlow.actorRef(out => SimpleWebSocketActor.props(obs, out))
    }
} 
Example 69
Source File: MonixSampleClient.scala    From monix-sample   with Apache License 2.0
package client

import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable
import shared.models.Signal

import scala.concurrent.duration._
import scala.scalajs.js

object MonixSampleClient extends js.JSApp {
  def main(): Unit = {
    val line1 = new DataConsumer(200.millis, 1274028492832L, doBackPressure = true)
      .collect { case s: Signal => s }
    val line2 = new DataConsumer(200.millis, 9384729038472L, doBackPressure = true)
      .collect { case s: Signal => s }
    val line3 = new DataConsumer(200.millis, -2938472934842L, doBackPressure = false)
      .collect { case s: Signal => s }
    val line4 = new DataConsumer(200.millis, -9826395057397L, doBackPressure = false)
      .collect { case s: Signal => s }

    Observable
      .combineLatest4(line1, line2, line3, line4)
      .subscribe(new Graph("lineChart"))
  }
} 
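Note: combineLatest4 emits a fresh tuple whenever any of the four lines produces a value, pairing it with the latest value seen on each of the others, so the Graph subscriber redraws on every signal. The operator in miniature with two sources (timings are arbitrary):

import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

import scala.concurrent.duration._

object CombineSketch {
  def main(args: Array[String]): Unit = {
    val fast = Observable.intervalAtFixedRate(100.millis).take(4L)
    val slow = Observable.intervalAtFixedRate(250.millis).take(2L)
    // Emits a pair whenever either source emits, once both have emitted.
    val pairs = Observable
      .combineLatest2(fast, slow)
      .toListL
      .runSyncUnsafe(3.seconds)
    assert(pairs.nonEmpty)
  }
}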
Example 70
Source File: UserAccountRepositoryByFreeSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.free

import java.time.ZonedDateTime

import cats.free.Free
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

class UserAccountRepositoryByFreeSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_account")

  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("[email protected]"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryByFree" - {
    "store" in {
      val program: Free[UserRepositoryDSL, UserAccount] = for {
        _      <- UserAccountRepositoryByFree.store(userAccount)
        result <- UserAccountRepositoryByFree.resolveById(userAccount.id)
      } yield result
      val skinny     = UserAccountRepository.bySkinny
      val evalResult = UserAccountRepositoryByFree.evaluate(skinny)(program)
      val result     = evalResult.run(AutoSession).runToFuture.futureValue
      result shouldBe userAccount
    }
  }
} 
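Note: the Free-based repository builds the store/resolve program as pure data over a UserRepositoryDSL algebra and only commits to a backend when evaluate supplies an interpreter. The general shape with cats.free, using a hypothetical key-value algebra:

import cats.free.Free
import cats.{Id, ~>}

object FreeSketch {
  // A tiny, hypothetical algebra: the program is just data until interpreted.
  sealed trait KVOp[A]
  final case class Put(key: String, value: String) extends KVOp[Unit]
  final case class Get(key: String)                extends KVOp[Option[String]]

  def put(k: String, v: String): Free[KVOp, Unit] = Free.liftF(Put(k, v))
  def get(k: String): Free[KVOp, Option[String]]  = Free.liftF(Get(k))

  // One possible interpreter: run the algebra against a mutable map.
  val interpreter: KVOp ~> Id = new (KVOp ~> Id) {
    private val store = scala.collection.mutable.Map.empty[String, String]
    def apply[A](op: KVOp[A]): Id[A] = op match {
      case Put(k, v) => store.update(k, v)
      case Get(k)    => store.get(k)
    }
  }

  def main(args: Array[String]): Unit = {
    val program = for {
      _ <- put("id", "42")
      v <- get("id")
    } yield v
    assert(program.foldMap(interpreter) == Some("42"))
  }
}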
Example 71
Source File: UserMessageRepositoryBySkinnyImplSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.skinny
import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

class UserMessageRepositoryBySkinnyImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {
  override val tables: Seq[String] = Seq("user_message")

  val userMessage = UserMessage(
    id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
    status = Status.Active,
    message = "ABC",
    createdAt = ZonedDateTime.now(),
    updatedAt = None
  )

  val userMessages = for (idx <- 1L to 10L)
    yield
      UserMessage(
        id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
        status = Status.Active,
        message = s"ABC${idx}",
        createdAt = ZonedDateTime.now(),
        updatedAt = None
      )
  val repository = new UserMessageRepositoryBySkinnyImpl

  "UserMessageRepositoryBySkinnyImpl" - {
    "store" in {
      val result = (for {
        _ <- repository.store(userMessage)
        r <- repository.resolveById(userMessage.id)
      } yield r).run(AutoSession).runToFuture.futureValue

      result shouldBe userMessage
    }
    "storeMulti" in {
      val result = (for {
        _ <- repository.storeMulti(userMessages)
        r <- repository.resolveMulti(userMessages.map(_.id))
      } yield r).run(AutoSession).runToFuture.futureValue

      sameAs(result, userMessages) shouldBe true
    }
  }
} 
Example 72
Source File: UserAccountRepositoryBySkinnyImplSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.skinny

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

class UserAccountRepositoryBySkinnyImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {

  val repository                   = new UserAccountRepositoryBySkinnyImpl
  override val tables: Seq[String] = Seq("user_account")

  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("[email protected]"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryBySkinny" - {
    "store" in {
      val result = (for {
        _ <- repository.store(userAccount)
        r <- repository.resolveById(userAccount.id)
      } yield r).run(AutoSession).runToFuture.futureValue

      result shouldBe userAccount
    }
    "storeMulti" in {
      val result = (for {
        _ <- repository.storeMulti(userAccounts)
        r <- repository.resolveMulti(userAccounts.map(_.id))
      } yield r).run(AutoSession).runToFuture.futureValue

      sameAs(result, userAccounts) shouldBe true
    }

  }

} 
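Note: the bySkinny repositories appear to return values in a ReaderT-like context over Task, so run(AutoSession) first supplies the session and only then is there a Task to convert with runToFuture. The same layering with cats' ReaderT (all names are illustrative):

import cats.data.ReaderT
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.duration._

object ReaderTSketch {
  type Session = String                    // stand-in for a real DB session
  type DB[A]   = ReaderT[Task, Session, A] // session => Task[A]

  def insertRow(row: Int): DB[Unit] = ReaderT(_ => Task.unit)
  def countRows: DB[Int]            = ReaderT(_ => Task(1))

  def main(args: Array[String]): Unit = {
    val program: DB[Int] = for {
      _ <- insertRow(42)
      n <- countRows
    } yield n
    // Supply the session first, then run the resulting Task.
    assert(program.run("auto-session").runSyncUnsafe(3.seconds) == 1)
  }
}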
Example 73
Source File: AirframeSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.airframe

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import com.github.j5ik2o.dddbase.example.repository.{ BySkinny, IdGenerator, UserAccountRepository }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession
import wvlet.airframe._

class AirframeSpec extends FreeSpecLike with FlywayWithMySQLSpecSupport with SkinnySpecSupport with Matchers {

  override val tables: Seq[String] = Seq("user_account")

  val design = newDesign.bind[UserAccountRepository[BySkinny]].toInstance(UserAccountRepository.bySkinny)

  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("[email protected]"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  "Airframe" - {
    "store and resolveById" in {
      design.withSession { session =>
        val repository = session.build[UserAccountRepository[BySkinny]]
        val result = (for {
          _ <- repository.store(userAccount)
          r <- repository.resolveById(userAccount.id)
        } yield r).run(AutoSession).runToFuture.futureValue
        result shouldBe userAccount
      }
    }
  }

} 
Example 74
Source File: UserAccountRepositoryBySlickImplSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.slick

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, Slick3SpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }

class UserAccountRepositoryBySlickImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with Slick3SpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_account")

  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("[email protected]"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryBySlickImpl" - {
    "store" in {
      val repository = new UserAccountRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val result = (for {
        _ <- repository.store(userAccount)
        r <- repository.resolveById(userAccount.id)
      } yield r).runToFuture.futureValue

      result shouldBe userAccount
    }
    "storeMulti" in {
      val repository = new UserAccountRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val result = (for {
        _ <- repository.storeMulti(userAccounts)
        r <- repository.resolveMulti(userAccounts.map(_.id))
      } yield r).runToFuture.futureValue

      sameAs(result, userAccounts) shouldBe true
    }
  }
} 
Example 75
Source File: UserMessageRepositoryBySlickImplSpec.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.slick

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, Slick3SpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }

class UserMessageRepositoryBySlickImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with Slick3SpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_message")

  val userMessage = UserMessage(
    id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
    status = Status.Active,
    message = "ABC",
    createdAt = ZonedDateTime.now(),
    updatedAt = None
  )

  val userMessages = for (idx <- 1L to 10L)
    yield
      UserMessage(
        id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
        status = Status.Active,
        message = s"ABC${idx}",
        createdAt = ZonedDateTime.now(),
        updatedAt = None
      )

  "UserMessageRepositoryBySlickImpl" - {
    "store" in {
      val repository = new UserMessageRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val result = (for {
        _ <- repository.store(userMessage)
        r <- repository.resolveById(userMessage.id)
      } yield r).runToFuture.futureValue

      result shouldBe userMessage
    }
    "storeMulti" in {
      val repository = new UserMessageRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val result = (for {
        _ <- repository.storeMulti(userMessages)
        r <- repository.resolveMulti(userMessages.map(_.id))
      } yield r).runToFuture.futureValue

      sameAs(result, userMessages) shouldBe true
    }
  }
} 
Example 76
Source File: Stream.scala    From pulse   with Apache License 2.0
package io.phdata.pulse

import io.phdata.pulse.log.{ HttpManager, JsonParser }
import monix.reactive.subjects.ConcurrentSubject
import monix.execution.Scheduler.Implicits.global
import monix.reactive.OverflowStrategy
import org.apache.log4j.helpers.LogLog
import org.apache.log4j.spi.LoggingEvent

import scala.concurrent.duration.FiniteDuration
import scala.util.{ Failure, Success, Try }

abstract class Stream[E](flushDuration: FiniteDuration, flushSize: Int, maxBuffer: Int) {

  val overflowStrategy = OverflowStrategy.DropNewAndSignal(maxBuffer, (_: Long) => None)
  val subject          = ConcurrentSubject.publish[E](overflowStrategy)

  subject
    .bufferTimedAndCounted(flushDuration, flushSize)
    .map(save)
    .subscribe()

  def append(value: E): Unit =
    Try { subject.onNext(value) } match {
      case Success(_) => ()
      case Failure(e) => LogLog.error("Error appending to stream", e)
    }

  def save(values: Seq[E])

}

class HttpStream(flushDuration: FiniteDuration,
                 flushSize: Int,
                 maxBuffer: Int,
                 httpManager: HttpManager)
    extends Stream[LoggingEvent](flushDuration, flushSize, maxBuffer) {

  val jsonParser = new JsonParser

  override def save(values: Seq[LoggingEvent]): Unit = {
    val logArray = values.toArray
    LogLog.debug(s"Flushing ${logArray.length} messages")
    val logMessage = jsonParser.marshallArray(logArray)

    httpManager.send(logMessage)
  }
} 
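Note: the stream above combines a back-pressure policy (drop new events beyond maxBuffer) with batched flushing (by time or by count). A minimal sketch of the same combination (the parameters are arbitrary):

import monix.execution.Scheduler.Implicits.global
import monix.reactive.OverflowStrategy
import monix.reactive.subjects.ConcurrentSubject

import scala.concurrent.Await
import scala.concurrent.duration._

object BufferSketch {
  def main(args: Array[String]): Unit = {
    // Drop writes beyond 1000 queued elements rather than blocking producers.
    val subject = ConcurrentSubject.publish[String](OverflowStrategy.DropNew(1000))
    // Flush whenever 100 events accumulate or 5 seconds pass, whichever first.
    val done = subject
      .bufferTimedAndCounted(5.seconds, 100)
      .foreach(batch => println(s"flushing ${batch.size} events"))
    (1 to 10).foreach(i => subject.onNext(s"event-$i"))
    subject.onComplete() // completing flushes the remaining buffer
    Await.result(done, 10.seconds)
  }
}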
Example 77
Source File: AuthDirectives.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage.routes

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.headers.OAuth2BearerToken
import akka.http.scaladsl.server.Directive1
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.directives.FutureDirectives.onComplete
import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClient
import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClient.{AccessToken, Caller}
import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClientError.IdentitiesClientStatusError
import ch.epfl.bluebrain.nexus.storage.StorageError._
import com.typesafe.scalalogging.Logger
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import scala.util.{Failure, Success}

object AuthDirectives {

  private val logger = Logger[this.type]

  
  def extractCaller(implicit identities: IamIdentitiesClient[Task], token: Option[AccessToken]): Directive1[Caller] =
    onComplete(identities().runToFuture).flatMap {
      case Success(caller)                                                   => provide(caller)
      case Failure(IdentitiesClientStatusError(StatusCodes.Unauthorized, _)) => failWith(AuthenticationFailed)
      case Failure(IdentitiesClientStatusError(StatusCodes.Forbidden, _))    => failWith(AuthorizationFailed)
      case Failure(err)                                                      =>
        val message = "Error when trying to extract the subject"
        logger.error(message, err)
        failWith(InternalError(message))
    }
} 
Example 78
Source File: StorageCacheSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import java.nio.file.Paths
import java.time.Clock

import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.DiskStorage
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class StorageCacheSpec
    extends ActorSystemFixture("StorageCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val clock: Clock             = Clock.systemUTC
  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val time   = clock.instant()
  val lastId = url"http://example.com/lastA"
  // initialInstant.minusSeconds(1L + genInt().toLong)

  val tempStorage = DiskStorage(ref1, genIri, 1L, false, true, "alg", Paths.get("/tmp"), read, write, 1024L)

  val lastStorageProj1 = tempStorage.copy(id = lastId)
  val lastStorageProj2 = tempStorage.copy(ref = ref2, id = lastId)

  val storagesProj1: List[DiskStorage] = List.fill(5)(tempStorage.copy(id = genIri)) :+ lastStorageProj1
  val storagesProj2: List[DiskStorage] = List.fill(5)(tempStorage.copy(ref = ref2, id = genIri)) :+ lastStorageProj2

  private val cache = StorageCache[Task]

  "StorageCache" should {

    "index storages" in {
      forAll((storagesProj1 ++ storagesProj2).zipWithIndex) {
        case (storage, index) =>
          implicit val instant = time.plusSeconds(index.toLong)
          cache.put(storage).runToFuture.futureValue
          cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual Some(storage)
      }
    }

    "get latest default storage" in {
      cache.getDefault(ref1).runToFuture.futureValue shouldEqual Some(lastStorageProj1)
      cache.getDefault(ref2).runToFuture.futureValue shouldEqual Some(lastStorageProj2)
      cache.getDefault(ProjectRef(genUUID)).runToFuture.futureValue shouldEqual None
    }

    "list storages" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs storagesProj2
    }

    "deprecate storage" in {
      val storage          = storagesProj1.head
      implicit val instant = time.plusSeconds(30L)
      cache.put(storage.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1.filterNot(_ == storage)
    }
  }
} 
Example 79
Source File: ResolverCacheSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver       = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver   =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
Example 80
Source File: IdentitiesRoutes.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.routes

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.Caller.JsonLd._
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.marshallers.instances._
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global


class IdentitiesRoutes(acls: Acls[Task], realms: Realms[Task])(implicit http: HttpConfig)
    extends AuthDirectives(acls, realms) {

  def routes: Route = {
    (pathPrefix("identities") & pathEndOrSingleSlash) {
      operationName(s"/${http.prefix}/identities") {
        (extractCaller & get) { caller =>
          complete(caller)
        }
      }
    }
  }
} 
Example 81
Source File: PermissionsRoutes.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.routes

import akka.http.javadsl.server.Rejections._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.permissions.Permissions
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.routes.PermissionsRoutes.PatchPermissions
import ch.epfl.bluebrain.nexus.iam.routes.PermissionsRoutes.PatchPermissions.{Append, Replace, Subtract}
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.iam.types.ResourceF._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import ch.epfl.bluebrain.nexus.service.marshallers.instances._
import io.circe.{Decoder, DecodingFailure}
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global


class PermissionsRoutes(permissions: Permissions[Task], acls: Acls[Task], realms: Realms[Task])(implicit
    http: HttpConfig
) extends AuthDirectives(acls, realms) {

  def routes: Route =
    (pathPrefix("permissions") & pathEndOrSingleSlash) {
      operationName(s"/${http.prefix}/permissions") {
        extractCaller { implicit caller =>
          concat(
            get {
              parameter("rev".as[Long].?) {
                case Some(rev) => complete(permissions.fetchAt(rev).runNotFound)
                case None      => complete(permissions.fetch.runToFuture)
              }
            },
            (put & parameter("rev" ? 0L)) { rev =>
              entity(as[PatchPermissions]) {
                case Replace(set) =>
                  complete(permissions.replace(set, rev).runToFuture)
                case _            => reject(validationRejection("Only @type 'Replace' is permitted when using 'put'."))
              }
            },
            delete {
              parameter("rev".as[Long]) { rev => complete(permissions.delete(rev).runToFuture) }
            },
            (patch & parameter("rev" ? 0L)) { rev =>
              entity(as[PatchPermissions]) {
                case Append(set)   =>
                  complete(permissions.append(set, rev).runToFuture)
                case Subtract(set) =>
                  complete(permissions.subtract(set, rev).runToFuture)
                case _             =>
                  reject(validationRejection("Only @type 'Append' or 'Subtract' is permitted when using 'patch'."))
              }
            }
          )
        }
      }
    }
}

object PermissionsRoutes {

  sealed private[routes] trait PatchPermissions extends Product with Serializable

  private[routes] object PatchPermissions {

    final case class Append(permissions: Set[Permission])   extends PatchPermissions
    final case class Subtract(permissions: Set[Permission]) extends PatchPermissions
    final case class Replace(permissions: Set[Permission])  extends PatchPermissions

    implicit val patchPermissionsDecoder: Decoder[PatchPermissions] =
      Decoder.instance { hc =>
        for {
          permissions <- hc.get[Set[Permission]]("permissions")
          tpe          = hc.get[String]("@type").getOrElse("Replace")
          patch       <- tpe match {
                           case "Replace"  => Right(Replace(permissions))
                           case "Append"   => Right(Append(permissions))
                           case "Subtract" => Right(Subtract(permissions))
                           case _          => Left(DecodingFailure("@type field must have Append or Subtract value", hc.history))
                         }
        } yield patch
      }
  }

} 
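Note: the PatchPermissions decoder dispatches on an optional @type field, defaulting to Replace when the field is absent. The same dispatch pattern in isolation with circe (the Op algebra is hypothetical):

import io.circe.{Decoder, DecodingFailure}
import io.circe.parser.decode

object TypeDispatchSketch {
  sealed trait Op
  final case class Add(n: Int)    extends Op
  final case class Remove(n: Int) extends Op

  implicit val opDecoder: Decoder[Op] = Decoder.instance { hc =>
    for {
      n  <- hc.get[Int]("n")
      op <- hc.get[String]("@type").flatMap {
              case "Add"    => Right(Add(n))
              case "Remove" => Right(Remove(n))
              case other    => Left(DecodingFailure(s"unknown @type: $other", hc.history))
            }
    } yield op
  }

  def main(args: Array[String]): Unit =
    assert(decode[Op]("""{"@type":"Add","n":1}""") == Right(Add(1)))
}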
Example 82
Source File: package.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg

import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.server.Directives.complete
import akka.http.scaladsl.server.{MalformedQueryParamRejection, Route}
import cats.Functor
import cats.data.{EitherT, OptionT}
import cats.instances.future._
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources.Rejection.NotFound.notFound
import ch.epfl.bluebrain.nexus.kg.resources.{Ref, Rejection, ResourceV}
import ch.epfl.bluebrain.nexus.kg.routes.OutputFormat.{DOT, Triples}
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import monix.execution.Scheduler.Implicits.global

import scala.concurrent.Future

package object routes {

  private[routes] def completeWithFormat(
      fetched: Future[Either[Rejection, (StatusCode, ResourceV)]]
  )(implicit format: NonBinaryOutputFormat): Route =
    completeWithFormat(EitherT(fetched))

  private def completeWithFormat(
      fetched: EitherT[Future, Rejection, (StatusCode, ResourceV)]
  )(implicit format: NonBinaryOutputFormat): Route =
    format match {
      case f: JsonLDOutputFormat =>
        implicit val format = f
        complete(fetched.value)
      case Triples               =>
        implicit val format = Triples
        complete(fetched.map { case (status, resource) => status -> resource.value.graph.ntriples }.value)
      case DOT                   =>
        implicit val format = DOT
        complete(fetched.map { case (status, resource) => status -> resource.value.graph.dot() }.value)
    }

  private[routes] val read: Permission = Permission.unsafe("resources/read")

  private[routes] val schemaError =
    MalformedQueryParamRejection("schema", "The provided schema does not match the schema on the Uri")

  implicit private[routes] class FOptionSyntax[F[_], A](private val fOpt: F[Option[A]]) extends AnyVal {
    def toNotFound(id: AbsoluteIri)(implicit F: Functor[F]): EitherT[F, Rejection, A] =
      OptionT(fOpt).toRight(notFound(Ref(id)))
  }
} 
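The FOptionSyntax enrichment above is just OptionT(fOpt).toRight(...) packaged as an extension method. A minimal sketch of the same pattern, with cats.Id standing in for the effect F and a plain String standing in for Rejection (both stand-ins are hypothetical):

import cats.Id
import cats.data.{EitherT, OptionT}

object ToNotFoundSketch extends App {

  // Same shape as toNotFound: lift an F[Option[A]] into EitherT, mapping None to an error.
  def toNotFound[A](fOpt: Id[Option[A]], err: String): EitherT[Id, String, A] =
    OptionT(fOpt).toRight(err)

  println(toNotFound(Option(42), "not found").value)        // Right(42)
  println(toNotFound(Option.empty[Int], "not found").value) // Left(not found)
}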
Example 83
Source File: GlobalEventRoutes.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._
import akka.http.scaladsl.server.Directives.complete
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.{Caller, Permission}
import ch.epfl.bluebrain.nexus.kg.persistence.TaggingAdapter
import ch.epfl.bluebrain.nexus.kg.resources.Event.JsonLd._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class GlobalEventRoutes(acls: Acls[Task], realms: Realms[Task], caller: Caller)(implicit
    override val as: ActorSystem,
    override val config: ServiceConfig
) extends AuthDirectives(acls, realms)
    with EventCommonRoutes {

  private val read: Permission = Permission.unsafe("events/read")

  def routes: Route =
    lastEventId { offset =>
      operationName(s"/${config.http.prefix}/events") {
        authorizeFor(permission = read)(caller) {
          complete(source(TaggingAdapter.EventTag, offset))
        }
      }
    }
} 
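The lastEventId directive comes from EventCommonRoutes, which is not shown here. As a hedged sketch, such a directive usually reads the SSE Last-Event-ID header and converts it into an akka-persistence-query Offset, falling back to NoOffset; the actual nexus implementation may differ in its details:

import java.util.UUID

import akka.http.scaladsl.server.Directive1
import akka.http.scaladsl.server.Directives._
import akka.persistence.query.{NoOffset, Offset, TimeBasedUUID}

import scala.util.Try

object LastEventIdSketch {
  // Resume an SSE stream from the offset carried in the Last-Event-ID header,
  // falling back to NoOffset when the header is absent or unparseable.
  def lastEventIdSketch: Directive1[Offset] =
    optionalHeaderValueByName("Last-Event-ID").map {
      case Some(id) => Try[Offset](TimeBasedUUID(UUID.fromString(id))).getOrElse(NoOffset)
      case None     => NoOffset
    }
}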
Example 84
Source File: ArchiveRoutes.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.http.scaladsl.model.StatusCodes.{Created, OK}
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.model.{HttpEntity, MediaTypes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.Caller
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.kg.KgError.{InvalidOutputFormat, UnacceptedResponseContentType}
import ch.epfl.bluebrain.nexus.kg.archives.Archive._
import ch.epfl.bluebrain.nexus.kg.directives.PathDirectives._
import ch.epfl.bluebrain.nexus.kg.directives.ProjectDirectives._
import ch.epfl.bluebrain.nexus.kg.directives.QueryDirectives.outputFormat
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources._
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import ch.epfl.bluebrain.nexus.kg.routes.OutputFormat.Tar
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.Iri.Path._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import io.circe.Json
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class ArchiveRoutes private[routes] (archives: Archives[Task], acls: Acls[Task], realms: Realms[Task])(implicit
    project: Project,
    caller: Caller,
    config: ServiceConfig
) extends AuthDirectives(acls, realms) {

  private val responseType              = MediaTypes.`application/x-tar`
  private val projectPath               = project.organizationLabel / project.label
  implicit private val subject: Subject = caller.subject

  // Routes for the archive resource identified by 'id'.
  def routes(id: AbsoluteIri): Route = {
    val resId = Id(project.ref, id)
    concat(
      // Create archive
      (put & pathEndOrSingleSlash) {
        operationName(s"/${config.http.prefix}/archives/{org}/{project}/{id}") {
          (authorizeFor(projectPath, write) & projectNotDeprecated) {
            entity(as[Json]) { source =>
              complete(archives.create(resId, source).value.runWithStatus(Created))
            }
          }
        }
      },
      // Fetch archive
      (get & outputFormat(strict = true, Tar) & pathEndOrSingleSlash) {
        case Tar                           => getArchive(resId)
        case format: NonBinaryOutputFormat => getResource(resId)(format)
        case other                         => failWith(InvalidOutputFormat(other.toString))
      }
    )
  }

  private def getResource(resId: ResId)(implicit format: NonBinaryOutputFormat): Route =
    completeWithFormat(archives.fetch(resId).value.runWithStatus(OK))

  private def getArchive(resId: ResId): Route = {
    (parameter("ignoreNotFound".as[Boolean] ? false) & extractCallerAcls(anyProject)) { (ignoreNotFound, acls) =>
      onSuccess(archives.fetchArchive(resId, ignoreNotFound)(acls, caller).value.runToFuture) {
        case Right(source) =>
          headerValueByType[Accept](()) { accept =>
            if (accept.mediaRanges.exists(_.matches(responseType)))
              complete(HttpEntity(responseType, source))
            else
              failWith(
                UnacceptedResponseContentType(
                  s"File Media Type '$responseType' does not match the Accept header value '${accept.mediaRanges.mkString(", ")}'"
                )
              )
          }
        case Left(err)     => complete(err)
      }
    }
  }
} 
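The Accept negotiation in getArchive reduces to MediaRange.matches over the header's media ranges. A small sketch of how those checks behave (header values assumed for illustration):

import akka.http.scaladsl.model.{MediaRange, MediaRanges, MediaTypes}
import akka.http.scaladsl.model.headers.Accept

object AcceptSketch extends App {
  val tar = MediaTypes.`application/x-tar`

  // A wildcard Accept header matches the tar media type: the archive is served.
  println(Accept(MediaRanges.`*/*`).mediaRanges.exists(_.matches(tar)))                         // true
  // An Accept restricted to JSON does not match: the UnacceptedResponseContentType branch fires.
  println(Accept(MediaRange(MediaTypes.`application/json`)).mediaRanges.exists(_.matches(tar))) // false
}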
Example 85
Source File: TagRoutes.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.routes

import akka.http.scaladsl.model.StatusCodes.{Created, OK}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.iam.types.{Caller, Permission}
import ch.epfl.bluebrain.nexus.kg.config.Contexts.tagCtxUri
import ch.epfl.bluebrain.nexus.kg.directives.ProjectDirectives._
import ch.epfl.bluebrain.nexus.kg.marshallers.instances._
import ch.epfl.bluebrain.nexus.kg.resources._
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.Iri.Path._
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.directives.AuthDirectives
import io.circe.syntax._
import io.circe.{Encoder, Json}
import kamon.Kamon
import kamon.instrumentation.akka.http.TracingDirectives.operationName
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

class TagRoutes private[routes] (
    resourceType: String,
    tags: Tags[Task],
    acls: Acls[Task],
    realms: Realms[Task],
    schema: Ref,
    write: Permission
)(implicit
    caller: Caller,
    project: Project,
    config: ServiceConfig
) extends AuthDirectives(acls, realms) {

  private val projectPath               = project.organizationLabel / project.label
  implicit private val subject: Subject = caller.subject

  // Routes for the tags sub-resource of the resource identified by 'id'.
  def routes(id: AbsoluteIri): Route =
    // Consume the tag segment
    pathPrefix("tags") {
      concat(
        // Create tag
        (post & parameter("rev".as[Long]) & pathEndOrSingleSlash) { rev =>
          operationName(opName) {
            (authorizeFor(projectPath, write) & projectNotDeprecated) {
              entity(as[Json]) { source =>
                Kamon.currentSpan().tag("resource.operation", "create")
                complete(tags.create(Id(project.ref, id), rev, source, schema).value.runWithStatus(Created))
              }
            }
          }
        },
        // Fetch a tag
        (get & projectNotDeprecated & pathEndOrSingleSlash) {
          operationName(opName) {
            authorizeFor(projectPath, read)(caller) {
              parameter("rev".as[Long].?) {
                case Some(rev) => complete(tags.fetch(Id(project.ref, id), rev, schema).value.runWithStatus(OK))
                case _         => complete(tags.fetch(Id(project.ref, id), schema).value.runWithStatus(OK))
              }
            }
          }
        }
      )
    }

  implicit private def tagsEncoder: Encoder[TagSet] =
    Encoder.instance(tags => Json.obj("tags" -> Json.arr(tags.map(_.asJson).toSeq: _*)).addContext(tagCtxUri))

  private def opName: String                        =
    resourceType match {
      case "resources" => s"/${config.http.prefix}/resources/{org}/{project}/{schemaId}/{id}/tags"
      case _           => s"/${config.http.prefix}/$resourceType/{org}/{project}/{id}/tags"
    }
}
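For reference, tagsEncoder above wraps the individual tag encodings in a "tags" array and then injects the @context via addContext. A sketch with a stand-in Tag type (field names are assumed; the @context injection is omitted):

import io.circe.syntax._
import io.circe.{Encoder, Json}

object TagsEncoderSketch extends App {

  // Stand-in for the kg Tag type (revision plus tag label); field names are assumed.
  final case class Tag(rev: Long, value: String)
  implicit val tagEncoder: Encoder[Tag] =
    Encoder.instance(t => Json.obj("rev" -> t.rev.asJson, "tag" -> t.value.asJson))

  val tags: Set[Tag] = Set(Tag(1L, "v1.0.0"), Tag(2L, "v1.0.1"))
  println(Json.obj("tags" -> Json.arr(tags.map(_.asJson).toSeq: _*)).noSpaces)
  // {"tags":[{"rev":1,"tag":"v1.0.0"},{"rev":2,"tag":"v1.0.1"}]}
}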