java.time.Instant Scala Examples

The following examples show how to use java.time.Instant. They are taken from open-source Scala projects; the source file, originating project, and license are listed above each example.
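
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the java.time.Instant operations that recur throughout them: obtaining the current instant, converting to epoch seconds and milliseconds, parsing ISO-8601 text, and doing arithmetic with ChronoUnit.

import java.time.Instant
import java.time.temporal.ChronoUnit

object InstantBasics extends App {
  val now: Instant        = Instant.now()                            // current UTC instant
  val seconds: Long       = now.getEpochSecond                       // seconds since 1970-01-01T00:00:00Z
  val millis: Long        = now.toEpochMilli                         // milliseconds since the epoch
  val parsed: Instant     = Instant.parse("2015-11-10T10:15:30.00Z") // ISO-8601 text
  val fromMillis: Instant = Instant.ofEpochMilli(millis)             // back from epoch millis
  val lastWeek: Instant   = now.minus(7, ChronoUnit.DAYS)            // ChronoUnit arithmetic
  println(s"now=$now seconds=$seconds millis=$millis parsed=$parsed lastWeek=$lastWeek")
}
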
Example 1
Source File: CallRecordMergeSpec.scala    From pipelines-examples    with Apache License 2.0
package pipelines.examples.carly.ingestor

import java.time.Instant
import java.time.temporal.ChronoUnit

import akka.actor._
import akka.stream._
import akka.stream.scaladsl._
import akka.testkit._
import org.scalatest._
import org.scalatest.concurrent._

import pipelines.akkastream.testkit.scaladsl._
import pipelines.examples.carly.data._

class CallRecordMergeSpec extends WordSpec with MustMatchers with ScalaFutures with BeforeAndAfterAll {

  private implicit val system = ActorSystem("CallRecordMergeSpec")
  private implicit val mat = ActorMaterializer()

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "A CallRecordMerge" should {
    "merge incoming data" in {
      val testkit = AkkaStreamletTestKit(system, mat)
      val streamlet = new CallRecordMerge

      val instant = Instant.now.toEpochMilli / 1000
      val past = Instant.now.minus(5000, ChronoUnit.DAYS).toEpochMilli / 1000

      val cr1 = CallRecord("user-1", "user-2", "f", 10L, instant)
      val cr2 = CallRecord("user-1", "user-2", "f", 15L, instant)
      val cr3 = CallRecord("user-1", "user-2", "f", 18L, instant)
      val cr4 = CallRecord("user-1", "user-2", "f", 40L, past)
      val cr5 = CallRecord("user-1", "user-2", "f", 70L, past)
      val cr6 = CallRecord("user-3", "user-1", "f", 80L, past)

      val source0 = Source(Vector(cr1, cr2, cr3))
      val source1 = Source(Vector(cr4, cr5))
      val source2 = Source(Vector(cr6))

      val in0 = testkit.inletFromSource(streamlet.in0, source0)
      val in1 = testkit.inletFromSource(streamlet.in1, source1)
      val in2 = testkit.inletFromSource(streamlet.in2, source2)
      val out = testkit.outletAsTap(streamlet.out)

      testkit.run(streamlet, List(in0, in1, in2), out, () ⇒ {
        out.probe.expectMsg(("user-1", cr1))
        out.probe.expectMsg(("user-1", cr4))
        out.probe.expectMsg(("user-3", cr6))
        out.probe.expectMsg(("user-1", cr2))
        out.probe.expectMsg(("user-1", cr5))
        out.probe.expectMsg(("user-1", cr3))
      })

      out.probe.expectMsg(Completed)
    }
  }
} 
Example 2
Source File: ParameterConversions.scala    From scruid    with Apache License 2.0
package ing.wbaa.druid.sql

import java.sql.Timestamp
import java.time.{ Instant, LocalDate, LocalDateTime }

import scala.language.implicitConversions

import ing.wbaa.druid.{ DruidConfig, SQLQueryParameter, SQLQueryParameterType }

trait ParameterConversions {
  implicit def char2Param(v: Char): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Char, v.toString)

  implicit def string2Param(v: String): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Varchar, v)

  implicit def byte2Param(v: Byte): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Tinyint, v.toString)

  implicit def short2Param(v: Short): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Smallint, v.toString)

  implicit def int2Param(v: Int): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Integer, v.toString)

  implicit def long2Param(v: Long): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Bigint, v.toString)

  implicit def float2Param(v: Float): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Float, v.toString)

  implicit def double2Param(v: Double): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Double, v.toString)

  implicit def boolean2Param(v: Boolean): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Boolean, v.toString)

  implicit def localDate2Param(v: LocalDate)(implicit config: DruidConfig =
                                               DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Date, v.format(config.FormatterDate))

  implicit def localDateTime2Param(
      v: LocalDateTime
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, v.format(config.FormatterDateTime))

  implicit def timestamp2Param(v: Timestamp)(implicit config: DruidConfig =
                                               DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v.toInstant))

  implicit def instant2Param(
      v: Instant
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v))
} 
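
Because instant2Param above takes its DruidConfig implicitly with a default value, an Instant (or any other supported type) can be assigned directly where a SQLQueryParameter is expected. A minimal usage sketch, assuming only the trait shown above and scruid's default configuration (the demo object name is ours):

import java.time.Instant
import ing.wbaa.druid.SQLQueryParameter
import ing.wbaa.druid.sql.ParameterConversions

object ParameterConversionDemo extends ParameterConversions {
  // instant2Param formats the Instant as a Druid TIMESTAMP parameter
  val timestampParam: SQLQueryParameter = Instant.parse("2020-01-01T00:00:00Z")
  // string2Param and int2Param wrap plain values the same way
  val nameParam: SQLQueryParameter  = "wikipedia"
  val limitParam: SQLQueryParameter = 10
}
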
Example 3
Source File: HttpIncomingReceiverTest.scala    From sumobot    with Apache License 2.0
package com.sumologic.sumobot.http_frontend

import java.time.Instant

import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.model.ws.TextMessage
import akka.stream.scaladsl.Source
import akka.testkit.{TestActorRef, TestActors, TestKit, TestProbe}
import com.sumologic.sumobot.core.HttpReceptionist
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.test.SumoBotSpec
import com.sumologic.sumobot.test.annotated.SumoBotTestKit
import org.scalatest.BeforeAndAfterAll

class HttpIncomingReceiverTest
  extends SumoBotTestKit(ActorSystem("HttpIncomingReceiverTest"))
  with BeforeAndAfterAll {

  private val probe = new TestProbe(system)
  system.eventStream.subscribe(probe.ref, classOf[IncomingMessage])

  private val dummyActor = TestActorRef(TestActors.blackholeProps)
  private val httpIncomingReceiver = TestActorRef(new HttpIncomingReceiver(dummyActor))

  "HttpIncomingReceiver" should {
    "publish IncomingMessage" when {
      "received streamed TextMessage" in {
        val msgSource = Source(List("hello"))
        val streamedMsg = TextMessage.Streamed(msgSource)

        httpIncomingReceiver ! streamedMsg
        val result = probe.expectMsgClass(classOf[IncomingMessage])
        result.canonicalText should be ("hello")
        result.addressedToUs should be (true)
        result.channel should be (HttpReceptionist.DefaultSumoBotChannel)
        result.attachments should be (Seq.empty)
        result.sentBy.plainTextReference should be (HttpReceptionist.DefaultClientUser.id)
      }

      "received strict TextMessage" in {
        val strictMsg = TextMessage.Strict("hi!")

        httpIncomingReceiver ! strictMsg

        val result = probe.expectMsgClass(classOf[IncomingMessage])
        result.canonicalText should be ("hi!")
        result.addressedToUs should be (true)
        result.channel should be (HttpReceptionist.DefaultSumoBotChannel)
        result.attachments should be (Seq.empty)
        result.sentBy.plainTextReference should be (HttpReceptionist.DefaultClientUser.id)
      }

      "properly format date" when {
        "sending IncomingMessage" in {
          val strictMsg = TextMessage.Strict("test")

          httpIncomingReceiver ! strictMsg
          val result = probe.expectMsgClass(classOf[IncomingMessage])

          val currentDate = Instant.now().getEpochSecond.toDouble
          val messageDate = result.idTimestamp.toDouble

          messageDate should be (currentDate +- 5.0)
        }
      }
    }

    "stop itself and outcoming actor" when {
      "stream ended" in {
        val outcomingActor = TestActorRef(TestActors.blackholeProps)
        val testProbeOutcoming = TestProbe()
        testProbeOutcoming.watch(outcomingActor)

        val shutdownReceiver = TestActorRef(new HttpIncomingReceiver(outcomingActor))
        val testProbeShutdown = TestProbe()
        testProbeShutdown.watch(shutdownReceiver)

        shutdownReceiver ! HttpIncomingReceiver.StreamEnded

        testProbeOutcoming.expectTerminated(outcomingActor)
        testProbeShutdown.expectTerminated(shutdownReceiver)
      }
    }
  }

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }
} 
Example 4
Source File: HttpReceptionist.scala    From sumobot    with Apache License 2.0
package com.sumologic.sumobot.core

import java.time.Instant

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import com.sumologic.sumobot.core.model.PublicChannel
import com.sumologic.sumobot.plugins.BotPlugin.{InitializePlugin, PluginAdded, PluginRemoved}
import play.api.libs.json.{JsObject, JsValue}
import slack.api.RtmStartState
import slack.models.{Channel, Group, Im, Team, User}
import slack.rtm.RtmState

object HttpReceptionist {
  private[core] val DefaultChannel = Channel("C0001SUMO", "sumobot", Instant.now().getEpochSecond(),
    Some("U0001SUMO"), Some(false), Some(true), Some(false), Some(false), Some(true), None, Some(false), Some(false), None, None, None, None, None, None, None, None)
  val DefaultSumoBotChannel = PublicChannel(DefaultChannel.id, DefaultChannel.name)

  val DefaultBotUser = User("U0001SUMO", "sumobot-bot", None, None, None, None, None, None, None, None, None, None, None, None, None, None)
  val DefaultClientUser = User("U0002SUMO", "sumobot-client", None, None, None, None, None, None, None, None, None, None, None, None, None, None)

  private[core] val StateUrl = ""
  private[core] val StateTeam = Team("T0001SUMO", "Sumo Bot", "sumobot", "sumologic.com", 30, false, new JsObject(Map.empty), "std")
  private[core] val StateUsers: Seq[User] = Array(DefaultBotUser, DefaultClientUser)
  private[core] val StateChannels: Seq[Channel] = Array(DefaultChannel)
  private[core] val StateGroups: Seq[Group] = Seq.empty
  private[core] val StateIms: Seq[Im] = Seq.empty
  private[core] val StateBots: Seq[JsValue] = Seq.empty

  private[core] val StartState = RtmStartState(StateUrl, DefaultBotUser, StateTeam, StateUsers, StateChannels, StateGroups, StateIms, StateBots)
  private[core] val State = new RtmState(StartState)
}

class HttpReceptionist(brain: ActorRef) extends Actor with ActorLogging {
  private val pluginRegistry = context.system.actorOf(Props(classOf[PluginRegistry]), "plugin-registry")

  override def receive: Receive = {
    case message@PluginAdded(plugin, _) =>
      plugin ! InitializePlugin(HttpReceptionist.State, brain, pluginRegistry)
      pluginRegistry ! message

    case message@PluginRemoved(_) =>
      pluginRegistry ! message
  }
} 
Example 5
Source File: HttpIncomingReceiver.scala    From sumobot    with Apache License 2.0
package com.sumologic.sumobot.http_frontend

import java.time.Instant
import java.util.concurrent.TimeUnit

import akka.actor.{Actor, ActorLogging, ActorRef}
import akka.pattern.pipe
import scala.concurrent.ExecutionContext.Implicits.global
import akka.http.scaladsl.model.ws.TextMessage
import akka.stream.ActorMaterializer
import com.sumologic.sumobot.core.HttpReceptionist
import com.sumologic.sumobot.core.model.{IncomingMessage, UserSender}

import scala.concurrent.duration.Duration

object HttpIncomingReceiver {
  case class StreamEnded()
  private val StrictTimeout = Duration.create(5, TimeUnit.SECONDS)
}

class HttpIncomingReceiver(outcomingRef: ActorRef) extends Actor with ActorLogging {
  private implicit val materializer = ActorMaterializer()

  override def receive: Receive = {
    case streamedMsg: TextMessage.Streamed =>
      streamedMsg.toStrict(HttpIncomingReceiver.StrictTimeout).pipeTo(self)(sender())

    case strictMsg: TextMessage.Strict =>
      val contents = strictMsg.getStrictText
      val incomingMessage = IncomingMessage(contents, true, HttpReceptionist.DefaultSumoBotChannel,
        formatDateNow(), None, Seq.empty, UserSender(HttpReceptionist.DefaultClientUser))
      context.system.eventStream.publish(incomingMessage)

    case HttpIncomingReceiver.StreamEnded =>
      context.stop(outcomingRef)
      context.stop(self)
  }

  private def formatDateNow(): String = {
    s"${Instant.now().getEpochSecond}.000000"
  }
} 
Example 6
Source File: ResponseTimeArbiterSpec.scala    From warp-core    with MIT License
package com.workday.warp.arbiters

import java.util.concurrent.TimeUnit
import java.time.Instant

import com.workday.telemetron.annotation.Required
import com.workday.warp.common.category.UnitTest
import com.workday.warp.common.spec.WarpJUnitSpec
import com.workday.warp.persistence.CorePersistenceAware
import com.workday.warp.persistence.TablesLike.TestExecutionRowLike
import com.workday.warp.persistence.TablesLike.RowTypeClasses._
import com.workday.warp.utils.Ballot
import org.junit.Test
import org.junit.experimental.categories.Category

// NOTE: the enclosing class declaration was dropped from this excerpt.
// A plausible signature, inferred from the imports above, would be:
class ResponseTimeArbiterSpec extends WarpJUnitSpec with CorePersistenceAware {

  @Test
  @Category(Array(classOf[UnitTest]))
  @Required(maxResponseTime = 3000, timeUnit = TimeUnit.MILLISECONDS)
  def requiredFailedMillis(): Unit = {
    val testId: String = this.getTestId
    val ballot: Ballot = new Ballot(testId)
    val testExecution: TestExecutionRowLike = this.persistenceUtils.createTestExecution(testId, Instant.now(), 4.0, 3.0)
    val arbiter: ResponseTimeArbiter = new ResponseTimeArbiter

    val vote: Option[Throwable] = arbiter.vote(ballot, testExecution)
    vote should not be empty
    vote.get.getMessage should be (s"$testId violated response time requirement: expected 0:00:03.000 (3000 ms), but " +
      "measured 0:00:04.000 (4000 ms)")
  }
} 
Example 7
Source File: PercentageDegradationArbiterSpec.scala    From warp-core    with MIT License
package com.workday.warp.arbiters

import java.time.Instant
import java.util.UUID

import com.workday.telemetron.RequirementViolationException
import com.workday.warp.common.CoreWarpProperty._
import com.workday.warp.common.annotation.PercentageDegradationRequirement
import com.workday.warp.common.category.UnitTest
import com.workday.warp.common.spec.WarpJUnitSpec
import com.workday.warp.persistence.CorePersistenceAware
import com.workday.warp.persistence.TablesLike.TestExecutionRowLike
import com.workday.warp.persistence.TablesLike.RowTypeClasses._
import com.workday.warp.utils.{AnnotationReader, Ballot}
import org.junit.Test
import org.junit.experimental.categories.Category

// NOTE: the enclosing class declaration (and its minimumHistoricalData member)
// was dropped from this excerpt. A plausible signature, inferred from the
// imports above, would be:
class PercentageDegradationArbiterSpec extends WarpJUnitSpec with CorePersistenceAware {

  @Test
  @Category(Array(classOf[UnitTest]))
  @PercentageDegradationRequirement(percentage = 20)
  def percentageVote(): Unit = {
    val ballot: Ballot = new Ballot(this.getTestId)
    val testExecution: TestExecutionRowLike = this.persistenceUtils.createTestExecution(this.getTestId, Instant.now(), 4.0, 5.0)

    val arbiter: PercentageDegradationArbiter = new PercentageDegradationArbiter
    arbiter.vote(List(1.0, 2.0, 3.0), ballot, testExecution, this.minimumHistoricalData) should be (defined)
    arbiter.vote(List(3.5, 3.5, 3.5), ballot, testExecution, this.minimumHistoricalData) should be (empty)
    // check with a number of measurements smaller than the minimum required
    arbiter.vote(List(3.0), ballot, testExecution, this.minimumHistoricalData) should be (empty)

    // shouldn't throw an exception here
    arbiter.maybeThrow(arbiter.vote(List(3.0, 3.5, 4.0), ballot, testExecution, this.minimumHistoricalData))

    // intercept the thrown exception
    intercept[RequirementViolationException] {
      arbiter.maybeThrow(arbiter.vote(List(2.5, 3.0, 3.5), ballot, testExecution, this.minimumHistoricalData))
    }
  }
} 
Example 8
Source File: WarpSlickSingleColumnExtensionsSpec.scala    From warp-core    with MIT License
package com.workday.warp.persistence.mysql

import java.time.Instant

import com.workday.warp.common.spec.WarpJUnitSpec
import com.workday.warp.junit.UnitTest
import com.workday.warp.persistence.Tables._
import com.workday.warp.persistence.mysql.WarpMySQLProfile.api._
import com.workday.warp.persistence.mysql.WarpSlickSingleColumnExtensionsSpec._
import com.workday.warp.persistence.{Connection, CorePersistenceAware, CorePersistenceUtils}
import org.junit.jupiter.api.BeforeEach

// NOTE: the enclosing class declaration (and its @BeforeEach setup) was dropped
// from this excerpt. A plausible signature, inferred from the imports above, would be:
class WarpSlickSingleColumnExtensionsSpec extends WarpJUnitSpec with CorePersistenceAware {

  @UnitTest
  def standardDeviation(): Unit = {
    this.persistenceUtils.createTestExecution(methodSignature1, Instant.now(), 1.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature1, Instant.now(), 2.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature1, Instant.now(), 3.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature2, Instant.now(), 2.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature2, Instant.now(), 4.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature2, Instant.now(), 6.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature3, Instant.now(), 2.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature3, Instant.now(), 2.0, 10)
    this.persistenceUtils.createTestExecution(methodSignature3, Instant.now(), 2.0, 10)

    val query = TestExecution.groupBy(_.idTestDefinition)
    val action = query.map { case (id, metrics) => (
      id,
      metrics.map(_.responseTime).std
      )
    }

    val rows: Seq[(Int, Option[Double])] = this.persistenceUtils.runWithRetries(action.result, 5).sortBy(_._1)
    rows.head._2 shouldBe defined
    rows.head._2.get shouldEqual STD_1
    rows(1)._2 shouldBe defined
    rows(1)._2.get shouldEqual STD_2
    rows(2)._2 shouldBe defined
    rows(2)._2.get shouldEqual 0
  }
}

object WarpSlickSingleColumnExtensionsSpec {

  val methodSignature1 = "com.workday.warp.slick.implicits.test.1"
  val methodSignature2 = "com.workday.warp.slick.implicits.test.2"
  val methodSignature3 = "com.workday.warp.slick.implicits.test.3"

  val STD_1: Double = 0.816496580927726
  val STD_2: Double = 1.632993161855452

} 
Example 9
Source File: InfluxDBClientSpec.scala    From warp-core    with MIT License
package com.workday.warp.persistence.influxdb

import java.time.Instant
import java.util.UUID

import com.workday.telemetron.spec.HasRandomTestId
import com.workday.warp.common.heaphistogram.{HeapHistogram, HeapHistogramEntry}
import com.workday.warp.common.spec.WarpJUnitSpec
import com.workday.warp.junit.IntegTest
import com.workday.warp.persistence.{Connection, CorePersistenceAware}
import com.workday.warp.persistence.TablesLike.TestExecutionRowLike
import com.workday.warp.persistence.TablesLike.RowTypeClasses._
import org.influxdb.InfluxDB
import org.influxdb.dto.Pong

import scala.util.Try

// NOTE: the enclosing class declaration was dropped from this excerpt; the
// databaseExists/createDatabase/dropDatabase helpers it calls are assumed to come
// from an InfluxDB client trait in this package. A plausible signature would be:
class InfluxDBClientSpec extends WarpJUnitSpec with CorePersistenceAware with HasRandomTestId {

  @IntegTest
  def createDatabase(): Unit = {
    val dbName: String = s"schema-${UUID.randomUUID().toString}"

    val exists: Boolean = this.databaseExists(dbName).get
    if (exists) {
      this.dropDatabase(dbName).get
    }

    this.createDatabase(dbName).get
    this.databaseExists(dbName).get should be (true)
    this.dropDatabase(dbName).get
  }
} 
Example 10
Source File: KnowledgeGraphSpec.scala    From Scalaprof    with GNU General Public License v2.0
package edu.neu.coe.scala.spark.graphx

import org.scalatest._
import java.time.Instant

class KnowledgeGraphSpec extends FlatSpec with Matchers {
  "Quantity" should "handle date 2015-11-10T10:15:30.00Z" in {
    val nov10 = Quantity[Instant]("November 10th",Instant.parse("2015-11-10T10:15:30.00Z"))
    nov10.lang shouldBe ("en")
  }
  "NamedEntity" should "person" in {
    val kerry = NamedEntity("Senator John Kerry","John Kerry, U.S. Senator and Secretary of State","person")
    kerry.lang shouldBe ("en")
  }
  "Concept" should "handle invitation to meet" in {
    val meet =  Concept("invitation to meet","meet")
    meet.lang shouldBe ("en")
  }
} 
Example 11
Source File: ScheduleKeyDeletion.scala    From aws4s    with MIT License
package org.aws4s.kms

import java.time.Instant
import cats.effect.Effect
import io.circe.{Decoder, Json}
import org.aws4s.Region
import org.aws4s.core.ExtraCirceDecoders._
import org.aws4s.core.Command.Validator
import org.aws4s.core.{CommandPayload, Param}

private[kms] case class ScheduleKeyDeletion[F[_]: Effect](
    region:              Region,
    keyId:               KeyId,
    pendingWindowInDays: Option[PendingWindowInDays],
) extends KmsCommand[F, ScheduleKeyDeletionSuccess] {
  override def action: String = "ScheduleKeyDeletion"

  override val validator: Validator[Json] = _ => None

  override def params: List[Param[Json]] = CommandPayload.params(keyId)(pendingWindowInDays)
}

case class ScheduleKeyDeletionSuccess(
    keyId:        KeyId,
    deletionDate: Instant,
)

object ScheduleKeyDeletionSuccess {
  implicit val decoder: Decoder[ScheduleKeyDeletionSuccess] =
    Decoder.forProduct2(
      KeyId.name,
      "DeletionDate"
    )(ScheduleKeyDeletionSuccess.apply)
} 
Example 12
Source File: JsonFormats.scala    From pipelines-examples    with Apache License 2.0
package pipelines.examples.sensordata

import java.time.Instant
import java.util.UUID

import scala.util.Try

import spray.json._

trait UUIDJsonSupport extends DefaultJsonProtocol {
  implicit object UUIDFormat extends JsonFormat[UUID] {
    def write(uuid: UUID) = JsString(uuid.toString)

    def read(json: JsValue): UUID = json match {
      case JsString(uuid) ⇒ Try(UUID.fromString(uuid)).getOrElse(deserializationError(s"Expected valid UUID but got '$uuid'."))
      case other          ⇒ deserializationError(s"Expected UUID as JsString, but got: $other")
    }
  }
}

trait InstantJsonSupport extends DefaultJsonProtocol {
  implicit object InstantFormat extends JsonFormat[Instant] {
    def write(instant: Instant) = JsNumber(instant.toEpochMilli)

    def read(json: JsValue): Instant = json match {
      case JsNumber(value) ⇒ Instant.ofEpochMilli(value.toLong)
      case other           ⇒ deserializationError(s"Expected Instant as JsNumber, but got: $other")
    }
  }
}

object MeasurementsJsonSupport extends DefaultJsonProtocol {
  implicit val measurementFormat = jsonFormat3(Measurements.apply)
}

object SensorDataJsonSupport extends DefaultJsonProtocol with UUIDJsonSupport with InstantJsonSupport {
  import MeasurementsJsonSupport._
  implicit val sensorDataFormat = jsonFormat3(SensorData.apply)
} 
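
As a quick illustration of the Instant format defined above: with InstantJsonSupport mixed in, an Instant is written as a JsNumber of epoch milliseconds and read back from one. A hedged round-trip sketch (the demo object is ours, not part of the project):

import java.time.Instant
import spray.json._
import pipelines.examples.sensordata.InstantJsonSupport

object InstantJsonDemo extends InstantJsonSupport {
  val instant: Instant = Instant.ofEpochMilli(1234567890123L)
  val json: JsValue    = instant.toJson            // JsNumber(1234567890123)
  val back: Instant    = json.convertTo[Instant]   // Instant.ofEpochMilli(1234567890123)
  assert(back == instant)
}
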
Example 13
Source File: DirectMessage.scala    From twitter4s    with Apache License 2.0
package com.danielasfregola.twitter4s.entities

import java.time.Instant

import com.danielasfregola.twitter4s.entities.streaming.UserStreamingMessage

final case class DirectMessage(created_at: Instant,
                               entities: Option[Entities],
                               id: Long,
                               id_str: String,
                               recipient: User,
                               recipient_id: Long,
                               recipient_id_str: String,
                               recipient_screen_name: String,
                               sender: User,
                               sender_id: Long,
                               sender_id_str: String,
                               sender_screen_name: String,
                               text: String)
    extends UserStreamingMessage 
Example 14
Source File: CallRecordValidationSpec.scala    From pipelines-examples    with Apache License 2.0
package pipelines.examples.carly.ingestor

import java.time.Instant
import java.time.temporal.ChronoUnit

import akka.actor._
import akka.stream._
import akka.stream.scaladsl._
import akka.testkit._
import org.scalatest._
import org.scalatest.concurrent._

import pipelines.akkastream.testkit.scaladsl._

import pipelines.examples.carly.data._

class CallRecordValidationSpec extends WordSpec with MustMatchers with ScalaFutures with BeforeAndAfterAll {
  private implicit val system = ActorSystem("CallRecordValidationSpec")
  private implicit val mat = ActorMaterializer()

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "A CallRecordValidation" should {
    "split incoming data into valid call records and those outside the time range" in {
      val testkit = AkkaStreamletTestKit(system, mat)
      val streamlet = new CallRecordValidation()

      val instant = Instant.now.toEpochMilli / 1000
      val past = Instant.now.minus(5000, ChronoUnit.DAYS).toEpochMilli / 1000

      val cr1 = CallRecord("user-1", "user-2", "f", 10L, instant)
      val cr2 = CallRecord("user-1", "user-2", "f", 15L, instant)
      val cr3 = CallRecord("user-1", "user-2", "f", 18L, instant)
      val cr4 = CallRecord("user-1", "user-2", "f", 40L, past)
      val cr5 = CallRecord("user-1", "user-2", "f", 70L, past)

      val source = Source(Vector(cr1, cr2, cr3, cr4, cr5))

      val in = testkit.inletFromSource(streamlet.in, source)
      val left = testkit.outletAsTap(streamlet.left)
      val right = testkit.outletAsTap(streamlet.right)

      testkit.run(streamlet, in, List(left, right), () ⇒ {
        right.probe.expectMsg(("user-1", cr1))
        right.probe.expectMsg(("user-1", cr2))
        right.probe.expectMsg(("user-1", cr3))
        left.probe.expectMsg((cr4.toString, InvalidRecord(cr4.toString, "Timestamp outside range!")))
        left.probe.expectMsg((cr5.toString, InvalidRecord(cr5.toString, "Timestamp outside range!")))
      })

      left.probe.expectMsg(Completed)
      right.probe.expectMsg(Completed)
    }
  }
} 
Example 15
Source File: CallStatsAggregatorSpec.scala    From pipelines-examples    with Apache License 2.0
package pipelines.examples.carly.aggregator

import java.time.Instant
import java.time.temporal.ChronoUnit

import scala.concurrent.duration._

import scala.util.Random

import pipelines.examples.carly.data._

import pipelines.spark.testkit._
import pipelines.spark.sql.SQLImplicits._

class CallStatsAggregatorSpec extends SparkScalaTestSupport {

  val streamlet = new CallStatsAggregator()
  val testKit = SparkStreamletTestkit(session).withConfigParameterValues(
    ConfigParameterValue(streamlet.GroupByWindow, "1 minute"),
    ConfigParameterValue(streamlet.Watermark, "1 minute"))

  "CallStatsAggregator" should {
    "produce elements to its outlet" in {

      // setup inlet tap on inlet port
      val in = testKit.inletAsTap[CallRecord](streamlet.in)

      // setup outlet tap on outlet port
      val out = testKit.outletAsTap[AggregatedCallStats](streamlet.out)

      val maxUsers = 10
      val crs = (1 to 30).toList.map { i ⇒
        CallRecord(
          s"user-${Random.nextInt(maxUsers)}",
          s"user-${Random.nextInt(maxUsers)}",
          (if (i % 2 == 0) "incoming" else "outgoing"),
          Random.nextInt(50),
          Instant.now.minus(Random.nextInt(40), ChronoUnit.MINUTES).toEpochMilli / 1000
        )
      }

      in.addData(crs)

      testKit.run(streamlet, Seq(in), Seq(out), 30.seconds)

      // get data from outlet tap
      val results = out.asCollection(session)

      // assert
      results.size must be > 0
    }
  }
} 
Example 16
Source File: CommandToResultTransformer.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.commandhandler

import java.time.Instant
import java.util.UUID

import org.amitayh.invoices.common.Config
import org.amitayh.invoices.common.domain.{Command, CommandResult, InvoiceSnapshot, SnapshotReducer}
import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.kstream.{Transformer, TransformerSupplier}
import org.apache.kafka.streams.processor.ProcessorContext
import org.apache.kafka.streams.state.KeyValueStore

class CommandToResultTransformer
  extends Transformer[UUID, Command, KeyValue[UUID, CommandResult]] {

  private var context: ProcessorContext = _

  private var store: KeyValueStore[UUID, InvoiceSnapshot] = _

  override def init(context: ProcessorContext): Unit = {
    this.context = context
    store = context
      .getStateStore(Config.Stores.Snapshots)
      .asInstanceOf[KeyValueStore[UUID, InvoiceSnapshot]]
  }

  
  override def transform(id: UUID, command: Command): KeyValue[UUID, CommandResult] = {
    val snapshot = loadSnapshot(id)
    val result = command(timestamp(), snapshot)
    updateSnapshot(id, result.outcome)
    KeyValue.pair(id, result)
  }

  override def close(): Unit = ()

  private def loadSnapshot(id: UUID): InvoiceSnapshot =
    Option(store.get(id)).getOrElse(SnapshotReducer.empty)

  private def timestamp(): Instant =
    Instant.ofEpochMilli(context.timestamp())

  private def updateSnapshot(id: UUID, outcome: CommandResult.Outcome): Unit = outcome match {
    case CommandResult.Success(_, _, snapshot) => store.put(id, snapshot)
    case _ => ()
  }

}

object CommandToResultTransformer {
  val Supplier: TransformerSupplier[UUID, Command, KeyValue[UUID, CommandResult]] =
    () => new CommandToResultTransformer
} 
Example 17
Source File: AvroSerde.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.common.serde

import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.time.Instant
import java.util
import java.util.UUID

import com.sksamuel.avro4s._
import org.amitayh.invoices.common.domain._
import org.amitayh.invoices.common.serde.UuidConverters.{fromByteBuffer, toByteBuffer}
import org.apache.avro.Schema
import org.apache.avro.Schema.Field
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

object AvroSerde {
  implicit val instantToSchema: ToSchema[Instant] = new ToSchema[Instant] {
    override val schema: Schema = Schema.create(Schema.Type.STRING)
  }

  implicit val instantToValue: ToValue[Instant] = new ToValue[Instant] {
    override def apply(value: Instant): String = value.toString
  }

  implicit val instantFromValue: FromValue[Instant] = new FromValue[Instant] {
    override def apply(value: Any, field: Field): Instant =
      Instant.parse(value.toString)
  }

  implicit val uuidToSchema: ToSchema[UUID] = new ToSchema[UUID] {
    override val schema: Schema = Schema.create(Schema.Type.BYTES)
  }

  implicit val uuidToValue: ToValue[UUID] = new ToValue[UUID] {
    override def apply(value: UUID): ByteBuffer = toByteBuffer(value)
  }

  implicit val uuidFromValue: FromValue[UUID] = new FromValue[UUID] {
    override def apply(value: Any, field: Field): UUID =
      fromByteBuffer(value.asInstanceOf[ByteBuffer])
  }

  val CommandSerde: Serde[Command] = serdeFor[Command]

  val CommandResultSerde: Serde[CommandResult] = serdeFor[CommandResult]

  val SnapshotSerde: Serde[InvoiceSnapshot] = serdeFor[InvoiceSnapshot]

  val EventSerde: Serde[Event] = serdeFor[Event]

  def toBytes[T: SchemaFor: ToRecord](data: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream
    val output = AvroOutputStream.binary[T](baos)
    output.write(data)
    output.close()
    baos.toByteArray
  }

  def fromBytes[T: SchemaFor: FromRecord](data: Array[Byte]): T = {
    val input = AvroInputStream.binary[T](data)
    input.iterator.next()
  }

  private def serdeFor[T: SchemaFor: ToRecord: FromRecord]: Serde[T] = new Serde[T] {
    override val serializer: Serializer[T] = new Serializer[T] {
      override def serialize(topic: String, data: T): Array[Byte] = toBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override val deserializer: Deserializer[T] = new Deserializer[T] {
      override def deserialize(topic: String, data: Array[Byte]): T = fromBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
    override def close(): Unit = ()
  }
} 
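
A hedged round-trip sketch of the serde above (the Ping case class and demo object are ours, purely for illustration): avro4s derives the record converters for the case class, the Instant is written as an Avro string via instantToValue, and parsed back via instantFromValue.

import java.time.Instant
import java.util.UUID
import org.amitayh.invoices.common.serde.AvroSerde._

// hypothetical payload type, not part of the project
case class Ping(id: UUID, sentAt: Instant)

object AvroSerdeDemo extends App {
  val ping               = Ping(UUID.randomUUID(), Instant.now())
  val bytes: Array[Byte] = toBytes(ping)          // Instant encoded as an ISO-8601 string
  val decoded: Ping      = fromBytes[Ping](bytes)
  println(decoded)
}
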
Example 18
Source File: Command.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.common.domain

import java.time.{Instant, LocalDate}
import java.util.UUID

import scala.collection.immutable.Seq

case class Command(originId: UUID,
                   commandId: UUID,
                   expectedVersion: Option[Int],
                   payload: Command.Payload) {

  def apply(timestamp: Instant, snapshot: InvoiceSnapshot): CommandResult = {
    val outcome = snapshot
      .validateVersion(expectedVersion)
      .flatMap(payload(_))
      .fold(
        CommandResult.Failure,
        success(timestamp, snapshot, _))

    CommandResult(originId, commandId, outcome)
  }

  private def success(timestamp: Instant,
                      snapshot: InvoiceSnapshot,
                      payloads: Seq[Event.Payload]): CommandResult.Outcome = {
    payloads.foldLeft(CommandResult.Success(snapshot)) { (acc, payload) =>
      acc.update(timestamp, commandId, payload)
    }
  }

}

object Command {
  type Result = Either[InvoiceError, Seq[Event.Payload]]

  sealed trait Payload {
    def apply(invoice: Invoice): Result
  }

  case class CreateInvoice(customerName: String,
                           customerEmail: String,
                           issueDate: LocalDate,
                           dueDate: LocalDate,
                           lineItems: List[LineItem]) extends Payload {

    override def apply(invoice: Invoice): Result = {
      val createdEvent = Event.InvoiceCreated(customerName, customerEmail, issueDate, dueDate)
      val lineItemEvents = lineItems.map(toLineItemEvent)
      success(createdEvent :: lineItemEvents)
    }

    private def toLineItemEvent(lineItem: LineItem): Event.Payload =
      Event.LineItemAdded(
        description = lineItem.description,
        quantity = lineItem.quantity,
        price = lineItem.price)
  }

  case class AddLineItem(description: String,
                         quantity: Double,
                         price: Double) extends Payload {
    override def apply(invoice: Invoice): Result =
      success(Event.LineItemAdded(description, quantity, price))
  }

  case class RemoveLineItem(index: Int) extends Payload {
    override def apply(invoice: Invoice): Result = {
      if (invoice.hasLineItem(index)) success(Event.LineItemRemoved(index))
      else failure(LineItemDoesNotExist(index))
    }
  }

  case class PayInvoice() extends Payload {
    override def apply(invoice: Invoice): Result =
      success(Event.PaymentReceived(invoice.total))
  }

  case class DeleteInvoice() extends Payload {
    override def apply(invoice: Invoice): Result =
      success(Event.InvoiceDeleted())
  }

  private def success(events: Event.Payload*): Result = success(events.toList)

  private def success(events: List[Event.Payload]): Result = Right(events)

  private def failure(error: InvoiceError): Result = Left(error)
} 
Example 19
Source File: Reducer.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.common.domain

import java.time.Instant

trait Reducer[S, E] {
  def empty: S
  def handle(s: S, e: E): S
}

object InvoiceReducer extends Reducer[Invoice, Event.Payload] {
  override val empty: Invoice = Invoice.Draft

  override def handle(invoice: Invoice, event: Event.Payload): Invoice = event match {
    case Event.InvoiceCreated(customerName, customerEmail, issueDate, dueDate) =>
      invoice
        .setCustomer(customerName, customerEmail)
        .setDates(issueDate, dueDate)

    case Event.LineItemAdded(description, quantity, price) =>
      invoice.addLineItem(description, quantity, price)

    case Event.LineItemRemoved(index) =>
      invoice.removeLineItem(index)

    case Event.PaymentReceived(amount) =>
      invoice.pay(amount)

    case Event.InvoiceDeleted() =>
      invoice.delete

    case _ => invoice
  }
}

object SnapshotReducer extends Reducer[InvoiceSnapshot, Event] {
  override val empty: InvoiceSnapshot =
    InvoiceSnapshot(InvoiceReducer.empty, 0, Instant.MIN)

  override def handle(snapshot: InvoiceSnapshot, event: Event): InvoiceSnapshot = {
    if (versionsMatch(snapshot, event)) updateSnapshot(snapshot, event)
    else throw new RuntimeException(s"Unexpected version $snapshot / $event")
  }

  private def versionsMatch(snapshot: InvoiceSnapshot, event: Event): Boolean =
    snapshot.version == (event.version - 1)

  private def updateSnapshot(snapshot: InvoiceSnapshot, event: Event): InvoiceSnapshot = {
    val invoice = InvoiceReducer.handle(snapshot.invoice, event.payload)
    InvoiceSnapshot(invoice, event.version, event.timestamp)
  }
} 
Example 20
Source File: Event.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.common.domain

import java.time.{Instant, LocalDate}
import java.util.UUID

case class Event(version: Int,
                 timestamp: Instant,
                 commandId: UUID,
                 payload: Event.Payload)

object Event {
  sealed trait Payload {
    def apply(invoice: Invoice): Invoice = invoice
  }

  case class InvoiceCreated(customerName: String,
                            customerEmail: String,
                            issueDate: LocalDate,
                            dueDate: LocalDate) extends Payload {
    override def apply(invoice: Invoice): Invoice =
      invoice
        .setCustomer(customerName, customerEmail)
        .setDates(issueDate, dueDate)
  }

  case class LineItemAdded(description: String,
                           quantity: BigDecimal,
                           price: BigDecimal) extends Payload {
    override def apply(invoice: Invoice): Invoice =
      invoice.addLineItem(description, quantity, price)
  }

  case class LineItemRemoved(index: Int) extends Payload {
    override def apply(invoice: Invoice): Invoice =
      invoice.removeLineItem(index)
  }

  case class PaymentReceived(amount: BigDecimal) extends Payload {
    override def apply(invoice: Invoice): Invoice = invoice.pay(amount)
  }

  case class InvoiceDeleted() extends Payload {
    override def apply(invoice: Invoice): Invoice = invoice.delete
  }

  case class InvoiceSentToCustomer() extends Payload

} 
Example 21
Source File: CommandResult.scala    From event-sourcing-kafka-streams    with MIT License
package org.amitayh.invoices.common.domain

import java.time.Instant
import java.util.UUID

case class CommandResult(originId: UUID,
                         commandId: UUID,
                         outcome: CommandResult.Outcome)

object CommandResult {
  sealed trait Outcome

  case class Success(events: Vector[Event],
                     oldSnapshot: InvoiceSnapshot,
                     newSnapshot: InvoiceSnapshot) extends Outcome {

    def update(timestamp: Instant,
               commandId: UUID,
               payload: Event.Payload): Success = {
      val event = Event(nextVersion, timestamp, commandId, payload)
      val snapshot = SnapshotReducer.handle(newSnapshot, event)
      copy(events = events :+ event, newSnapshot = snapshot)
    }

    private def nextVersion: Int =
      oldSnapshot.version + events.length + 1

  }

  object Success {
    def apply(snapshot: InvoiceSnapshot): Success =
      Success(Vector.empty, snapshot, snapshot)
  }

  case class Failure(cause: InvoiceError) extends Outcome
} 
Example 22
Source File: SerializationRoundtripSpec.scala    From twitter4s    with Apache License 2.0
package com.danielasfregola.twitter4s.entities

import java.time.Instant

import com.danielasfregola.randomdatagenerator.RandomDataGenerator
import com.danielasfregola.twitter4s.http.serializers.JsonSupport
import org.json4s.native.Serialization
import org.scalacheck.Gen.alphaChar
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.mutable.Specification
import org.specs2.specification.core.Fragment

import scala.reflect._

class SerializationRoundtripSpec extends Specification with RandomDataGenerator with JsonSupport {

  "JSON serialization" should {

    def roundtripTest[T <: AnyRef: Manifest: Arbitrary]: Fragment = {

      val className = classTag[T].runtimeClass.getSimpleName

      s"round-trip successfully for $className" in {
        val randomEntity = random[T]

        val serializedJson = Serialization.write[T](randomEntity)

        val deserializedEntity = Serialization.read[T](serializedJson)

        deserializedEntity === randomEntity
      }
    }

    roundtripTest[User]
  }

  // We serialize dates to second precision
  implicit val arbitraryDate: Arbitrary[Instant] = Arbitrary {
    for {
      timeInSeconds: Long <- Gen.chooseNum(1142899200L, 1512442349L)
    } yield Instant.ofEpochSecond(timeInSeconds)
  }

  implicit val arbitraryProfileImage: Arbitrary[ProfileImage] = Arbitrary {
    for {
      prefix: String <- Gen.nonEmptyListOf(alphaChar).map(_.mkString)
      suffix: String <- Gen.oneOf("_mini", "_normal", "_bigger", "")
    } yield ProfileImage(s"${prefix}_$suffix.jpg")
  }
} 
Example 23
Source File: Event.scala    From twitter4s    with Apache License 2.0
package com.danielasfregola.twitter4s.entities.streaming.user

import java.time.Instant

import com.danielasfregola.twitter4s.entities.enums.EventCode
import com.danielasfregola.twitter4s.entities.enums.SimpleEventCode.SimpleEventCode
import com.danielasfregola.twitter4s.entities.enums.TweetEventCode.TweetEventCode
import com.danielasfregola.twitter4s.entities.enums.TwitterListEventCode.TwitterListEventCode
import com.danielasfregola.twitter4s.entities.streaming.UserStreamingMessage
import com.danielasfregola.twitter4s.entities.{Tweet, TwitterList, User}


abstract class Event[T](created_at: Instant,
                        event: EventCode#Value,
                        target: User,
                        source: User,
                        target_object: Option[T])
    extends UserStreamingMessage

final case class SimpleEvent(created_at: Instant,
                             event: SimpleEventCode,
                             target: User,
                             source: User,
                             target_object: Option[String])
    extends Event(created_at, event, target, source, target_object)

final case class TweetEvent(created_at: Instant,
                            event: TweetEventCode,
                            target: User,
                            source: User,
                            target_object: Tweet)
    extends Event(created_at, event, target, source, Some(target_object))

final case class TwitterListEvent(created_at: Instant,
                                  event: TwitterListEventCode,
                                  target: User,
                                  source: User,
                                  target_object: TwitterList)
    extends Event(created_at, event, target, source, Some(target_object)) 
Example 24
Source File: ExpiryServiceTest.scala    From kafka-journal    with MIT License
package com.evolutiongaming.kafka.journal.eventual.cassandra

import java.time.{Instant, LocalDate, ZoneOffset}

import cats.effect.ExitCase
import cats.implicits._
import cats.{Id, catsInstancesForId}
import com.evolutiongaming.kafka.journal.ExpireAfter
import com.evolutiongaming.kafka.journal.ExpireAfter.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpireOn.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpiryService.Action
import com.evolutiongaming.kafka.journal.util.BracketFromMonad
import org.scalatest.FunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._
import scala.util.control.NonFatal

class ExpiryServiceTest extends FunSuite with Matchers {
  import ExpiryServiceTest._

  test("expireOn") {
    val expireAfter = 1.day.toExpireAfter
    val expected = LocalDate.of(2019, 12, 12).toExpireOn
    expireService.expireOn(expireAfter, timestamp) shouldEqual expected
  }

  for {
    (expiry, expireAfter, action) <- List(
      (
        none[Expiry],
        1.minute.toExpireAfter.some,
        Action.update(Expiry(
          1.minute.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn))),
      (
        none[Expiry],
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.ignore),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        none[ExpireAfter],
        Action.remove))
  } yield {
    test(s"action expiry: $expiry, expireAfter: $expireAfter, action: $action") {
      expireService.action(expiry, expireAfter, timestamp) shouldEqual action
    }
  }
}

object ExpiryServiceTest {

  implicit val bracketId: BracketFromMonad[Id, Throwable] = new BracketFromMonad[Id, Throwable] {

    def F = catsInstancesForId

    def bracketCase[A, B](acquire: Id[A])(use: A => Id[B])(release: (A, ExitCase[Throwable]) => Id[Unit]) = {
      flatMap(acquire) { a =>
        try {
          val b = use(a)
          try release(a, ExitCase.Completed) catch { case NonFatal(_) => }
          b
        } catch {
          case NonFatal(e) =>
            release(a, ExitCase.Error(e))
            raiseError(e)
        }
      }
    }

    def raiseError[A](a: Throwable) = throw a

    def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]) = fa
  }

  val timestamp: Instant = Instant.parse("2019-12-11T10:10:10.00Z")

  val zoneId: ZoneOffset = ZoneOffset.UTC

  val expireService: ExpiryService[Id] = ExpiryService[Id](zoneId)
} 
Example 25
Source File: TopicCommit.scala    From kafka-journal    with MIT License
package com.evolutiongaming.kafka.journal.replicator

import java.time.Instant

import cats.Applicative
import cats.data.{NonEmptyMap => Nem}
import cats.effect.concurrent.Ref
import cats.effect.{Clock, Sync}
import cats.implicits._
import com.evolutiongaming.catshelper.ClockHelper._
import com.evolutiongaming.catshelper.DataHelper._
import com.evolutiongaming.kafka.journal.util.TemporalHelper._
import com.evolutiongaming.kafka.journal.KafkaConsumer
import com.evolutiongaming.skafka._

import scala.collection.immutable.SortedMap
import scala.concurrent.duration._

trait TopicCommit[F[_]] {

  def apply(offsets: Nem[Partition, Offset]): F[Unit]
}

object TopicCommit {

  def empty[F[_] : Applicative]: TopicCommit[F] = (_: Nem[Partition, Offset]) => ().pure[F]

  def apply[F[_]](
    topic: Topic,
    metadata: String,
    consumer: KafkaConsumer[F, _, _],
  ): TopicCommit[F] = {
    offsets: Nem[Partition, Offset] => {
      val offsets1 = offsets.mapKV { (partition, offset) =>
        val offset1 = OffsetAndMetadata(offset, metadata)
        val partition1 = TopicPartition(topic, partition)
        (partition1, offset1)
      }
      consumer.commit(offsets1)
    }
  }

  def delayed[F[_] : Sync : Clock](
    delay: FiniteDuration,
    commit: TopicCommit[F]
  ): F[TopicCommit[F]] = {

    case class State(until: Instant, offsets: SortedMap[Partition, Offset] = SortedMap.empty)

    for {
      timestamp <- Clock[F].instant
      stateRef  <- Ref[F].of(State(timestamp + delay))
    } yield {
      new TopicCommit[F] {
        def apply(offsets: Nem[Partition, Offset]) = {

          def apply(state: State, timestamp: Instant) = {
            val offsets1 = state.offsets ++ offsets.toSortedMap
            if (state.until <= timestamp) {
              offsets1
                .toNem
                .foldMapM { offsets => commit(offsets) }
                .as(State(timestamp + delay))
            } else {
              state
                .copy(offsets = offsets1)
                .pure[F]
            }
          }

          for {
            timestamp <- Clock[F].instant
            state     <- stateRef.get
            state     <- apply(state, timestamp)
            _         <- stateRef.set(state)
          } yield {}
        }
      }
    }
  }
} 
Example 26
Source File: EventRecord.scala    From kafka-journal    with MIT License
package com.evolutiongaming.kafka.journal

import java.time.Instant

import cats._
import cats.implicits._
import com.evolutiongaming.skafka.{Offset, Partition}


final case class EventRecord[A](
  event: Event[A],
  timestamp: Instant,
  partitionOffset: PartitionOffset,
  origin: Option[Origin] = None,
  metadata: RecordMetadata,
  headers: Headers
) {

  def seqNr: SeqNr = event.seqNr

  def offset: Offset = partitionOffset.offset

  def partition: Partition = partitionOffset.partition

  def pointer: JournalPointer = JournalPointer(partitionOffset, event.seqNr)
}

object EventRecord {

  def apply[A](
    record: ActionRecord[Action.Append],
    event: Event[A],
    metadata: PayloadMetadata
  ): EventRecord[A] = {
    apply(record.action, event, record.partitionOffset, metadata)
  }

  def apply[A](
    action: Action.Append,
    event: Event[A],
    partitionOffset: PartitionOffset,
    metadata: PayloadMetadata,
  ): EventRecord[A] = {
    EventRecord(
      event = event,
      timestamp = action.timestamp,
      partitionOffset = partitionOffset,
      origin = action.origin,
      metadata = RecordMetadata(
        header = action.header.metadata,
        payload = metadata),
      headers = action.headers)
  }

  implicit val traverseEventRecord: Traverse[EventRecord] = new Traverse[EventRecord] {
    override def traverse[G[_] : Applicative, A, B](fa: EventRecord[A])(f: A => G[B]): G[EventRecord[B]] =
      fa.event.traverse(f).map(e => fa.copy(event = e))

    override def foldLeft[A, B](fa: EventRecord[A], b: B)(f: (B, A) => B): B =
      fa.event.foldLeft(b)(f)

    override def foldRight[A, B](fa: EventRecord[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] =
      fa.event.foldRight(lb)(f)
  }
} 
Example 27
Source File: ConsRecordToActionRecord.scala    From kafka-journal    with MIT License
package com.evolutiongaming.kafka.journal.conversions

import java.time.Instant

import cats.data.OptionT
import cats.implicits._
import com.evolutiongaming.catshelper.MonadThrowable
import com.evolutiongaming.kafka.journal._
import com.evolutiongaming.kafka.journal.util.CatsHelper._

trait ConsRecordToActionRecord[F[_]] {

  def apply(consRecord: ConsRecord): OptionT[F, ActionRecord[Action]]
}


object ConsRecordToActionRecord {

  implicit def apply[F[_] : MonadThrowable](implicit
    consRecordToActionHeader: ConsRecordToActionHeader[F],
    headerToTuple: HeaderToTuple[F],
  ): ConsRecordToActionRecord[F] = {

    consRecord: ConsRecord => {

      def action(key: Key, timestamp: Instant, header: ActionHeader) = {

        def append(header: ActionHeader.Append) = {
          consRecord
            .value
            .traverse { value =>
              val headers = consRecord.headers
                .filter { _.key =!= ActionHeader.key }
                .traverse { header => headerToTuple(header) }

              for {
                headers <- headers
              } yield {
                val payload = value.value
                Action.append(key, timestamp, header, payload, headers.toMap)
              }
            }
        }

        header match {
          case header: ActionHeader.Append => append(header).toOptionT
          case header: ActionHeader.Mark   => Action.mark(key, timestamp, header).pure[OptionT[F, *]]
          case header: ActionHeader.Delete => Action.delete(key, timestamp, header).pure[OptionT[F, *]]
          case header: ActionHeader.Purge  => Action.purge(key, timestamp, header).pure[OptionT[F, *]]
        }
      }

      val result = for {
        id               <- consRecord.key.toOptionT[F]
        timestampAndType <- consRecord.timestampAndType.toOptionT[F]
        header           <- consRecordToActionHeader(consRecord)
        key               = Key(id = id.value, topic = consRecord.topic)
        timestamp         = timestampAndType.timestamp
        action           <- action(key, timestamp, header)
      } yield {
        val partitionOffset = PartitionOffset(consRecord)
        ActionRecord(action, partitionOffset)
      }

      result
        .value
        .adaptError { case e =>
          JournalError(s"ConsRecordToActionRecord failed for $consRecord: $e", e)
        }
        .toOptionT
    }
  }
} 
Example 28
Source File: ShoppingCartService.scala    From lagom    with Apache License 2.0
package com.example.shoppingcart.api

import java.time.Instant

import akka.{Done, NotUsed}
import com.lightbend.lagom.scaladsl.api.transport.Method
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}
import play.api.libs.json.{Format, Json}


trait ShoppingCartService extends Service {

  def get(id: String): ServiceCall[NotUsed, String]

  def getReport(id: String): ServiceCall[NotUsed, String]

  def updateItem(id: String, productId: String, qty: Int): ServiceCall[NotUsed, String]

  def checkout(id: String): ServiceCall[NotUsed, String]

  override final def descriptor = {
    import Service._
    named("shopping-cart")
      .withCalls(
        restCall(Method.GET, "/shoppingcart/:id", get _),
        restCall(Method.GET, "/shoppingcart/:id/report", getReport _),
        // for the RESTafarians, my formal apologies but the GET calls below do mutate state
        // we just want an easy way to mutate data from a sbt scripted test, so no POST/PUT here
        restCall(Method.GET, "/shoppingcart/:id/:productId/:num", updateItem _),
        restCall(Method.GET, "/shoppingcart/:id/checkout", checkout _)
      )
      .withAutoAcl(true)
  }
} 
Example 29
Source File: DiscoveryUtilsSpec.scala    From spark-summit-2018    with GNU General Public License v3.0
package com.twilio.open.streaming.trend.discovery

import java.time.Instant
import java.time.format.DateTimeFormatter

import com.twilio.open.streaming.trend.discovery.protocol.{CallEvent, Dimensions}
import org.scalatest.{FlatSpec, Matchers}

class DiscoveryUtilsSpec extends FlatSpec with Matchers {

  // example using java serialization with case class
  "DiscoveryUtils" should " serialize and deserialize a CallEvent object" in {
    val eventTime = Instant.from(DateTimeFormatter.ISO_DATE_TIME.parse("2018-03-08T18:00:00Z"))
    val loggedTime = eventTime.plusSeconds(34)
    //eventTime: Long, loggedTime: Long, eventId: String, eventType: String,dimensions: Dimensions, signalingEvent: Option[SignalingEvent]
    //case class Dimensions(country: Option[String], continent: Option[String], carrier: Option[String],direction: Option[String])
    val ce = CallEvent(eventTime.toEpochMilli, loggedTime.toEpochMilli, "uuid1", "signaling", Dimensions(
      country = Some("us"),
      continent = Some("na"),
      carrier = Some("verizon"),
      direction = Some("inbound")
    ), None)

    val ceSer = DiscoveryUtils.serialize(ce)
    val ceDeser = DiscoveryUtils.deserialize[CallEvent](ceSer)
    ce.equals(ceDeser)
  }
} 
Example 30
Source File: SelfDestructBot.scala    From telegram    with Apache License 2.0
import java.time.Instant

import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.Implicits._
import com.bot4s.telegram.api.declarative.{Callbacks, InlineQueries}
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods._
import com.bot4s.telegram.models.UpdateType.Filters._
import com.bot4s.telegram.models._

import scala.concurrent.Future
import scala.concurrent.duration._


class SelfDestructBot(token: String) extends ExampleBot(token)
  with Polling
  with InlineQueries[Future]
  with Callbacks[Future] {

  val timeouts = Seq(3, 5, 10, 30)

  override def allowedUpdates = InlineUpdates ++ CallbackUpdates

  def buildResult(timeout: Int, msg: String): InlineQueryResult = {
    InlineQueryResultArticle(s"$timeout", s"$timeout seconds",
      inputMessageContent = InputTextMessageContent(msg),
      description = s"Message will be deleted in $timeout seconds",
      replyMarkup = InlineKeyboardMarkup.singleButton(button(now)))
  }

  def now = Instant.now().getEpochSecond

  def button(timeLeft: Long) = InlineKeyboardButton.callbackData("⏳ left?", "" + timeLeft)

  onCallbackQuery {
    implicit cbq =>
      val left = cbq.data.map(_.toLong - now).getOrElse(-1L)
      ackCallback(s"$left seconds remaining.", cacheTime = 0).void
  }

  onChosenInlineResult { implicit result =>
    val delay = result.resultId.toInt
    Utils.after(delay.seconds) {
      request(EditMessageText(
        text = "⌛ Expired",
        inlineMessageId = result.inlineMessageId))
    }

    request(EditMessageReplyMarkup(
      inlineMessageId = result.inlineMessageId,
      replyMarkup = InlineKeyboardMarkup.singleButton(button(now + delay)))).void
  }

  onInlineQuery { implicit q =>
    val results = if (q.query.isEmpty)
      Seq.empty
    else
      timeouts.map(buildResult(_, q.query))
    answerInlineQuery(results, 5).void
  }
} 
Example 31
Source File: SecurityUserRepo.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package authentication.repositories

import java.time.Instant

import authentication.exceptions.MissingSecurityUserException
import authentication.models.{PasswordHash, SecurityUser, SecurityUserId}
import commons.exceptions.MissingModelException
import commons.models.Email
import commons.utils.DbioUtils
import commons.utils.DbioUtils.optionToDbio
import slick.dbio.DBIO
import slick.jdbc.H2Profile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _}
import slick.lifted.{ProvenShape, _}

import scala.concurrent.ExecutionContext

private[authentication] class SecurityUserRepo(implicit private val ex: ExecutionContext) {

  import SecurityUserTable.securityUsers

  def findByEmailOption(email: Email): DBIO[Option[SecurityUser]] = {
    require(email != null)

    securityUsers
      .filter(_.email === email)
      .result
      .headOption
  }

  def findByEmail(email: Email): DBIO[SecurityUser] = {
    require(email != null)

    findByEmailOption(email)
      .flatMap(optionToDbio(_, new MissingSecurityUserException(email.toString)))
  }

  def insertAndGet(securityUser: SecurityUser): DBIO[SecurityUser] = {
    require(securityUser != null)

    insert(securityUser)
      .flatMap(findById)
  }

  private def insert(securityUser: SecurityUser): DBIO[SecurityUserId] = {
    securityUsers.returning(securityUsers.map(_.id)) += securityUser
  }

  def findById(securityUserId: SecurityUserId): DBIO[SecurityUser] = {
    securityUsers
      .filter(_.id === securityUserId)
      .result
      .headOption
      .flatMap(maybeModel => DbioUtils.optionToDbio(maybeModel, new MissingModelException(s"model id: $securityUserId")))
  }

  def updateAndGet(securityUser: SecurityUser): DBIO[SecurityUser] = {
    update(securityUser).flatMap(_ => findById(securityUser.id))
  }

  private def update(securityUser: SecurityUser): DBIO[Int] = {
    require(securityUser != null)

    securityUsers
      .filter(_.id === securityUser.id)
      .update(securityUser)
  }
}

object SecurityUserTable {
  val securityUsers = TableQuery[SecurityUsers]

  protected class SecurityUsers(tag: Tag) extends Table[SecurityUser](tag, "security_users") {

    def id: Rep[SecurityUserId] = column[SecurityUserId]("id", O.PrimaryKey, O.AutoInc)

    def email: Rep[Email] = column("email")

    def password: Rep[PasswordHash] = column("password")

    def createdAt: Rep[Instant] = column("created_at")

    def updatedAt: Rep[Instant] = column("updated_at")

    def * : ProvenShape[SecurityUser] = (id, email, password, createdAt, updatedAt) <> (SecurityUser.tupled,
      SecurityUser.unapply)
  }

} 
Example 32
Source File: CommentRepo.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles.repositories

import java.time.Instant

import articles.models.{Tag => _, _}
import commons.exceptions.MissingModelException
import commons.utils.DbioUtils
import slick.dbio.DBIO
import slick.jdbc.H2Profile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _}
import slick.lifted.{ProvenShape, _}
import users.models.UserId
import users.repositories.UserRepo

import scala.concurrent.ExecutionContext

class CommentRepo(userRepo: UserRepo, implicit private val ec: ExecutionContext) {
  import CommentTable.comments

  def findByArticleId(articleId: ArticleId): DBIO[Seq[Comment]] = {
    comments
      .filter(_.articleId === articleId)
      .sortBy(_.createdAt.desc)
      .result
  }

  def insertAndGet(comment: Comment): DBIO[Comment] = {
    require(comment != null)

    insert(comment)
      .flatMap(findById)
  }

  private def insert(comment: Comment): DBIO[CommentId] = {
    comments.returning(comments.map(_.id)) += comment
  }

  def findById(commentId: CommentId): DBIO[Comment] = {
    comments
      .filter(_.id === commentId)
      .result
      .headOption
      .flatMap(maybeModel => DbioUtils.optionToDbio(maybeModel, new MissingModelException(s"model id: $commentId")))
  }

  def delete(commentId: CommentId): DBIO[Int] = {
    comments
      .filter(_.id === commentId)
      .delete
  }

  def delete(commentIds: Seq[CommentId]): DBIO[Int] = {
    comments
      .filter(_.id inSet commentIds)
      .delete
  }

}

object CommentTable {
  val comments = TableQuery[Comments]

  protected class Comments(tag: Tag) extends Table[Comment](tag, "comments") {

    def id: Rep[CommentId] = column[CommentId]("id", O.PrimaryKey, O.AutoInc)

    def articleId: Rep[ArticleId] = column("article_id")

    def authorId: Rep[UserId] = column("author_id")

    def body: Rep[String] = column("body")

    def createdAt: Rep[Instant] = column("created_at")

    def updatedAt: Rep[Instant] = column("updated_at")

    def * : ProvenShape[Comment] = (id, articleId, authorId, body, createdAt, updatedAt) <> ((Comment.apply _).tupled,
      Comment.unapply)
  }
} 
Example 33
Source File: Comment.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles.models

import java.time.Instant

import commons.models.{BaseId, IdMetaModel, Property, WithId}
import users.models.UserId
import play.api.libs.json._
import slick.jdbc.H2Profile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _}

case class Comment(id: CommentId,
                   articleId: ArticleId,
                   authorId: UserId,
                   body: String,
                   createdAt: Instant,
                   updatedAt: Instant,
                  ) extends WithId[Long, CommentId]

case class CommentId(value: Long) extends AnyVal with BaseId[Long]

object CommentId {
  implicit val commentIdFormat: Format[CommentId] = new Format[CommentId] {
    override def reads(json: JsValue): JsResult[CommentId] =
      Reads.LongReads.reads(json).map(CommentId(_))

    override def writes(o: CommentId): JsNumber = Writes.LongWrites.writes(o.value)
  }

  implicit val commentIdDbMapping: BaseColumnType[CommentId] = MappedColumnType.base[CommentId, Long](
    vo => vo.value,
    id => CommentId(id)
  )
}

object CommentMetaModel extends IdMetaModel {

  val articleId: Property[ArticleId] = Property("articleId")
  val authorId: Property[UserId] = Property("authorId")
  val body: Property[String] = Property("body")
  val updatedAt: Property[Instant] = Property("updatedAt")
  val createdAt: Property[Instant] = Property("createdAt")

  override type ModelId = CommentId
} 
Example 34
Source File: Article.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles.models

import java.time.Instant

import commons.models.{BaseId, IdMetaModel, Property, WithId}
import users.models.UserId
import play.api.libs.json._
import slick.jdbc.H2Profile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _}

case class Article(id: ArticleId,
                   slug: String,
                   title: String,
                   description: String,
                   body: String,
                   createdAt: Instant,
                   updatedAt: Instant,
                   authorId: UserId
                  )
  extends WithId[Long, ArticleId]

object Article {
  implicit val articleFormat: Format[Article] = Json.format[Article]
}

case class ArticleId(override val value: Long) extends AnyVal with BaseId[Long]

object ArticleId {
  implicit val articleIdFormat: Format[ArticleId] = new Format[ArticleId] {
    override def reads(json: JsValue): JsResult[ArticleId] = Reads.LongReads.reads(json).map(ArticleId(_))

    override def writes(o: ArticleId): JsNumber = Writes.LongWrites.writes(o.value)
  }

  implicit val articleIdDbMapping: BaseColumnType[ArticleId] = MappedColumnType.base[ArticleId, Long](
    vo => vo.value,
    id => ArticleId(id)
  )
}

object ArticleMetaModel extends IdMetaModel {
  val slug: Property[String] = Property("slug")
  val title: Property[String] = Property("title")
  val description: Property[String] = Property("description")
  val body: Property[String] = Property("body")

  val createdAt: Property[Instant] = Property("createdAt")
  val updatedAt: Property[Instant] = Property("updatedAt")

  val userId: Property[UserId] = Property("userId")

  override type ModelId = ArticleId
} 
Example 35
Source File: ArticleWithTags.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles.models

import java.time.Instant

import users.models.Profile
import play.api.libs.json.{Format, Json}

case class ArticleWithTags(id: ArticleId,
                           slug: String,
                           title: String,
                           description: String,
                           body: String,
                           createdAt: Instant,
                           updatedAt: Instant,
                           tagList: Seq[String],
                           author: Profile,
                           favorited: Boolean,
                           favoritesCount: Int)

object ArticleWithTags {

  implicit val articleWithTagsFormat: Format[ArticleWithTags] = Json.format[ArticleWithTags]

  def apply(article: Article, tags: Seq[Tag], author: Profile, favorited: Boolean,
            favoritesCount: Int): ArticleWithTags = {
    val tagValuesSorted = tags.map(_.name).sorted
    ArticleWithTags(
      article.id,
      article.slug,
      article.title,
      article.description,
      article.body,
      article.createdAt,
      article.updatedAt,
      tagValuesSorted,
      author,
      favorited,
      favoritesCount
    )
  }

  def fromTagValues(article: Article, tagValues: Seq[String], author: Profile, favorited: Boolean,
                    favoritesCount: Int): ArticleWithTags = {
    val tagValuesSorted = tagValues.sorted
    ArticleWithTags(
      article.id,
      article.slug,
      article.title,
      article.description,
      article.body,
      article.createdAt,
      article.updatedAt,
      tagValuesSorted,
      author,
      favorited,
      favoritesCount
    )
  }
} 
Example 36
Source File: CommentWithAuthor.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles.models

import java.time.Instant

import users.models.Profile
import play.api.libs.json._

case class CommentWithAuthor(id: CommentId,
                             articleId: ArticleId,
                             author: Profile,
                             body: String,
                             createdAt: Instant,
                             updatedAt: Instant
                            )

object CommentWithAuthor {
  implicit val commentFormat: Format[CommentWithAuthor] = Json.format[CommentWithAuthor]

  def apply(comment: Comment, author: Profile): CommentWithAuthor = CommentWithAuthor(
    comment.id,
    comment.articleId,
    author,
    comment.body,
    comment.createdAt,
    comment.updatedAt
  )
} 
Example 37
Source File: User.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package users.models

import java.time.Instant

import authentication.models.SecurityUserId
import commons.models.{WithId, _}
import play.api.libs.json._
import slick.jdbc.H2Profile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _}

case class User(id: UserId,
                securityUserId: SecurityUserId,
                username: Username,
                email: Email,
                bio: Option[String],
                image: Option[String],
                createdAt: Instant,
                updatedAt: Instant) extends WithId[Long, UserId]

case class UserId(override val value: Long) extends AnyVal with BaseId[Long]

object UserId {
  implicit val userIdFormat: Format[UserId] = new Format[UserId] {
    override def reads(json: JsValue): JsResult[UserId] = Reads.LongReads.reads(json).map(UserId(_))

    override def writes(o: UserId): JsValue = Writes.LongWrites.writes(o.value)
  }

  implicit val userIdDbMapping: BaseColumnType[UserId] = MappedColumnType.base[UserId, Long](
    vo => vo.value,
    id => UserId(id)
  )

}

object UserMetaModel extends IdMetaModel {
  override type ModelId = UserId

  val username: Property[Username] = Property("username")
  val email: Property[Email] = Property("email")
  val bio: Property[Option[String]] = Property("bio")
  val image: Property[Option[String]] = Property("image")
} 
Example 38
Source File: ArticleCreateTest.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package articles

import java.time.Instant

import articles.models.ArticleWrapper
import articles.test_helpers.Articles
import commons.repositories.DateTimeProvider
import commons_test.test_helpers.RealWorldWithServerAndTestConfigBaseTest.RealWorldWithTestConfig
import commons_test.test_helpers.{FixedDateTimeProvider, RealWorldWithServerAndTestConfigBaseTest, WithArticleTestHelper, WithUserTestHelper}
import play.api.ApplicationLoader.Context
import play.api.libs.ws.WSResponse
import users.models.UserDetailsWithToken
import users.test_helpers.UserRegistrations

class ArticleCreateTest extends RealWorldWithServerAndTestConfigBaseTest with WithArticleTestHelper with WithUserTestHelper {

  val dateTime: Instant = Instant.now

  "Create article" should "create valid article without tags" in await {
    for {
      userDetailsWithToken <- userTestHelper.register[UserDetailsWithToken](UserRegistrations.petycjaRegistration)
      newArticle = Articles.hotToTrainYourDragon.copy(tagList = Nil)
      response <- articleTestHelper.create[WSResponse](newArticle, userDetailsWithToken.token)
    } yield {
      response.status.mustBe(OK)
      val article = response.json.as[ArticleWrapper].article
      article.title.mustBe(newArticle.title)
      article.updatedAt.mustBe(dateTime)
      article.tagList.isEmpty.mustBe(true)
    }
  }

  it should "create valid article with dragons tag" in await {
    for {
      userDetailsWithToken <- userTestHelper.register[UserDetailsWithToken](UserRegistrations.petycjaRegistration)
      newArticle = Articles.hotToTrainYourDragon
      response <- articleTestHelper.create[WSResponse](newArticle, userDetailsWithToken.token)
    } yield {
      response.status.mustBe(OK)
      val article = response.json.as[ArticleWrapper].article
      article.title.mustBe(newArticle.title)
      article.updatedAt.mustBe(dateTime)
      article.tagList.size.mustBe(1L)
    }
  }


  it should "create article and associate it with existing dragons tag" in await {
    def createFirstArticleToCreateDragonsTag(userDetailsWithToken: UserDetailsWithToken) = {
      articleTestHelper.create[WSResponse](Articles.hotToTrainYourDragon, userDetailsWithToken.token)
    }

    for {
      userDetailsWithToken <- userTestHelper.register[UserDetailsWithToken](UserRegistrations.petycjaRegistration)
      _ <- createFirstArticleToCreateDragonsTag(userDetailsWithToken)
      response <- articleTestHelper.create[WSResponse](Articles.hotToTrainYourDragon, userDetailsWithToken.token)
    } yield {
      response.status.mustBe(OK)
      val article = response.json.as[ArticleWrapper].article
      article.tagList.size.mustBe(1L)
    }
  }

  it should "create article and set author" in await {
    val registration = UserRegistrations.petycjaRegistration
    for {
      userDetailsWithToken <- userTestHelper.register[UserDetailsWithToken](registration)
      response <- articleTestHelper.create[WSResponse](Articles.hotToTrainYourDragon, userDetailsWithToken.token)
    } yield {
      response.status.mustBe(OK)
      val article = response.json.as[ArticleWrapper].article
      article.author.username.mustBe(registration.username)
    }
  }

  it should "generate slug based on title" in await {
    for {
      userDetailsWithToken <- userTestHelper.register[UserDetailsWithToken](UserRegistrations.petycjaRegistration)
      titlePart1 = "the"
      titlePart2 = "title"
      response <- articleTestHelper.create[WSResponse](Articles.hotToTrainYourDragon, userDetailsWithToken.token)
    } yield {
      response.status.mustBe(OK)
      val slug = response.json.as[ArticleWrapper].article.slug
      slug.must(include(titlePart1).and(include(titlePart2)))
    }
  }

  override def createComponents: RealWorldWithTestConfig = {
    new RealWorldWithTestConfigWithFixedDateTimeProvider(new FixedDateTimeProvider(dateTime), context)
  }

}

class RealWorldWithTestConfigWithFixedDateTimeProvider(dtProvider: DateTimeProvider, context: Context)
  extends RealWorldWithTestConfig(context) {

  override lazy val dateTimeProvider: DateTimeProvider = dtProvider
} 
Example 39
Source File: server.scala    From redis4cats   with Apache License 2.0 5 votes vote down vote up
package dev.profunktor.redis4cats.algebra

import java.time.Instant

trait ServerCommands[F[_], K] extends Flush[F, K] with Diagnostic[F]

trait Flush[F[_], K] {
  def keys(key: K): F[List[K]]
  def flushAll: F[Unit]
  def flushAllAsync: F[Unit]
}

trait Diagnostic[F[_]] {
  def info: F[Map[String, String]]
  def dbsize: F[Long]
  def lastSave: F[Instant]
  def slowLogLen: F[Long]
} 
Example 40
Source File: Timer.scala    From Adenium   with Apache License 2.0 5 votes vote down vote up
package com.adenium.utils

import java.time.{Instant, ZoneId}
import java.time.format.DateTimeFormatter
import java.util.Locale

import com.adenium.utils.May._

object Timer {

  def currentMillis: String = Instant.now().toEpochMilli.toString

  //////////////////////////////////////////////////

  def Timer[A] ( f : => A) : ( A, Long) = {
    val s = System.currentTimeMillis
    val r = f
    val e = System.currentTimeMillis
    ( r, e-s )
  }

  def TimeLog[A]( f : => A)(msg: String): A = {
    val ( r, t) = Timer( f)
    Logger.logWarning( s"[ Time spent ] $t in $msg")
    r
  }


  def UnitTimer( f : => Unit) : Long = {
    val ( _, t) = Timer( f)
    t
  }
} 
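A minimal usage sketch for the helpers above; it assumes com.adenium.utils.Timer is on the classpath and only exercises Timer and currentMillis.

import com.adenium.utils.Timer

object TimerUsageSketch extends App {
  // run a block and get back its result together with the wall-clock time spent
  val (sum, elapsedMillis) = Timer.Timer { (1 to 1000000).sum }
  println(s"sum=$sum took ${elapsedMillis}ms (now: ${Timer.currentMillis})")
}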
Example 41
Source File: ApplicationTimer.scala    From play-webpack-typescript-react   with MIT License 5 votes vote down vote up
package services

import java.time.{Clock, Instant}
import javax.inject._
import play.api.Logger
import play.api.inject.ApplicationLifecycle
import scala.concurrent.Future


@Singleton
class ApplicationTimer @Inject()(clock: Clock, appLifecycle: ApplicationLifecycle) {

  // This code is called when the application starts.
  private val start: Instant = clock.instant
  Logger.info(s"ApplicationTimer demo: Starting application at $start.")

  // When the application starts, register a stop hook with the
  // ApplicationLifecycle object. The code inside the stop hook will
  // be run when the application stops.
  appLifecycle.addStopHook { () =>
    val stop: Instant     = clock.instant
    val runningTime: Long = stop.getEpochSecond - start.getEpochSecond
    Logger.info(s"ApplicationTimer demo: Stopping application at ${clock.instant} after ${runningTime}s.")
    Future.successful(())
  }
} 
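A self-contained sketch of the same uptime arithmetic using fixed/offset clocks instead of a live application; the timestamps are hypothetical.

import java.time.{Clock, Duration, Instant, ZoneOffset}

object UptimeSketch extends App {
  val startClock = Clock.fixed(Instant.parse("2020-01-01T00:00:00Z"), ZoneOffset.UTC)
  val stopClock  = Clock.offset(startClock, Duration.ofSeconds(42)) // pretend the app ran for 42s
  val runningTime = stopClock.instant.getEpochSecond - startClock.instant.getEpochSecond
  println(s"Stopping application after ${runningTime}s.") // 42
}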
Example 42
Source File: TimeBasedUUIDs.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase.internal

import java.time.Instant
import java.time.format.{DateTimeFormatterBuilder, SignStyle}
import java.time.temporal.ChronoField
import java.util.{Comparator, UUID}

import akka.annotation.InternalApi


  def toSortableString(id: UUID): String = {
    require(id.version() == 1)
    val builder = new StringBuilder()
    val instant = UUIDTimestamp(id.timestamp()).toInstant
    builder.append(SortableTimeFormatter.format(instant))
    builder.append('_')
    builder.append("%20s".format(java.lang.Long.toUnsignedString(id.getLeastSignificantBits)))
    builder.toString()
  }

  def fromSortableString(text: String): UUID = {
    val parts = text.split('_')
    val parsed = SortableTimeFormatter.parse(parts(0))
    val instant = Instant.from(parsed).atZone(UUIDTimestamp.GMT)
    val timestamp = UUIDTimestamp(instant)
    val lsb = java.lang.Long.parseUnsignedLong(parts(1).trim)
    TimeBasedUUIDs.create(timestamp, lsb)
  }
} 
Example 43
Source File: OrderInstancesSpec.scala    From fs2-rabbit   with Apache License 2.0 5 votes vote down vote up
package dev.profunktor.fs2rabbit.laws

import java.time.Instant

import cats._
import cats.implicits._
import cats.kernel.laws.discipline._
import dev.profunktor.fs2rabbit.model.AmqpFieldValue._
import dev.profunktor.fs2rabbit.model._
import dev.profunktor.fs2rabbit.testkit._
import org.scalacheck.Arbitrary._
import org.scalatest.funspec.AnyFunSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.typelevel.discipline.scalatest.FunSpecDiscipline

class OrderInstancesSpec
    extends AnyFunSpec
    with FunSpecDiscipline
    with ScalaCheckPropertyChecks
    with InstantArbitraries {
  implicit val orderInstant: Order[Instant] = Order.by(_.getEpochSecond)

  checkAll("ExchangeName.OrderLaws", OrderTests[ExchangeName].order)
  checkAll("QueueName.OrderLaws", OrderTests[QueueName].order)
  checkAll("RoutingKey.OrderLaws", OrderTests[RoutingKey].order)
  checkAll("DeliveryTag.OrderLaws", OrderTests[DeliveryTag].order)
  checkAll("ConsumerTag.OrderLaws", OrderTests[ConsumerTag].order)
  checkAll("Instant.OrderLaws", OrderTests[Instant](instantOrderWithSecondPrecision).order)
  checkAll("DeliveryMode.OrderLaws", OrderTests[DeliveryMode].order)
  checkAll("ShortString.OrderLaws", OrderTests[ShortString].order)
  checkAll("TimestampVal.OrderLaws", OrderTests[TimestampVal].order)
  checkAll("DecimalVal.OrderLaws", OrderTests[DecimalVal].order)
  checkAll("ByteVal.OrderLaws", OrderTests[ByteVal].order)
  checkAll("DoubleVal.OrderLaws", OrderTests[DoubleVal].order)
  checkAll("FloatVal.OrderLaws", OrderTests[FloatVal].order)
  checkAll("ShortVal.OrderLaws", OrderTests[ShortVal].order)
  checkAll("BooleanVal.OrderLaws", OrderTests[BooleanVal].order)
  checkAll("IntVal.OrderLaws", OrderTests[IntVal].order)
  checkAll("LongVal.OrderLaws", OrderTests[LongVal].order)
  checkAll("StringVal.OrderLaws", OrderTests[StringVal].order)
  checkAll("NullVal.OrderLaws", OrderTests[NullVal.type].order)
} 
Example 44
Source File: AmqpFieldValueSpec.scala    From fs2-rabbit   with Apache License 2.0 5 votes vote down vote up
package dev.profunktor.fs2rabbit

import java.io.{DataInputStream, DataOutputStream, InputStream, OutputStream}
import java.time.Instant

import com.rabbitmq.client.impl.{ValueReader, ValueWriter}
import dev.profunktor.fs2rabbit.model.AmqpFieldValue._
import dev.profunktor.fs2rabbit.model.{AmqpFieldValue, ShortString}
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers

class AmqpFieldValueSpec extends AnyFlatSpecLike with Matchers with AmqpPropertiesArbitraries {

  it should "convert from and to Java primitive header values" in {
    val intVal    = IntVal(1)
    val longVal   = LongVal(2L)
    val stringVal = StringVal("hey")
    val arrayVal  = ArrayVal(Vector(IntVal(3), IntVal(2), IntVal(1)))

    AmqpFieldValue.unsafeFrom(intVal.toValueWriterCompatibleJava) should be(intVal)
    AmqpFieldValue.unsafeFrom(longVal.toValueWriterCompatibleJava) should be(longVal)
    AmqpFieldValue.unsafeFrom(stringVal.toValueWriterCompatibleJava) should be(stringVal)
    AmqpFieldValue.unsafeFrom("fs2") should be(StringVal("fs2"))
    AmqpFieldValue.unsafeFrom(arrayVal.toValueWriterCompatibleJava) should be(arrayVal)
  }
  it should "preserve the same value after a round-trip through impure and from" in {
    forAll { amqpHeaderVal: AmqpFieldValue =>
      AmqpFieldValue.unsafeFrom(amqpHeaderVal.toValueWriterCompatibleJava) should be(amqpHeaderVal)
    }
  }

  it should "preserve the same values after a round-trip through the Java ValueReader and ValueWriter" in {
    forAll(assertThatValueIsPreservedThroughJavaWriteAndRead _)
  }

  it should "preserve a specific StringVal that previously failed after a round-trip through the Java ValueReader and ValueWriter" in {
    assertThatValueIsPreservedThroughJavaWriteAndRead(StringVal("kyvmqzlbjivLqQFukljghxdowkcmjklgSeybdy"))
  }

  it should "preserve a specific DateVal created from an Instant that has millisecond accuracy after a round-trip through the Java ValueReader and ValueWriter" in {
    val instant   = Instant.parse("4000-11-03T20:17:29.57Z")
    val myDateVal = TimestampVal.from(instant)
    assertThatValueIsPreservedThroughJavaWriteAndRead(myDateVal)
  }

  "DecimalVal" should "reject a BigDecimal of an unscaled value with 33 bits..." in {
    DecimalVal.from(BigDecimal(Int.MaxValue) + BigDecimal(1)) should be(None)
  }
  it should "reject a BigDecimal with a scale over octet size" in {
    DecimalVal.from(new java.math.BigDecimal(java.math.BigInteger.valueOf(12345L), 1000)) should be(None)
  }

  // We wrap values in a dummy table because readFieldValue, the ValueReader
  // method we would really like to exercise, is private, so we go through the
  // next best thing, readTable.
  private def wrapInDummyTable(value: AmqpFieldValue): TableVal =
    TableVal(Map(ShortString.unsafeFrom("dummyKey") -> value))

  private def createWriterFromQueue(outputResults: collection.mutable.Queue[Byte]): ValueWriter =
    new ValueWriter({
      new DataOutputStream({
        new OutputStream {
          override def write(b: Int): Unit =
            outputResults.enqueue(b.toByte)
        }
      })
    })

  private def createReaderFromQueue(input: collection.mutable.Queue[Byte]): ValueReader = {
    val inputStream = new InputStream {
      override def read(): Int =
        try {
          val result = input.dequeue()
          // A signed -> unsigned conversion: bytes are sign-extended to negative
          // ints by default, and read()'s contract reserves negative values for EOF.
          0xff & result.toInt
        } catch {
          case _: NoSuchElementException => -1
        }

      override def available(): Int = {
        val result = input.size
        result
      }
    }
    new ValueReader(new DataInputStream(inputStream))
  }

  private def assertThatValueIsPreservedThroughJavaWriteAndRead(amqpHeaderVal: AmqpFieldValue): Assertion = {
    val outputResultsAsTable = collection.mutable.Queue.empty[Byte]
    val tableWriter          = createWriterFromQueue(outputResultsAsTable)
    tableWriter.writeTable(wrapInDummyTable(amqpHeaderVal).toValueWriterCompatibleJava)

    val reader    = createReaderFromQueue(outputResultsAsTable)
    val readValue = reader.readTable()
    AmqpFieldValue.unsafeFrom(readValue) should be(wrapInDummyTable(amqpHeaderVal))
  }
} 
Example 45
Source File: BruteForceDefenderActor.scala    From silhouette-vuejs-app   with Apache License 2.0 5 votes vote down vote up
package models.services

import java.time.Instant

import akka.actor.{Actor, Cancellable}
import javax.inject.Inject

import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
import scala.language.postfixOps

class BruteForceDefenderActor @Inject()(conf: BruteForceDefenderConf)(implicit ex: ExecutionContext) extends Actor {
  import BruteForceDefenderActor._

  private var signInAttempts = Map.empty[String, List[Instant]]

  private var scheduler: Cancellable = _

  private case object Reset

  override def preStart(): Unit = {
    import scala.concurrent.duration._
    scheduler = context.system.scheduler.schedule(
      initialDelay = 1 second,
      interval = conf.period,
      receiver = self,
      message = Reset
    )
  }

  override def postStop(): Unit = {
    scheduler.cancel()
  }

  override def receive: Receive = {
    case IsSignInAllowed(email) =>
      val signInAttemptsForEmail = signInAttempts.getOrElse(email, List.empty)
      val attemptsAllowed = conf.attempts - signInAttemptsForEmail.size
      if (attemptsAllowed > 0) {
        sender() ! SignInAllowed(attemptsAllowed)
      } else {
        sender() ! SignInForbidden(signInAttemptsForEmail.last.plusSeconds(conf.period.toSeconds))
      }

    case RegisterWrongPasswordSignIn(email) =>
      signInAttempts += email -> (Instant.now :: signInAttempts.getOrElse(email, List.empty))

    case Reset =>
      val expiredBefore = Instant.now.minusSeconds(conf.period.toSeconds)
      signInAttempts = signInAttempts.map {
        case (email, attempts) => email -> attempts.filter(_.isAfter(expiredBefore))
      }
  }
}


case class BruteForceDefenderConf(attempts: Int, period: FiniteDuration)

object BruteForceDefenderActor {
  case class RegisterWrongPasswordSignIn(email: String)

  case class IsSignInAllowed(email: String)
  case class SignInAllowed(attemptsAllowed: Int)
  case class SignInForbidden(nextSignInAllowedAt: Instant)
} 
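A minimal sketch of the sliding-window check performed in the Reset and IsSignInAllowed handlers, with plain Instants and no actor; the attempt timestamps are hypothetical.

import java.time.Instant
import scala.concurrent.duration._

object SignInWindowSketch extends App {
  val period        = 10.minutes
  val attempts      = List(Instant.now.minusSeconds(900), Instant.now.minusSeconds(60), Instant.now)
  val expiredBefore = Instant.now.minusSeconds(period.toSeconds)
  val stillCounted  = attempts.filter(_.isAfter(expiredBefore)) // same filter as the Reset handler
  println(s"${stillCounted.size} of ${attempts.size} attempts fall inside the $period window") // 2 of 3
}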
Example 46
Source File: PubSubMessage.scala    From akka-cloudpubsub   with Apache License 2.0 5 votes vote down vote up
package com.qubit.pubsub.client

import java.time.{Instant, ZoneOffset, ZonedDateTime}

import com.google.protobuf.{ByteString, Timestamp}
import com.google.pubsub.v1.{
  PubsubMessage => PubSubMessageProto,
  ReceivedMessage => ReceivedPubSubMessageProto
}

import scala.collection.JavaConversions._

final case class PubSubMessage(
    payload: Array[Byte],
    msgId: Option[String] = None,
    publishTs: Option[ZonedDateTime] = None,
    attributes: Option[Map[String, String]] = None) {
  def toProto: PubSubMessageProto = {
    val builder = PubSubMessageProto.newBuilder()
    builder.setData(ByteString.copyFrom(payload))
    publishTs.foreach(
      ts =>
        builder.setPublishTime(
          Timestamp.newBuilder().setSeconds(ts.toEpochSecond).build()))
    msgId.foreach(id => builder.setMessageId(id))
    attributes.foreach(attr => builder.putAllAttributes(attr))
    builder.build()
  }
}

object PubSubMessage {
  def fromProto(proto: PubSubMessageProto): PubSubMessage = {
    val payload = proto.getData.toByteArray
    val msgId = Some(proto.getMessageId)
    val attributes = if (proto.getAttributesMap.isEmpty) { None } else {
      Some(proto.getAttributesMap.toMap)
    }
    val publishTs = if (proto.hasPublishTime) {
      Some(
        ZonedDateTime.ofInstant(
          Instant.ofEpochSecond(proto.getPublishTime.getSeconds),
          ZoneOffset.UTC))
    } else {
      None
    }

    PubSubMessage(payload,
                  msgId = msgId,
                  publishTs = publishTs,
                  attributes = attributes)
  }
}

final case class ReceivedPubSubMessage(ackId: String, payload: PubSubMessage)

object ReceivedPubSubMessage {
  def fromProto(proto: ReceivedPubSubMessageProto): ReceivedPubSubMessage = {
    val ackId = proto.getAckId
    val payload = PubSubMessage.fromProto(proto.getMessage)
    ReceivedPubSubMessage(ackId, payload)
  }
} 
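A minimal sketch of the second-precision timestamp conversion used in toProto/fromProto; note that only whole epoch seconds survive the round trip.

import java.time.{Instant, ZoneOffset, ZonedDateTime}

object PublishTsSketch extends App {
  val publishTs = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1234567890L), ZoneOffset.UTC)
  println(publishTs)               // 2009-02-13T23:31:30Z
  println(publishTs.toEpochSecond) // 1234567890, the value written into the protobuf Timestamp
}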
Example 47
Source File: LoadDataBenchmark.scala    From memsql-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.memsql.spark

import java.sql.{Connection, Date, DriverManager}
import java.time.{Instant, LocalDate}
import java.util.Properties

import org.apache.spark.sql.types._
import com.github.mrpowers.spark.daria.sql.SparkSessionExt._
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.util.Random

// LoadDataBenchmark is written to test load data with a CPU profiler attached.
// The async profiler is available in the Ultimate edition of IntelliJ IDEA;
// see https://www.jetbrains.com/help/idea/async-profiler.html#profile for more details.
object LoadDataBenchmark extends App {
  final val masterHost: String = sys.props.getOrElse("memsql.host", "localhost")
  final val masterPort: String = sys.props.getOrElse("memsql.port", "5506")

  val spark: SparkSession = SparkSession
    .builder()
    .master("local")
    .config("spark.sql.shuffle.partitions", "1")
    .config("spark.driver.bindAddress", "localhost")
    .config("spark.datasource.memsql.ddlEndpoint", s"${masterHost}:${masterPort}")
    .config("spark.datasource.memsql.database", "testdb")
    .getOrCreate()

  def jdbcConnection: Loan[Connection] = {
    val connProperties = new Properties()
    connProperties.put("user", "root")

    Loan(
      DriverManager.getConnection(
        s"jdbc:mysql://$masterHost:$masterPort",
        connProperties
      ))
  }

  def executeQuery(sql: String): Unit = {
    jdbcConnection.to(conn => Loan(conn.createStatement).to(_.execute(sql)))
  }

  executeQuery("set global default_partitions_per_leaf = 2")
  executeQuery("drop database if exists testdb")
  executeQuery("create database testdb")

  def genRow(): (Long, Int, Double, String) =
    (Random.nextLong(), Random.nextInt(), Random.nextDouble(), Random.nextString(20))
  val df =
    spark.createDF(
      List.fill(1000000)(genRow()),
      List(("LongType", LongType, true),
           ("IntType", IntegerType, true),
           ("DoubleType", DoubleType, true),
           ("StringType", StringType, true))
    )

  val start = System.nanoTime()
  df.write
    .format("memsql")
    .mode(SaveMode.Append)
    .save("testdb.batchinsert")

  val diff = System.nanoTime() - start
  println("Elapsed time: " + diff + "ns [CSV serialization] ")

  executeQuery("truncate testdb.batchinsert")

  val avroStart = System.nanoTime()
  df.write
    .format(DefaultSource.MEMSQL_SOURCE_NAME_SHORT)
    .mode(SaveMode.Append)
    .option(MemsqlOptions.LOAD_DATA_FORMAT, "Avro")
    .save("testdb.batchinsert")
  val avroDiff = System.nanoTime() - avroStart
  println("Elapsed time: " + avroDiff + "ns [Avro serialization] ")
} 
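The benchmark above times its writes with System.nanoTime; the sketch below shows the equivalent measurement expressed with java.time, using a sleep as a stand-in for the Spark write.

import java.time.{Duration, Instant}

object ElapsedTimeSketch extends App {
  val start = Instant.now()
  Thread.sleep(250)                                    // stand-in for df.write ... .save(...)
  val elapsed = Duration.between(start, Instant.now())
  println("Elapsed time: " + elapsed.toMillis + "ms")
}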
Example 48
Source File: AuthToken.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.backend.auth

import java.time.Instant
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec

import cats.effect._
import cats.implicits._

import docspell.backend.Common
import docspell.backend.auth.AuthToken._
import docspell.common._

import scodec.bits.ByteVector

case class AuthToken(millis: Long, account: AccountId, salt: String, sig: String) {
  def asString = s"$millis-${b64enc(account.asString)}-$salt-$sig"

  def sigValid(key: ByteVector): Boolean = {
    val newSig = AuthToken.sign(this, key)
    AuthToken.constTimeEq(sig, newSig)
  }
  def sigInvalid(key: ByteVector): Boolean =
    !sigValid(key)

  def notExpired(validity: Duration): Boolean =
    !isExpired(validity)

  def isExpired(validity: Duration): Boolean = {
    val ends = Instant.ofEpochMilli(millis).plusMillis(validity.millis)
    Instant.now.isAfter(ends)
  }

  def validate(key: ByteVector, validity: Duration): Boolean =
    sigValid(key) && notExpired(validity)
}

object AuthToken {
  private val utf8 = java.nio.charset.StandardCharsets.UTF_8

  def fromString(s: String): Either[String, AuthToken] =
    s.split("\\-", 4) match {
      case Array(ms, as, salt, sig) =>
        for {
          millis <- asInt(ms).toRight("Cannot read authenticator data")
          acc    <- b64dec(as).toRight("Cannot read authenticator data")
          accId  <- AccountId.parse(acc)
        } yield AuthToken(millis, accId, salt, sig)

      case _ =>
        Left("Invalid authenticator")
    }

  def user[F[_]: Sync](accountId: AccountId, key: ByteVector): F[AuthToken] =
    for {
      salt <- Common.genSaltString[F]
      millis = Instant.now.toEpochMilli
      cd     = AuthToken(millis, accountId, salt, "")
      sig    = sign(cd, key)
    } yield cd.copy(sig = sig)

  private def sign(cd: AuthToken, key: ByteVector): String = {
    val raw = cd.millis.toString + cd.account.asString + cd.salt
    val mac = Mac.getInstance("HmacSHA1")
    mac.init(new SecretKeySpec(key.toArray, "HmacSHA1"))
    ByteVector.view(mac.doFinal(raw.getBytes(utf8))).toBase64
  }

  private def b64enc(s: String): String =
    ByteVector.view(s.getBytes(utf8)).toBase64

  private def b64dec(s: String): Option[String] =
    ByteVector.fromValidBase64(s).decodeUtf8.toOption

  private def asInt(s: String): Option[Long] =
    Either.catchNonFatal(s.toLong).toOption

  private def constTimeEq(s1: String, s2: String): Boolean =
    s1.zip(s2)
      .foldLeft(true)({ case (r, (c1, c2)) => r & c1 == c2 }) & s1.length == s2.length

} 
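A minimal sketch of the isExpired arithmetic above, with plain longs in place of docspell's Duration type; the issue time is hypothetical.

import java.time.Instant

object TokenExpirySketch extends App {
  val issuedMillis   = Instant.now.minusSeconds(3600).toEpochMilli // token minted an hour ago
  val validityMillis = 30 * 60 * 1000L                             // 30-minute validity
  val ends           = Instant.ofEpochMilli(issuedMillis).plusMillis(validityMillis)
  println(s"expired: ${Instant.now.isAfter(ends)}") // true: the hour-old token outlived its validity
}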
Example 49
Source File: DoobieMeta.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.store.impl

import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDate}

import docspell.common._
import docspell.common.syntax.all._

import com.github.eikek.calev.CalEvent
import doobie._
import doobie.implicits.legacy.instant._
import doobie.util.log.Success
import emil.doobie.EmilDoobieMeta
import io.circe.{Decoder, Encoder}

trait DoobieMeta extends EmilDoobieMeta {

  implicit val sqlLogging = LogHandler({
    case e @ Success(_, _, _, _) =>
      DoobieMeta.logger.trace("SQL " + e)
    case e =>
      DoobieMeta.logger.error(s"SQL Failure: $e")
  })

  def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] =
    Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a =>
      e.apply(a).noSpaces
    )

  implicit val metaCollectiveState: Meta[CollectiveState] =
    Meta[String].imap(CollectiveState.unsafe)(CollectiveState.asString)

  implicit val metaUserState: Meta[UserState] =
    Meta[String].imap(UserState.unsafe)(UserState.asString)

  implicit val metaPassword: Meta[Password] =
    Meta[String].imap(Password(_))(_.pass)

  implicit val metaIdent: Meta[Ident] =
    Meta[String].imap(Ident.unsafe)(_.id)

  implicit val metaContactKind: Meta[ContactKind] =
    Meta[String].imap(ContactKind.unsafe)(_.asString)

  implicit val metaTimestamp: Meta[Timestamp] =
    Meta[Instant].imap(Timestamp(_))(_.value)

  implicit val metaJobState: Meta[JobState] =
    Meta[String].imap(JobState.unsafe)(_.name)

  implicit val metaDirection: Meta[Direction] =
    Meta[Boolean].imap(flag =>
      if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction
    )(d => Direction.isIncoming(d))

  implicit val metaPriority: Meta[Priority] =
    Meta[Int].imap(Priority.fromInt)(Priority.toInt)

  implicit val metaLogLevel: Meta[LogLevel] =
    Meta[String].imap(LogLevel.unsafeString)(_.name)

  implicit val metaLenientUri: Meta[LenientUri] =
    Meta[String].imap(LenientUri.unsafe)(_.asString)

  implicit val metaNodeType: Meta[NodeType] =
    Meta[String].imap(NodeType.unsafe)(_.name)

  implicit val metaLocalDate: Meta[LocalDate] =
    Meta[String].imap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE))

  implicit val metaItemState: Meta[ItemState] =
    Meta[String].imap(ItemState.unsafe)(_.name)

  implicit val metNerTag: Meta[NerTag] =
    Meta[String].imap(NerTag.unsafe)(_.name)

  implicit val metaNerLabel: Meta[NerLabel] =
    jsonMeta[NerLabel]

  implicit val metaNerLabelList: Meta[List[NerLabel]] =
    jsonMeta[List[NerLabel]]

  implicit val metaItemProposal: Meta[MetaProposal] =
    jsonMeta[MetaProposal]

  implicit val metaItemProposalList: Meta[MetaProposalList] =
    jsonMeta[MetaProposalList]

  implicit val metaLanguage: Meta[Language] =
    Meta[String].imap(Language.unsafe)(_.iso3)

  implicit val metaCalEvent: Meta[CalEvent] =
    Meta[String].timap(CalEvent.unsafe)(_.asString)
}

object DoobieMeta extends DoobieMeta {
  import org.log4s._
  private val logger = getLogger

} 
Example 50
Source File: database.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin

import cats.implicits._
import com.azavea.stac4s.TemporalExtent
import doobie.implicits.javasql._
import doobie.util.meta.Meta
import doobie.util.{Read, Write}
import io.circe.{Decoder, Encoder}

import java.sql.Timestamp
import java.time.Instant

package object database extends CirceJsonbMeta with GeotrellisWktMeta with Filterables {

  implicit val instantMeta: Meta[Instant]   = Meta[Timestamp].imap(_.toInstant)(Timestamp.from)
  implicit val instantRead: Read[Instant]   = Read[Timestamp].imap(_.toInstant)(Timestamp.from)
  implicit val instantWrite: Write[Instant] = Write[Timestamp].imap(_.toInstant)(Timestamp.from)

  def stringToInstant: String => Either[Throwable, Instant] =
    (s: String) => Either.catchNonFatal(Instant.parse(s))

  def temporalExtentToString(te: TemporalExtent): String = {
    te.value match {
      case Some(start) :: Some(end) :: _ if start != end => s"${start.toString}/${end.toString}"
      case Some(start) :: Some(end) :: _ if start == end => s"${start.toString}"
      case Some(start) :: None :: _                      => s"${start.toString}/.."
      case None :: Some(end) :: _                        => s"../${end.toString}"
    }
  }

  def temporalExtentFromString(str: String): Either[String, TemporalExtent] = {
    str.split("/").toList match {
      case ".." :: endString :: _ =>
        val parsedEnd: Either[Throwable, Instant] = stringToInstant(endString)
        parsedEnd match {
          case Left(_)             => Left(s"Could not decode instant: $str")
          case Right(end: Instant) => Right(TemporalExtent(None, end))
        }
      case startString :: ".." :: _ =>
        val parsedStart: Either[Throwable, Instant] = stringToInstant(startString)
        parsedStart match {
          case Left(_)               => Left(s"Could not decode instant: $str")
          case Right(start: Instant) => Right(TemporalExtent(start, None))
        }
      case startString :: endString :: _ =>
        val parsedStart: Either[Throwable, Instant] = stringToInstant(startString)
        val parsedEnd: Either[Throwable, Instant]   = stringToInstant(endString)
        (parsedStart, parsedEnd).tupled match {
          case Left(_)                               => Left(s"Could not decode instant: $str")
          case Right((start: Instant, end: Instant)) => Right(TemporalExtent(start, end))
        }
      case _ =>
        Either.catchNonFatal(Instant.parse(str)) match {
          case Left(_)           => Left(s"Could not decode instant: $str")
          case Right(t: Instant) => Right(TemporalExtent(t, t))
        }
    }
  }

  implicit val encoderTemporalExtent: Encoder[TemporalExtent] =
    Encoder.encodeString.contramap[TemporalExtent] { extent => temporalExtentToString(extent) }

  implicit val decoderTemporalExtent: Decoder[TemporalExtent] = Decoder.decodeString.emap { str =>
    temporalExtentFromString(str)
  }
} 
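A minimal sketch of the "start/end" interval convention parsed above, where ".." marks an open bound; it uses Option[Instant] in place of stac4s's TemporalExtent and Try instead of cats' Either.catchNonFatal.

import java.time.Instant
import scala.util.Try

object TemporalExtentSketch extends App {
  def parseBound(s: String): Option[Instant] =
    if (s == "..") None else Try(Instant.parse(s)).toOption

  val parts = "2020-01-01T00:00:00Z/..".split("/").toList
  println(parts.map(parseBound)) // List(Some(2020-01-01T00:00:00Z), None)
}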
Example 51
Source File: PaginationToken.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.datamodel

import com.azavea.stac4s.meta._
import eu.timepit.refined.types.numeric.PosInt
import io.circe.generic.semiauto._
import io.circe.parser._
import io.circe.refined._
import io.circe.syntax._
import io.circe.{Decoder, Encoder}
import sttp.tapir.DecodeResult

import java.time.Instant
import java.util.Base64

final case class PaginationToken(
    timestampAtLeast: Instant,
    serialIdGreaterThan: PosInt
)

object PaginationToken {

  implicit class ToTapirDecodeResult[T](circeResult: Either[io.circe.Error, T]) {

    def toDecodeResult: DecodeResult[T] = {
      circeResult match {
        case Left(err) =>
          DecodeResult.Error(err.getMessage, err)
        case Right(value) =>
          DecodeResult.Value(value)
      }
    }
  }

  implicit val dec: Decoder[PaginationToken] = deriveDecoder
  implicit val enc: Encoder[PaginationToken] = deriveEncoder

  val b64Encoder = Base64.getEncoder()
  val b64Decoder = Base64.getDecoder()

  def encPaginationToken(token: PaginationToken): String = b64Encoder.encodeToString(
    token.asJson.noSpaces.getBytes
  )

  def decPaginationToken(encoded: String): DecodeResult[PaginationToken] = {
    val jsonString: String = new String(b64Decoder.decode(encoded))
    val circeResult = for {
      js      <- parse(jsonString)
      decoded <- js.as[PaginationToken]
    } yield decoded
    circeResult.toDecodeResult
  }

} 
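A minimal sketch of the Base64 round trip performed by encPaginationToken/decPaginationToken, with a hand-written JSON payload standing in for the circe encoding.

import java.nio.charset.StandardCharsets.UTF_8
import java.time.Instant
import java.util.Base64

object PaginationTokenSketch extends App {
  val json    = s"""{"timestampAtLeast":"${Instant.EPOCH}","serialIdGreaterThan":1}"""
  val encoded = Base64.getEncoder.encodeToString(json.getBytes(UTF_8))
  val decoded = new String(Base64.getDecoder.decode(encoded), UTF_8)
  println(encoded)
  assert(decoded == json) // the token survives the encode/decode round trip
}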
Example 52
Source File: HeapDumpTest.scala    From fintrospect   with Apache License 2.0 5 votes vote down vote up
package io.fintrospect.util

import java.time.Instant

import com.twitter.finagle.http.{Request, Status}
import com.twitter.util.Await
import org.scalatest.{FunSpec, Matchers}

class HeapDumpTest extends FunSpec with Matchers {

  describe("HeapDump") {
    it("creates the correct heapdump file") {
      val clock = TestClocks.fixed(Instant.ofEpochMilli(0))
      val response = Await.result(new HeapDump("bob", clock).apply(Request()))
      response.status shouldBe Status.Ok
      response.headerMap("Content-disposition").startsWith("inline; filename=\"heapdump-bob-1970-01-01") shouldBe true
      response.contentType shouldBe Some("application/x-heap-dump;charset=utf-8")
    }
  }
} 
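A small sketch of why the clock fixed at epoch millisecond 0 shows up as 1970-01-01 in the expected filename; the exact formatter is an assumption, only the date value is asserted by the test.

import java.time.{Instant, ZoneOffset}
import java.time.format.DateTimeFormatter

object HeapDumpNameSketch extends App {
  val clockInstant = Instant.ofEpochMilli(0)
  val datePart     = DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZoneOffset.UTC).format(clockInstant)
  println(s"heapdump-bob-$datePart") // heapdump-bob-1970-01-01
}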
Example 53
Source File: ApplicationTimer.scala    From sbt-header   with Apache License 2.0 5 votes vote down vote up
package services

import java.time.{Clock, Instant}
import javax.inject._
import play.api.Logger
import play.api.inject.ApplicationLifecycle
import scala.concurrent.Future


@Singleton
class ApplicationTimer @Inject() (clock: Clock, appLifecycle: ApplicationLifecycle) {

  // This code is called when the application starts.
  private val start: Instant = clock.instant
  Logger.info(s"ApplicationTimer demo: Starting application at $start.")

  // When the application starts, register a stop hook with the
  // ApplicationLifecycle object. The code inside the stop hook will
  // be run when the application stops.
  appLifecycle.addStopHook { () =>
    val stop: Instant = clock.instant
    val runningTime: Long = stop.getEpochSecond - start.getEpochSecond
    Logger.info(s"ApplicationTimer demo: Stopping application at ${clock.instant} after ${runningTime}s.")
    Future.successful(())
  }
} 
Example 54
Source File: ArrayOfInstantsBenchmark.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8
import java.time.Instant

import org.openjdk.jmh.annotations.{Param, Setup}

abstract class ArrayOfInstantsBenchmark extends CommonParams {
  @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000"))
  var size: Int = 1000
  var obj: Array[Instant] = _
  var jsonString: String = _
  var jsonBytes: Array[Byte] = _
  var preallocatedBuf: Array[Byte] = _

  @Setup
  def setup(): Unit = {
    obj = (1 to size).map { i =>
      val n = Math.abs(i * 1498724053)
      Instant.ofEpochSecond(n, i % 4 match {
        case 0 => 0
        case 1 => ((n % 1000) | 0x1) * 1000000
        case 2 => ((n % 1000000) | 0x1) * 1000
        case 3 => (n | 0x1) % 1000000000
      })
    }.toArray
    jsonString = obj.mkString("[\"", "\",\"", "\"]")
    jsonBytes = jsonString.getBytes(UTF_8)
    preallocatedBuf = new Array[Byte](jsonBytes.length + 100)
  }
} 
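A short sketch of the two-argument Instant.ofEpochSecond factory used in setup(): the nanosecond adjustment may exceed one second and is normalized into the seconds part.

import java.time.Instant

object OfEpochSecondSketch extends App {
  println(Instant.ofEpochSecond(10, 500000000L))  // 1970-01-01T00:00:10.500Z
  println(Instant.ofEpochSecond(10, 1500000000L)) // 1970-01-01T00:00:11.500Z (nanos carried into seconds)
}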
Example 55
Source File: GitHubActionsAPI.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.time.Instant

import com.fasterxml.jackson.databind.annotation.JsonSerialize
import com.github.plokhotnyuk.jsoniter_scala.macros._

import scala.collection.immutable.Seq

object GitHubActionsAPI {
  case class Artifact(
    id: Long,
    node_id: String,
    name: String,
    size_in_bytes: Long,
    url: String,
    archive_download_url: String,
    @JsonSerialize(using = classOf[StringifiedBooleanSerializer]) @stringified expired: Boolean,
    created_at: Instant,
    expires_at: Instant)

  case class Response(
    total_count: Int,
    artifacts: Seq[Artifact])
} 
Example 56
Source File: ArrayOfInstantsReading.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8
import java.time.Instant

import com.avsystem.commons.serialization.json._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.AVSystemCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.BorerJsonEncodersDecoders._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.UPickleReaderWriters._
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.rallyhealth.weejson.v1.jackson.FromJson
import com.rallyhealth.weepickle.v1.WeePickle.ToScala
import io.circe.parser._
import org.openjdk.jmh.annotations.Benchmark
import play.api.libs.json.Json
import spray.json._

class ArrayOfInstantsReading extends ArrayOfInstantsBenchmark {
  @Benchmark
  def avSystemGenCodec(): Array[Instant] = JsonStringInput.read[Array[Instant]](new String(jsonBytes, UTF_8))

  @Benchmark
  def borer(): Array[Instant] = io.bullet.borer.Json.decode(jsonBytes).to[Array[Instant]].value

  @Benchmark
  def circe(): Array[Instant] = decode[Array[Instant]](new String(jsonBytes, UTF_8)).fold(throw _, identity)

  @Benchmark
  def jacksonScala(): Array[Instant] = jacksonMapper.readValue[Array[Instant]](jsonBytes)

  @Benchmark
  def jsoniterScala(): Array[Instant] = readFromArray[Array[Instant]](jsonBytes)

  @Benchmark
  def playJson(): Array[Instant] = Json.parse(jsonBytes).as[Array[Instant]]

  @Benchmark
  def sprayJson(): Array[Instant] = JsonParser(jsonBytes).convertTo[Array[Instant]]

  @Benchmark
  def uPickle(): Array[Instant] = read[Array[Instant]](jsonBytes)

  @Benchmark
  def weePickle(): Array[Instant] = FromJson(jsonBytes).transform(ToScala[Array[Instant]])
} 
Example 57
Source File: CirceEncodersDecoders.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.time.Instant
import java.util.Base64

import com.github.plokhotnyuk.jsoniter_scala.benchmark.BitMask.toBitMask
import io.circe.Decoder._
import io.circe.Encoder._
import io.circe._
import io.circe.generic.extras._
import io.circe.generic.extras.decoding.UnwrappedDecoder
import io.circe.generic.extras.encoding.UnwrappedEncoder
import io.circe.generic.extras.semiauto._

import scala.collection.immutable.{BitSet, IntMap}
import scala.collection.mutable
import scala.util.Try

object CirceEncodersDecoders {
  val printer: Printer = Printer.noSpaces.copy(dropNullValues = true, reuseWriters = true, predictSize = true)
  val prettyPrinter: Printer = Printer.spaces2.copy(dropNullValues = true, reuseWriters = true, predictSize = true)
  val escapingPrinter: Printer = printer.copy(escapeNonAscii = true)
  implicit val config: Configuration = Configuration.default.withDefaults.withDiscriminator("type")
  implicit val adtC3c: Codec[ADTBase] = deriveConfiguredCodec[ADTBase]
  implicit val anyValsC3c: Codec[AnyVals] = {
    implicit def valueClassEncoder[A <: AnyVal : UnwrappedEncoder]: Encoder[A] = implicitly

    implicit def valueClassDecoder[A <: AnyVal : UnwrappedDecoder]: Decoder[A] = implicitly

    deriveConfiguredCodec[AnyVals]
  }
  val (base64D5r: Decoder[Array[Byte]], base64E5r: Encoder[Array[Byte]]) =
    (Decoder.decodeString.map[Array[Byte]](Base64.getDecoder.decode),
      Encoder.encodeString.contramap[Array[Byte]](Base64.getEncoder.encodeToString))
  implicit val bidRequestC3c: Codec[OpenRTB.BidRequest] = {
    import io.circe.generic.extras.auto._

    deriveConfiguredCodec[OpenRTB.BidRequest]
  }
  implicit val bigIntE5r: Encoder[BigInt] = encodeJsonNumber
    .contramap(x => JsonNumber.fromDecimalStringUnsafe(new java.math.BigDecimal(x.bigInteger).toPlainString))
  implicit val (bitSetD5r: Decoder[BitSet], bitSetE5r: Encoder[BitSet]) =
    (Decoder.decodeArray[Int].map(arr => BitSet.fromBitMaskNoCopy(toBitMask(arr, Int.MaxValue ))),
      Encoder.encodeSeq[Int].contramapArray((m: mutable.BitSet) => m.toVector))
  implicit val distanceMatrixC3c: Codec[GoogleMapsAPI.DistanceMatrix] = {
    import io.circe.generic.auto._

    deriveConfiguredCodec[GoogleMapsAPI.DistanceMatrix]
  }
  implicit val gitHubActionsAPIC3c: Codec[GitHubActionsAPI.Response] = {
    implicit val c1: Codec[GitHubActionsAPI.Artifact] =
      Codec.forProduct9("id", "node_id", "name", "size_in_bytes", "url", "archive_download_url",
        "expired", "created_at", "expires_at") {
        (id: Long, node_id: String, name: String, size_in_bytes: Long, url: String, archive_download_url: String,
        expired: String, created_at: Instant, expires_at: Instant) =>
          GitHubActionsAPI.Artifact(id, node_id, name, size_in_bytes, url, archive_download_url,
            expired.toBoolean, created_at, expires_at)
      } { a =>
        (a.id, a.node_id, a.name, a.size_in_bytes, a.url, a.archive_download_url,
        a.expired.toString, a.created_at, a.expires_at)
      }
    deriveConfiguredCodec[GitHubActionsAPI.Response]
  }
  implicit val extractFieldsC3c: Codec[ExtractFields] = deriveConfiguredCodec[ExtractFields]
  implicit val geoJSONC3c: Codec[GeoJSON.GeoJSON] = {
    implicit val c1: Codec[GeoJSON.SimpleGeometry] = deriveConfiguredCodec[GeoJSON.SimpleGeometry]
    implicit val c2: Codec[GeoJSON.Geometry] = deriveConfiguredCodec[GeoJSON.Geometry]
    implicit val c3: Codec[GeoJSON.SimpleGeoJSON] = deriveConfiguredCodec[GeoJSON.SimpleGeoJSON]
    deriveConfiguredCodec[GeoJSON.GeoJSON]
  }
  implicit val (intMapD5r: Decoder[IntMap[Boolean]], intMapE5r: Encoder[IntMap[Boolean]]) =
    (Decoder.decodeMap[Int, Boolean].map(_.foldLeft(IntMap.empty[Boolean])((m, p) => m.updated(p._1, p._2))),
      Encoder.encodeMap[Int, Boolean].contramapObject((m: IntMap[Boolean]) => m))
  implicit val (longMapD5r: Decoder[mutable.LongMap[Boolean]], longMapE5r: Encoder[mutable.LongMap[Boolean]]) =
    (Decoder.decodeMap[Long, Boolean].map(_.foldLeft(new mutable.LongMap[Boolean])((m, p) => m += (p._1, p._2))),
      Encoder.encodeMapLike[Long, Boolean, mutable.Map].contramapObject((m: mutable.LongMap[Boolean]) => m))
  implicit val missingRequiredFieldsC3c: Codec[MissingRequiredFields] = deriveConfiguredCodec[MissingRequiredFields]
  implicit val nestedStructsC3c: Codec[NestedStructs] = deriveConfiguredCodec[NestedStructs]
  implicit val (suitD5r: Decoder[Suit], suitE5r: Encoder[Suit]) =
    (decodeString.emap(s => Try(Suit.valueOf(s)).fold[Either[String, Suit]](_ => Left("Suit"), Right.apply)),
      encodeString.contramap[Suit](_.name))
  implicit val suitADTC3c: Codec[SuitADT] = deriveEnumerationCodec[SuitADT]
  implicit val (suitEnumDecoder: Decoder[SuitEnum.Value], suitEnumEncoder: Encoder[SuitEnum.Value]) =
    (decodeEnumeration(SuitEnum), encodeEnumeration(SuitEnum))
  implicit val primitivesC3c: Codec[Primitives] = deriveConfiguredCodec[Primitives]
  implicit val tweetC3c: Codec[TwitterAPI.Tweet] = {
    import io.circe.generic.auto._

    deriveConfiguredCodec[TwitterAPI.Tweet]
  }
} 
Example 58
Source File: BsonCodec.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package mongo

import java.time.Instant

import org.bson.types.ObjectId
import org.bson.{BsonArray, BsonBinary, BsonBoolean, BsonDateTime, BsonDocument, BsonDouble, BsonInt32, BsonInt64, BsonObjectId, BsonString, BsonValue}

import _root_.scala.collection.compat._
import _root_.scala.language.higherKinds


trait BsonCodec[A, BSON <: BsonValue] {self =>
  def fromBson(bson: BSON): A
  def toBson(a: A): BSON

  def map[B](fa: A => B, fb: B => A): BsonCodec[B, BSON] = new BsonCodec[B, BSON] {
    def fromBson(bson: BSON) = fa(self.fromBson(bson))
    def toBson(b: B) = self.toBson(fb(b))
  }

  def key(key: String): DocKey[A, BSON] = new DocKey[A, BSON](key, this)

  def collection[C[X] <: IterableOnce[X]](implicit fac: Factory[A, C[A]]): BsonCodec[C[A], BsonArray] =
    BsonCodec.create[C[A], BsonArray](
      ba => ba.iterator().asScala.map(bv => self.fromBson(bv.asInstanceOf[BSON])).to(fac),
      col => new BsonArray(col.iterator.map(self.toBson).to(JList))
    )
}

object BsonCodec {
  def create[A, BSON <: BsonValue](from: BSON => A, to: A => BSON): BsonCodec[A, BSON] = new BsonCodec[A, BSON] {
    def fromBson(bson: BSON) = from(bson)
    def toBson(a: A) = to(a)
  }

  private object _identity extends BsonCodec[BsonValue, BsonValue] {
    override def fromBson(bson: BsonValue) = bson
    override def toBson(a: BsonValue) = a
  }
  def identity[B <: BsonValue] = _identity.asInstanceOf[BsonCodec[B, B]]

  val objectId = create[ObjectId, BsonObjectId](_.getValue, new BsonObjectId(_))

  val byteArray = create[Array[Byte], BsonBinary](_.getData, new BsonBinary(_))

  val boolean = create[Boolean, BsonBoolean](_.getValue, new BsonBoolean(_))
  val int32 = create[Int, BsonInt32](_.getValue, new BsonInt32(_))
  val int64 = create[Long, BsonInt64](_.getValue, new BsonInt64(_))
  val double = create[Double, BsonDouble](_.getValue, new BsonDouble(_))

  val string = create[String, BsonString](_.getValue, new BsonString(_))

  val doc = create[Doc, BsonDocument](new Doc(_), _.toBson)
  val instant = create[Instant, BsonDateTime](
    bdt => Instant.ofEpochMilli(bdt.getValue),
    i => new BsonDateTime(i.toEpochMilli)
  )
} 
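A brief usage sketch for the combinators above; the CreatedAt wrapper and the "createdAt" field name are hypothetical, introduced only to show map, key and collection in action:

import java.time.Instant

import com.avsystem.commons.mongo.BsonCodec

object BsonCodecUsageSketch {
  // hypothetical wrapper type, used only for this illustration
  final case class CreatedAt(value: Instant)

  // adapt the built-in Instant <-> BsonDateTime codec to the wrapper
  val createdAtCodec = BsonCodec.instant.map[CreatedAt](CreatedAt(_), _.value)

  // bind the adapted codec to a document field
  val createdAtKey = createdAtCodec.key("createdAt")

  // derive a codec for a collection of instants backed by a BsonArray
  val instantsCodec = BsonCodec.instant.collection[List]
}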
Example 59
Source File: IsoInstant.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package serialization

import java.time.Instant
import java.time.format.DateTimeParseException

import com.avsystem.commons.serialization.GenCodec.ReadFailure

object IsoInstant {
  def format(millis: Long): String = {
    val res = Instant.ofEpochMilli(millis).toString
    // add trailing .000Z if omitted to align with JS implementation
    if (res.charAt(res.length - 5) == '.') res
    else res.substring(0, res.length - 1) + ".000Z"
  }

  def parse(string: String): Long =
    try Instant.parse(string).toEpochMilli catch {
      case _: DateTimeParseException => throw new ReadFailure(s"invalid ISO instant: $string")
    }
} 
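For reference, a small round-trip sketch of the helper above (the millisecond values are illustrative):

import com.avsystem.commons.serialization.IsoInstant

object IsoInstantSketch extends App {
  val millis = 1234L
  val iso    = IsoInstant.format(millis) // "1970-01-01T00:00:01.234Z"
  assert(IsoInstant.parse(iso) == millis)

  // whole seconds gain an explicit ".000Z" suffix, matching the JS formatter
  assert(IsoInstant.format(1000L) == "1970-01-01T00:00:01.000Z")
}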
Example 60
Source File: JavaTimeInterop.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package jiop

import java.time.Instant

import com.avsystem.commons.jiop.JavaTimeInterop.InstantOps

trait JavaTimeInterop {
  implicit def instantOps(instant: Instant): InstantOps = new InstantOps(instant)
}
object JavaTimeInterop {
  class InstantOps(private val instant: Instant) extends AnyVal {
    def truncateToTimestamp: Timestamp = Timestamp(instant.toEpochMilli)
    def truncateToJDate: JDate = new JDate(instant.toEpochMilli)
  }
} 
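A minimal sketch of mixing in the trait above to get the Instant extension methods; it assumes the JDate and Timestamp aliases from the commons library resolve on the classpath, as in the original module:

import java.time.Instant

import com.avsystem.commons.jiop.JavaTimeInterop

object JavaTimeInteropSketch extends JavaTimeInterop {
  val now = Instant.now()

  // millisecond-precision conversions provided by InstantOps
  val asTimestamp = now.truncateToTimestamp
  val asJDate     = now.truncateToJDate
}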
Example 61
Source File: LoadBalancer.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
package com.olegych.scastie.balancer

import java.time.Instant
import java.time.temporal.ChronoUnit

import com.olegych.scastie.api._
import org.slf4j.LoggerFactory

import scala.util.Random

case class Ip(v: String)

case class Task(config: Inputs, ip: Ip, taskId: TaskId, ts: Instant)

case class TaskHistory(data: Vector[Task], maxSize: Int) {
  def add(task: Task): TaskHistory = {
    val cappedData = if (data.length < maxSize) data else data.drop(1)
    copy(data = cappedData :+ task)
  }
}
case class LoadBalancer[R, S <: ServerState](servers: Vector[Server[R, S]]) {
  private val log = LoggerFactory.getLogger(getClass)

  def done(taskId: TaskId): Option[LoadBalancer[R, S]] = {
    Some(copy(servers = servers.map(_.done(taskId))))
  }

  def addServer(server: Server[R, S]): LoadBalancer[R, S] = {
    copy(servers = server +: servers)
  }

  def removeServer(ref: R): LoadBalancer[R, S] = {
    copy(servers = servers.filterNot(_.ref == ref))
  }

  def getRandomServer: Option[Server[R, S]] = {
    def random[T](xs: Seq[T]) = if (xs.nonEmpty) Some(xs(Random.nextInt(xs.size))) else None
    random(servers.filter(_.state.isReady))
  }

  def add(task: Task): Option[(Server[R, S], LoadBalancer[R, S])] = {
    log.info("Task added: {}", task.taskId)

    val (availableServers, unavailableServers) =
      servers.partition(_.state.isReady)

    def lastTenMinutes(v: Vector[Task]) = v.filter(_.ts.isAfter(Instant.now.minus(10, ChronoUnit.MINUTES)))
    def lastWithIp(v: Vector[Task]) = lastTenMinutes(v.filter(_.ip == task.ip)).lastOption

    if (availableServers.nonEmpty) {
      val selectedServer = availableServers.maxBy { s =>
        (
          s.mailbox.length < 3, //allow reload if server gets busy
          !s.currentConfig.needsReload(task.config), //pick those without need for reload
          -s.mailbox.length, //then those least busy
          lastTenMinutes(s.mailbox ++ s.history.data).exists(!_.config.needsReload(task.config)), //then those which use(d) this config
          lastWithIp(s.mailbox).orElse(lastWithIp(s.history.data)).map(_.ts.toEpochMilli), //then one most recently used by this ip, if any
          s.mailbox.lastOption.orElse(s.history.data.lastOption).map(-_.ts.toEpochMilli).getOrElse(0L) //then one least recently used
        )
      }
      val updatedServers = availableServers.map(old => if (old.id == selectedServer.id) old.add(task) else old)
      Some(
        (
          selectedServer,
          copy(
            servers = updatedServers ++ unavailableServers,
//            history = updatedHistory
          )
        )
      )
    } else {
      if (servers.isEmpty) {
        val msg = "All instances are down"
        log.error(msg)
      }
      None
    }
  }

} 
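The server selection in add relies on the standard lexicographic Ordering of tuples: Booleans sort false before true, so each earlier criterion acts as the primary key and later ones only break ties. A tiny standalone illustration of that mechanism (the tuples below are simplified stand-ins for the real criteria):

object TupleOrderingSketch extends App {
  // (serverNotBusy, noReloadNeeded, -queueLength)
  val candidates = List(
    (true, false, -2),
    (true, true, -3),
    (false, true, 0)
  )

  // maxBy picks (true, true, -3): availability wins first, then config match, then load
  val expected = (true, true, -3)
  assert(candidates.maxBy(identity) == expected)
}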
Example 62
Source File: LoadBalancerTestUtils.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
package com.olegych.scastie.balancer

import java.time.Instant

import com.olegych.scastie.api._
import org.scalatest.Assertion
import org.scalatest.funsuite.AnyFunSuite

object TestTaskId {
  def apply(i: Int) = TaskId(SnippetId(i.toString, None))
}

case class TestServerRef(id: Int)
case class TestState(state: String, ready: Boolean = true) extends ServerState {
  def isReady: Boolean = ready
}

trait LoadBalancerTestUtils extends AnyFunSuite with TestUtils {
  type TestServer0 = Server[TestServerRef, TestState]

  type TestLoadBalancer0 = LoadBalancer[TestServerRef, TestState]

  @transient private var taskId = 1000
  def add(balancer: TestLoadBalancer0, config: Inputs): TestLoadBalancer0 = synchronized {
    val (_, balancer0) = balancer.add(Task(config, nextIp, TestTaskId(taskId), Instant.now)).get
    taskId += 1
    balancer0
  }

  // Ordering only for debug purposes
  object Multiset {
    def apply[T: Ordering](xs: Seq[T]): Multiset[T] =
      Multiset(xs.groupBy(x => x).map { case (k, vs) => (k, vs.size) })
  }
  case class Multiset[T: Ordering](inner: Map[T, Int]) {
    override def toString: String = {
      val size = inner.values.sum

      inner.toList
        .sortBy { case (k, v) => (-v, k) }
        .map {
          case (k, v) => s"$k($v)"
        }
        .mkString("Multiset(", ", ", s") {$size}")
    }
  }

  def assertConfigs(balancer: TestLoadBalancer0)(columns: Seq[String]*): Assertion = {
    assert(
      Multiset(balancer.servers.map(_.currentConfig.sbtConfigExtra)) == Multiset(
        columns.flatten.map(i => sbtConfig(i.toString).sbtConfigExtra)
      )
    )
  }

  @transient private var serverId = 0
  def server(
      c: String,
      mailbox: Vector[Task] = Vector(),
      state: TestState = TestState("default-state")
  ): TestServer0 = synchronized {
    val t = Server(TestServerRef(serverId), sbtConfig(c), state, mailbox)
    serverId += 1
    t
  }

  def servers(columns: Seq[String]*): Vector[TestServer0] = {
    columns.to(Vector).flatten.map(c => server(c))
  }

  @transient private var currentIp = 0
  def nextIp: Ip = synchronized {
    val t = Ip("ip" + currentIp)
    currentIp += 1
    t
  }

  def server(v: Int): TestServerRef = TestServerRef(v)

  def code(code: String) = Inputs.default.copy(code = code)
  def sbtConfig(sbtConfig: String) = Inputs.default.copy(sbtConfigExtra = sbtConfig)

  def history(columns: Seq[String]*): TaskHistory = {
    val records =
      columns.to(Vector).flatten.map(i => Task(Inputs.default.copy(code = i.toString), nextIp, TestTaskId(1), Instant.now)).reverse

    TaskHistory(Vector(records: _*), maxSize = 20)
  }
} 
Example 63
Source File: InstrumentedInputs.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
package com.olegych.scastie.instrumentation

import java.io.{PrintWriter, StringWriter}
import java.time.Instant

import com.olegych.scastie.api._

import scala.meta.parsers.Parsed

case class InstrumentationFailureReport(message: String, line: Option[Int]) {
  def toProgress(snippetId: SnippetId): SnippetProgress = {
    SnippetProgress.default.copy(
      ts = Some(Instant.now.toEpochMilli),
      snippetId = Some(snippetId),
      compilationInfos = List(Problem(Error, line, message))
    )
  }
}

object InstrumentedInputs {
  def apply(inputs0: Inputs): Either[InstrumentationFailureReport, InstrumentedInputs] = {
    if (inputs0.isWorksheetMode) {
      val instrumented = Instrument(inputs0.code, inputs0.target).map { instrumentedCode =>
        inputs0.copy(code = instrumentedCode)
      }

      instrumented match {
        case Right(inputs) =>
          success(inputs)

        case Left(error) =>
          import InstrumentationFailure._

          error match {
            case HasMainMethod =>
              Right(InstrumentedInputs(inputs0.copy(_isWorksheetMode = false), isForcedProgramMode = true))

            case UnsupportedDialect =>
              Left(InstrumentationFailureReport("This Scala target does not have a worksheet mode", None))

            case ParsingError(Parsed.Error(pos, message, _)) =>
              val lineOffset = Instrument.getParsingLineOffset(inputs0)
              Left(InstrumentationFailureReport(message, Some(pos.startLine + lineOffset)))

            case InternalError(exception) =>
              val errors = new StringWriter()
              exception.printStackTrace(new PrintWriter(errors))
              val fullStack = errors.toString

              Left(InstrumentationFailureReport(fullStack, None))
          }

      }
    } else {
      success(inputs0)
    }
  }

  private def success(inputs: Inputs): Either[InstrumentationFailureReport, InstrumentedInputs] = {
    Right(InstrumentedInputs(inputs, isForcedProgramMode = false))
  }
}

case class InstrumentedInputs(
    inputs: Inputs,
    isForcedProgramMode: Boolean
) 
Example 64
Source File: SessionManager.scala    From pizza-auth-3   with MIT License 5 votes vote down vote up
package moe.pizza.auth.webapp

import java.time.Instant

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import moe.pizza.auth.interfaces.UserDatabase
import moe.pizza.auth.webapp.SessionManager._
import moe.pizza.auth.webapp.Types.{HydratedSession, Session, Session2}
import org.http4s.{HttpService, _}
import org.http4s.server._
import org.slf4j.LoggerFactory
import pdi.jwt.{JwtAlgorithm, JwtCirce, JwtClaim}
import io.circe.generic.auto._
import Utils._
import io.circe.Decoder.Result

import scala.util.Try

object SessionManager {
  val HYDRATEDSESSION = AttributeKey[HydratedSession]("HYDRATEDSESSION")
  val LOGOUT = AttributeKey[String]("LOGOUT")
  val COOKIESESSION = "authsession"
}

class SessionManager(secretKey: String, ud: UserDatabase)
    extends HttpMiddleware {
  val log = LoggerFactory.getLogger(getClass)
  val OM = new ObjectMapper()
  OM.registerModule(DefaultScalaModule)

  case class MyJwt(exp: Long, iat: Long, session: String)

  implicit def toOption[A](e: Result[A]): Option[A] = {
    e match {
      case Left(_) => None
      case Right(a) => Some(a)
    }
  }

  override def apply(s: HttpService): HttpService = Service.lift { req =>
    log.info(s"Intercepting request ${req}")
    // TODO: this used to be nice with toOption, what happened
    val sessions =
      req.headers.get(headers.Cookie).toList.flatMap(_.values.list).flatMap {
        header =>
          JwtCirce.decodeJson(header.content, secretKey, Seq(JwtAlgorithm.HS256))
            .toOption
            .flatMap { jwt =>
              jwt.as[MyJwt] match {
                case Right(x) => Some(x)
                case Left(_) => None
              }
            }
            .flatMap { myjwt =>
              Try { OM.readValue(myjwt.session, classOf[Session2]) }.toOption
            }
      }
    log.info(s"found sessions: ${sessions}")

    // if we didn't find a valid session, make them one
    val session =
      sessions.headOption.getOrElse(Session2(List.empty, None, None, None))

    // do the inner request
    val hydrated = session.hydrate(ud)
    log.info(s"running inner router with hydrated session ${hydrated}")
    val response =
      s(req.copy(attributes = req.attributes.put(HYDRATEDSESSION, hydrated)))

    response.map { resp =>
      // do all of this once the request has been created
      val sessionToSave = resp.attributes
        .get(HYDRATEDSESSION)
        .map(_.dehydrate())
        .getOrElse(session)
      val oldsessions = resp.headers
        .get(headers.Cookie)
        .toList
        .flatMap(_.values.list)
        .filter(_.name == COOKIESESSION)
      if (resp.attributes.get(LOGOUT).isEmpty) {
        log.info(s"saving the session as a cookie")
        val claim = JwtClaim(
            expiration = Some(
              Instant.now
                .plusSeconds(86400 * 30)
                .getEpochSecond), // lasts 30 days
            issuedAt = Some(Instant.now.getEpochSecond)
          ) + ("session", OM.writeValueAsString(sessionToSave))
        val token = JwtCirce.encode(claim, secretKey, JwtAlgorithm.HS256)
        resp.addCookie(
          new Cookie(COOKIESESSION, token, None, None, None, path = Some("/"))
        )
      } else {
        log.info(s"log out flag was set, not saving any cookies")
        resp.removeCookie(COOKIESESSION)
      }
    }
  }
} 
Example 65
Source File: ProjectState.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.admin.projects

import java.time.Instant
import java.util.UUID

import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri

sealed trait ProjectState extends Product with Serializable

object ProjectState {

  
  final case class Current(
      id: UUID,
      label: String,
      organizationUuid: UUID,
      organizationLabel: String,
      description: Option[String],
      apiMappings: Map[String, AbsoluteIri],
      base: AbsoluteIri,
      vocab: AbsoluteIri,
      rev: Long,
      instant: Instant,
      subject: Subject,
      deprecated: Boolean
  ) extends ProjectState

} 
Example 66
Source File: OrganizationState.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.admin.organizations

import java.time.Instant
import java.util.UUID

import ch.epfl.bluebrain.nexus.admin.types.{ResourceF, ResourceMetadata}
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv


    def toResourceMetadata(implicit http: HttpConfig): ResourceMetadata =
      ResourceF.unit(
        http.orgsBaseIri + label,
        id,
        rev,
        deprecated,
        Set(nxv.Organization.value),
        createdAt,
        createdBy,
        updatedAt,
        updatedBy
      )
  }

} 
Example 67
Source File: OrganizationEvent.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.admin.organizations

import java.time.Instant
import java.util.UUID

import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.Contexts._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.syntax._
import io.circe.{Encoder, Json}

import scala.annotation.nowarn


  final case class OrganizationDeprecated(
      id: UUID,
      rev: Long,
      instant: Instant,
      subject: Subject
  ) extends OrganizationEvent

  object JsonLd {

    @nowarn("cat=unused")
    implicit private val config: Configuration = Configuration.default
      .withDiscriminator("@type")
      .copy(transformMemberNames = {
        case nxv.`@id`.name        => nxv.uuid.prefix
        case nxv.label.name        => nxv.label.prefix
        case nxv.rev.name          => nxv.rev.prefix
        case nxv.instant.name      => nxv.instant.prefix
        case nxv.eventSubject.name => nxv.eventSubject.prefix
        case other                 => other
      })

    @nowarn("cat=unused")
    implicit private def subjectIdEncoder(implicit http: HttpConfig): Encoder[Subject] =
      Encoder.encodeJson.contramap(_.id.asJson)

    @nowarn("cat=unused")
    implicit final def orgEventEncoder(implicit http: HttpConfig): Encoder[OrganizationEvent] =
      Encoder.encodeJson.contramap[OrganizationEvent] { ev =>
        deriveConfiguredEncoder[OrganizationEvent]
          .mapJson { json =>
            val rev = Json.obj(nxv.rev.prefix -> Json.fromLong(ev.rev))
            json
              .deepMerge(rev)
              .addContext(adminCtxUri)
              .addContext(resourceCtxUri)
          }
          .apply(ev)
      }
  }
} 
Example 68
Source File: StorageIndexer.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.indexing

import java.time.Instant

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, Source}
import akka.util.Timeout
import cats.effect.{Effect, Timer}
import cats.implicits._
import ch.epfl.bluebrain.nexus.admin.client.AdminClient
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.kg.cache.{ProjectCache, StorageCache}
import ch.epfl.bluebrain.nexus.kg.config.KgConfig.StorageConfig
import ch.epfl.bluebrain.nexus.kg.resources._
import ch.epfl.bluebrain.nexus.kg.storage.Storage
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.sourcing.projections.ProgressFlow.{PairMsg, ProgressFlowElem}
import ch.epfl.bluebrain.nexus.sourcing.projections._
import com.typesafe.scalalogging.Logger

import scala.concurrent.ExecutionContext

// $COVERAGE-OFF$
object StorageIndexer {

  implicit private val log = Logger[StorageIndexer.type]

  def start[F[_]: Timer](storages: Storages[F], storageCache: StorageCache[F])(implicit
      projectCache: ProjectCache[F],
      F: Effect[F],
      as: ActorSystem,
      projectInitializer: ProjectInitializer[F],
      adminClient: AdminClient[F],
      config: ServiceConfig
  ): StreamSupervisor[F, Unit] = {

    implicit val authToken: Option[AccessToken] = config.serviceAccount.credentials
    implicit val indexing: IndexingConfig       = config.kg.keyValueStore.indexing
    implicit val ec: ExecutionContext           = as.dispatcher
    implicit val tm: Timeout                    = Timeout(config.kg.keyValueStore.askTimeout)
    implicit val storageConfig: StorageConfig   = config.kg.storage
    val name                                    = "storage-indexer"

    def toStorage(event: Event): F[Option[(Storage, Instant)]] =
      fetchProject(event.organization, event.id.parent, event.subject).flatMap { implicit project =>
        storages.fetchStorage(event.id).value.map {
          case Left(err)           =>
            log.error(s"Error on event '${event.id.show} (rev = ${event.rev})', cause: '${err.msg}'")
            None
          case Right(timedStorage) => Some(timedStorage)
        }
      }

    val source: Source[PairMsg[Any], _]   = cassandraSource(s"type=${nxv.Storage.value.show}", name)
    val flow: Flow[PairMsg[Any], Unit, _] = ProgressFlowElem[F, Any]
      .collectCast[Event]
      .groupedWithin(indexing.batch, indexing.batchTimeout)
      .distinct()
      .mergeEmit()
      .mapAsync(toStorage)
      .collectSome[(Storage, Instant)]
      .runAsync { case (storage, instant) => storageCache.put(storage)(instant) }()
      .flow
      .map(_ => ())

    StreamSupervisor.startSingleton(F.delay(source.via(flow)), name)
  }
}
// $COVERAGE-ON$ 
Example 69
Source File: StorageCache.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import java.time.{Clock, Instant}
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap

import akka.actor.ActorSystem
import cats.Monad
import cats.effect.{Effect, Timer}
import cats.implicits._
import ch.epfl.bluebrain.nexus.commons.cache.{KeyValueStore, KeyValueStoreConfig}
import ch.epfl.bluebrain.nexus.kg.RevisionedValue
import ch.epfl.bluebrain.nexus.kg.cache.Cache._
import ch.epfl.bluebrain.nexus.kg.cache.StorageProjectCache._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri

class StorageCache[F[_]: Effect: Timer] private (projectToCache: ConcurrentHashMap[UUID, StorageProjectCache[F]])(
    implicit
    as: ActorSystem,
    config: KeyValueStoreConfig,
    clock: Clock
) {

  
private class StorageProjectCache[F[_]: Monad] private (store: KeyValueStore[F, AbsoluteIri, RevisionedStorage])
    extends Cache[F, AbsoluteIri, RevisionedStorage](store) {

  implicit private val ordering: Ordering[RevisionedStorage] = Ordering.by((s: RevisionedStorage) => s.rev).reverse

  implicit private def revisioned(storage: Storage)(implicit instant: Instant): RevisionedStorage =
    RevisionedValue(instant.toEpochMilli, storage)

  def get: F[List[Storage]] =
    store.values.map(_.toList.sorted.map(_.value))

  def getDefault: F[Option[Storage]]                            =
    get.map(_.collectFirst { case storage if storage.default => storage })

  def getBy(id: AbsoluteIri): F[Option[Storage]]                =
    get(id).map(_.collectFirst { case RevisionedValue(_, storage) if storage.id == id => storage })

  def put(storage: Storage)(implicit instant: Instant): F[Unit] =
    if (storage.deprecated) store.remove(storage.id)
    else store.put(storage.id, storage)
}

private object StorageProjectCache {

  type RevisionedStorage = RevisionedValue[Storage]

  def apply[F[_]: Effect: Timer](
      project: ProjectRef
  )(implicit as: ActorSystem, config: KeyValueStoreConfig): StorageProjectCache[F] =
    new StorageProjectCache(
      KeyValueStore.distributed(s"storage-${project.id}", (_, storage) => storage.value.rev)
    )

}

object StorageCache {

  def apply[F[_]: Timer: Effect](implicit as: ActorSystem, config: KeyValueStoreConfig, clock: Clock): StorageCache[F] =
    new StorageCache(new ConcurrentHashMap[UUID, StorageProjectCache[F]]())
} 
Example 70
Source File: PermissionsEvent.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.permissions

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.iam.types.{Identity, Permission}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.Contexts._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import io.circe.Encoder
import io.circe.generic.extras.Configuration

import scala.annotation.nowarn


  final case class PermissionsDeleted(
      rev: Long,
      instant: Instant,
      subject: Subject
  ) extends PermissionsEvent

  object JsonLd {
    import io.circe.generic.extras.semiauto._

    @nowarn("cat=unused")
    implicit def permissionsEventEncoder(implicit http: HttpConfig): Encoder[Event] = {
      implicit val config: Configuration            = Configuration.default
        .withDiscriminator("@type")
        .copy(transformMemberNames = {
          case "rev"     => "_rev"
          case "instant" => "_instant"
          case "subject" => "_subject"
          case other     => other
        })
      implicit val subjectEncoder: Encoder[Subject] = Identity.subjectIdEncoder
      deriveConfiguredEncoder[Event]
        .mapJson { json =>
          json
            .addContext(iamCtxUri)
            .addContext(resourceCtxUri)
        }
    }
  }
} 
Example 71
Source File: PermissionsState.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.permissions

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.IamConfig.PermissionsConfig
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsState.{Current, Initial}
import ch.epfl.bluebrain.nexus.iam.types.Identity.{Anonymous, Subject}
import ch.epfl.bluebrain.nexus.iam.types.{Permission, ResourceMetadata}
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig

import scala.annotation.nowarn


  final case class Current(
      rev: Long,
      permissions: Set[Permission],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ) extends PermissionsState {

    override def resource(implicit http: HttpConfig, @nowarn("cat=unused") pc: PermissionsConfig): Resource =
      resourceMetadata.map(_ => permissions)

    override def resourceMetadata(implicit http: HttpConfig): ResourceMetadata =
      ResourceMetadata(id, rev, types, createdAt, createdBy, updatedAt, updatedBy)
  }
} 
Example 72
Source File: AclEvent.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.acls

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.types.Identity
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.Contexts._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import io.circe.Encoder
import io.circe.generic.extras.Configuration

import scala.annotation.nowarn


  final case class AclDeleted(
      path: Path,
      rev: Long,
      instant: Instant,
      subject: Subject
  ) extends AclEvent

  object JsonLd {
    import io.circe.generic.extras.semiauto._

    @nowarn("cat=unused")
    implicit def aclEventEncoder(implicit httpConfig: HttpConfig): Encoder[AclEvent] = {
      implicit val config: Configuration                    = Configuration.default
        .withDiscriminator("@type")
        .copy(transformMemberNames = {
          case "rev"     => "_rev"
          case "instant" => "_instant"
          case "subject" => "_subject"
          case "path"    => "_path"
          case other     => other
        })
      implicit val arrayEncoder: Encoder[AccessControlList] = AccessControlList.aclArrayEncoder
      implicit val subjectEncoder: Encoder[Subject]         = Identity.subjectIdEncoder
      deriveConfiguredEncoder[AclEvent]
        .mapJson { json =>
          json
            .addContext(iamCtxUri)
            .addContext(resourceCtxUri)
        }
    }
  }
} 
Example 73
Source File: package.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.IamConfig.PermissionsConfig
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.{Permission, ResourceF, ResourceMetadata}
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.sourcing.Aggregate

package object acls {

  
  def defaultResourceOnSlash(implicit http: HttpConfig, pc: PermissionsConfig): Resource =
    ResourceF(
      http.aclsIri + "/",
      0L,
      types,
      Instant.EPOCH,
      Anonymous,
      Instant.EPOCH,
      Anonymous,
      AccessControlList(Anonymous -> pc.minimum)
    )
} 
Example 74
Source File: ResourceMetadata.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri

object ResourceMetadata {

  
  def apply(
      id: AbsoluteIri,
      rev: Long,
      types: Set[AbsoluteIri],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ): ResourceMetadata =
    ResourceF.unit(id, rev, types, createdAt, createdBy, updatedAt, updatedBy)
} 
Example 75
Source File: ResourceF.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.syntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.Contexts._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.syntax._
import io.circe.{Encoder, Json}


  def unit(
      id: AbsoluteIri,
      rev: Long,
      types: Set[AbsoluteIri],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ): ResourceF[Unit] =
    ResourceF(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, ())

  implicit val permsEncoder: Encoder[Set[Permission]]                                         =
    Encoder.instance(perms => Json.obj("permissions" -> Json.fromValues(perms.toList.sortBy(_.value).map(_.asJson))))

  implicit def resourceFEncoder[A: Encoder](implicit http: HttpConfig): Encoder[ResourceF[A]] =
    Encoder.encodeJson.contramap { r => resourceMetaEncoder.apply(r.discard) deepMerge r.value.asJson }

  implicit def resourceMetaEncoder(implicit http: HttpConfig): Encoder[ResourceMetadata] =
    Encoder.encodeJson.contramap {
      case ResourceF(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, _: Unit) =>
        val jsonTypes = types.toList match {
          case Nil      => Json.Null
          case t :: Nil => Json.fromString(t.lastSegment.getOrElse(t.asString))
          case _        => Json.arr(types.map(t => Json.fromString(t.lastSegment.getOrElse(t.asString))).toSeq: _*)
        }
        Json
          .obj(
            "@id"                -> id.asJson,
            "@type"              -> jsonTypes,
            nxv.rev.prefix       -> Json.fromLong(rev),
            nxv.createdBy.prefix -> createdBy.id.asJson,
            nxv.updatedBy.prefix -> updatedBy.id.asJson,
            nxv.createdAt.prefix -> Json.fromString(createdAt.toString),
            nxv.updatedAt.prefix -> Json.fromString(updatedAt.toString)
          )
          .addContext(iamCtxUri)
          .addContext(resourceCtxUri)
    }
} 
Example 76
Source File: TarFlowSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.archives

import java.nio.file.Files
import java.time.{Clock, Instant, ZoneId}

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.storage.digestSink
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with TestHelper
    with ScalaFutures {

  implicit private val ec    = system.dispatcher
  implicit private val clock = Clock.fixed(Instant.EPOCH, ZoneId.systemDefault())

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(55.second, 150.milliseconds)

  "A TarFlow" should {

    "tar a bunch of sources" in {
      val digest   =
        "3fef41c5afe7a7ee11ee9d556a564fb57784cc5247b24c6ca70783f396fa158a1c7952504d3e1aa441de20cf065d740eec454c6ffb7fbc4b6351b950ee51c886"
      val elems    = 500
      val contents =
        List.tabulate(2) { i =>
          val content = (i until (i + elems)).toList.mkString(",") + "\n"
          ArchiveSource(content.length.toLong, s"some/path$i/$i.txt", produce(content))
        }
      val path     = Files.createTempFile("test", ".tar")
      TarFlow.write(contents).runWith(FileIO.toPath(path)).futureValue
      FileIO.fromPath(path).runWith(digestSink("SHA-512")).futureValue.value shouldEqual digest
      Files.delete(path)
    }
  }
} 
Example 77
Source File: ArchiveCacheSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.archives

import java.time.{Clock, Instant, ZoneId}

import cats.effect.{IO, Timer}
import ch.epfl.bluebrain.nexus.admin.client.types.Project
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.commons.test.io.IOOptionValues
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.archives.Archive.{File, Resource, ResourceDescription}
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.syntax._
import ch.epfl.bluebrain.nexus.service.config.Settings
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class ArchiveCacheSpec
    extends ActorSystemFixture("ArchiveCacheSpec", true)
    with TestHelper
    with AnyWordSpecLike
    with Matchers
    with IOOptionValues
    with Eventually {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(10.second, 50.milliseconds)

  private val appConfig                 = Settings(system).serviceConfig
  implicit private val config           =
    appConfig.copy(kg =
      appConfig.kg.copy(archives = appConfig.kg.archives.copy(cacheInvalidateAfter = 500.millis, maxResources = 100))
    )
  implicit private val timer: Timer[IO] = IO.timer(system.dispatcher)
  implicit private val archivesCfg      = config.kg.archives

  private val cache: ArchiveCache[IO] = ArchiveCache[IO].unsafeToFuture().futureValue
  implicit private val clock          = Clock.fixed(Instant.EPOCH, ZoneId.systemDefault())
  private val instant                 = clock.instant()

  def randomProject() = {
    val instant = Instant.EPOCH
    // format: off
    Project(genIri, genString(), genString(), None, genIri, genIri, Map.empty, genUUID, genUUID, 1L, false, instant, genIri, instant, genIri)
    // format: on
  }

  "An archive cache" should {

    "write and read an Archive" in {
      val resId     = Id(randomProject().ref, genIri)
      val resource1 = Resource(genIri, randomProject(), None, None, originalSource = true, None)
      val file1     = File(genIri, randomProject(), None, None, None)
      val archive   = Archive(resId, instant, Anonymous, Set(resource1, file1))
      val _         = cache.put(archive).value.some
      cache.get(archive.resId).value.some shouldEqual archive
    }

    "read a non existing resource" in {
      val resId = Id(randomProject().ref, genIri)
      cache.get(resId).value.ioValue shouldEqual None
    }

    "read after timeout" in {
      val resId   = Id(randomProject().ref, genIri)
      val set     = Set[ResourceDescription](Resource(genIri, randomProject(), None, None, originalSource = true, None))
      val archive = Archive(resId, instant, Anonymous, set)
      val _       = cache.put(archive).value.some
      val time    = System.currentTimeMillis()
      cache.get(resId).value.some shouldEqual archive
      eventually {
        cache.get(resId).value.ioValue shouldEqual None
      }
      val diff    = System.currentTimeMillis() - time
      diff should be > config.kg.archives.cacheInvalidateAfter.toMillis
      diff should be < config.kg.archives.cacheInvalidateAfter.toMillis + 300
    }
  }
} 
Example 78
Source File: SparqlLinkSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.indexing

import java.time.{Clock, Instant, ZoneId}

import ch.epfl.bluebrain.nexus.commons.sparql.client.SparqlResults.Binding
import ch.epfl.bluebrain.nexus.kg.config.Schemas._
import ch.epfl.bluebrain.nexus.kg.indexing.SparqlLink.{SparqlExternalLink, SparqlResourceLink}
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.Vocabulary._
import ch.epfl.bluebrain.nexus.rdf.implicits._
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv

class SparqlLinkSpec extends AnyWordSpecLike with Matchers with OptionValues {

  "A SparqlLink" should {

    val clock: Clock = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())

    val id        = url"http://example.com/id"
    val property  = url"http://example.com/friend"
    val property2 = url"http://example.com/friend2"
    val paths     = List(property, property2)

    "build SparqlExternalLink from SPARQL response" in {
      val bindings = Map(
        "s"     -> Binding("uri", id.asString),
        "paths" -> Binding("literal", s"${property.asString} ${property2.asString}")
      )
      SparqlExternalLink(bindings).value shouldEqual SparqlExternalLink(id, paths)
    }

    "build SparqlResourceLink from SPARQL response" in {
      val self     = url"http://127.0.0.1:8080/v1/resources/myorg/myproject/_/id"
      val project  = url"http://127.0.0.1:8080/v1/projects/myorg/myproject/"
      val author   = url"http://127.0.0.1:8080/v1/realms/myrealm/users/me"
      val bindings = Map(
        "s"              -> Binding("uri", id.asString),
        "paths"          -> Binding("literal", s"${property.asString} ${property2.asString}"),
        "_rev"           -> Binding("literal", "1", datatype = Some(xsd.long.asString)),
        "_self"          -> Binding("uri", self.asString),
        "_project"       -> Binding("uri", project.asString),
        "types"          -> Binding("literal", s"${nxv.Resolver.value.asString} ${nxv.Schema.value.asString}"),
        "_constrainedBy" -> Binding("uri", unconstrainedSchemaUri.asString),
        "_createdBy"     -> Binding("uri", author.asString),
        "_updatedBy"     -> Binding("uri", author.asString),
        "_createdAy"     -> Binding("uri", author.asString),
        "_createdAt"     -> Binding("literal", clock.instant().toString, datatype = Some(xsd.dateTime.asString)),
        "_updatedAt"     -> Binding("literal", clock.instant().toString, datatype = Some(xsd.dateTime.asString)),
        "_deprecated"    -> Binding("literal", "false", datatype = Some(xsd.boolean.asString))
      )
      SparqlResourceLink(bindings).value shouldEqual
        SparqlResourceLink(
          id,
          project,
          self,
          1L,
          Set[AbsoluteIri](nxv.Schema.value, nxv.Resolver.value),
          false,
          clock.instant(),
          clock.instant(),
          author,
          author,
          unconstrainedRef,
          paths
        )
    }
  }

} 
Example 79
Source File: TaggingAdapterSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.persistence

import java.time.{Clock, Instant, ZoneId}

import akka.persistence.journal.Tagged
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas._
import ch.epfl.bluebrain.nexus.kg.persistence.TaggingAdapterSpec.Other
import ch.epfl.bluebrain.nexus.kg.resources.Event._
import ch.epfl.bluebrain.nexus.kg.resources.{Id, OrganizationRef, Ref}
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.Json
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with TestHelper {

  "A TaggingAdapter" should {
    val clock = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())

    def genJson(): Json = Json.obj("key" -> Json.fromString(genString()))

    val adapter = new TaggingAdapter()
    val orgRef  = OrganizationRef(genUUID)
    val id      = Id(ProjectRef(genUUID), nxv.projects.value)

    val mapping = Map(
      Set(
        s"type=${nxv.Schema.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      )                                                                                                   ->
        Created(
          id,
          orgRef,
          Ref(shaclSchemaUri),
          Set(nxv.Schema.value, nxv.Resource.value),
          genJson(),
          clock.instant(),
          Anonymous
        ),
      Set(
        s"type=${nxv.Resolver.value.show}",
        s"type=${nxv.Resource.value.show}",
        s"project=${id.parent.id}",
        s"org=${orgRef.show}",
        "event"
      )                                                                                                   ->
        Updated(id, orgRef, 1L, Set(nxv.Resource.value, nxv.Resolver.value), genJson(), clock.instant(), Anonymous),
      Set(s"type=${nxv.Resource.value.show}", s"project=${id.parent.id}", s"org=${orgRef.show}", "event") ->
        Deprecated(id, orgRef, 1L, Set(nxv.Resource.value), clock.instant(), Anonymous),
      Set(s"project=${id.parent.id}", s"org=${orgRef.show}", "event")                                     ->
        TagAdded(id, orgRef, 2L, 1L, "tag", clock.instant(), Anonymous)
    )

    "set the appropriate tags" in {
      forAll(mapping.toList) {
        case (tags, ev) => adapter.toJournal(ev) shouldEqual Tagged(ev, tags)
      }
    }

    "return an empty manifest" in {
      adapter.manifest(Other(genString())) shouldEqual ""
    }
  }
}

object TaggingAdapterSpec {
  final private[persistence] case class Other(value: String)

} 
Example 80
Source File: QueryResultEncoderSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.search

import java.time.Instant
import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import ch.epfl.bluebrain.nexus.commons.circe.syntax._
import ch.epfl.bluebrain.nexus.commons.search.QueryResult.{ScoredQueryResult, UnscoredQueryResult}
import ch.epfl.bluebrain.nexus.commons.search.QueryResults
import ch.epfl.bluebrain.nexus.commons.search.QueryResults.{ScoredQueryResults, UnscoredQueryResults}
import ch.epfl.bluebrain.nexus.commons.test.{Randomness, Resources}
import ch.epfl.bluebrain.nexus.kg.search.QueryResultEncoder._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import io.circe.Json
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class QueryResultEncoderSpec extends AnyWordSpecLike with Matchers with Resources with Randomness {

  implicit val orderedKeys = ServiceConfig.orderedKeys
  val org                  = genString()
  val proj                 = genString()
  val schema               = genString()
  val now                  = Instant.now()
  implicit val http        = HttpConfig("", 0, "v1", "http://nexus.com")
  implicit val uri         = Uri(s"http://nexus.com/resources/$org/$proj/$schema?type=someType&from=10&size=10")
  val before               = now.minusSeconds(60)

  "QueryResultsEncoder" should {
    def json(id: AbsoluteIri, createdAt: Instant): Json =
      jsonContentOf(
        "/resources/es-metadata.json",
        Map(
          quote("{id}")      -> id.asString,
          quote("{org}")     -> org,
          quote("{proj}")    -> proj,
          quote("{schema}")  -> schema,
          quote("{instant}") -> createdAt.toString
        )
      ) deepMerge Json.obj("_original_source" -> Json.fromString(Json.obj("k" -> Json.fromInt(1)).noSpaces))

    "encode ScoredQueryResults" in {
      val results: QueryResults[Json] = ScoredQueryResults[Json](
        3,
        0.3f,
        List(
          ScoredQueryResult(0.3f, json(url"http://nexus.com/result1", before)),
          ScoredQueryResult(0.2f, json(url"http://nexus.com/result2", before)),
          ScoredQueryResult(0.1f, json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/scored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )
    }
    "encode UnscoredQueryResults" in {
      val results: QueryResults[Json] = UnscoredQueryResults[Json](
        3,
        List(
          UnscoredQueryResult(json(url"http://nexus.com/result1", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result2", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/unscored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )

    }
  }

  private def sort(instant: Instant): Option[String] = Some(Json.arr(Json.fromString(instant.toString)).noSpaces)
  private def after(instant: Instant): String        =
    Query("after" -> List(Json.fromString(instant.toString)).asJson.noSpaces).toString()

} 
Example 81
Source File: ResourceDecoderSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.serializers

import java.time.Instant

import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.Schemas
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.{Id, ResourceF, ResourceGraph}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Decoder
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{Inspectors, OptionValues}

class ResourceDecoderSpec
    extends AnyWordSpecLike
    with Matchers
    with Inspectors
    with EitherValues
    with ScalatestRouteTest
    with OptionValues
    with Resources
    with TestHelper {

  private val json                                     = jsonContentOf("/serialization/resource.json")
  private val projectRef                               = ProjectRef(genUUID)
  private val id                                       = url"http://example.com/prefix/myId"
  private val graph                                    = json.toGraph(id).rightValue
  implicit private val decoder: Decoder[ResourceGraph] = ResourceF.resourceGraphDecoder(projectRef)

  private val model = ResourceF(
    Id(projectRef, url"http://example.com/prefix/myId"),
    1L,
    Set(url"https://example.com/vocab/A", url"https://example.com/vocab/B"),
    deprecated = false,
    Map.empty,
    None,
    Instant.parse("2020-01-17T12:45:01.479676Z"),
    Instant.parse("2020-01-17T13:45:01.479676Z"),
    User("john", "bbp"),
    User("brenda", "bbp"),
    Schemas.unconstrainedRef,
    graph
  )

  "A resource" should {
    "be decoded" in {
      json.as[ResourceGraph].rightValue shouldEqual model
    }
  }

} 
Example 82
Source File: ResourceFSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.{Clock, Instant, ZoneId}

import ch.epfl.bluebrain.nexus.util.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.testsyntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

//noinspection TypeAnnotation
class ResourceFSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "A ResourceMetadata" should {
    val user          = User("mysubject", "myrealm")
    val user2         = User("mysubject2", "myrealm")
    implicit val http = HttpConfig("some", 8080, "v1", "http://nexus.example.com")
    val clock: Clock  = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())
    val instant       = clock.instant()
    val id            = url"http://example.com/id"
    val printer       = Printer.spaces2.copy(dropNullValues = true)

    "be converted to Json correctly" when {
      "using multiple types" in {
        val json  = jsonContentOf("/resources/write-response.json")
        val model =
          ResourceMetadata(id, 1L, Set(nxv.AccessControlList.value, nxv.Realm.value), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using a single type" in {
        val json  = jsonContentOf("/resources/write-response-singletype.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList.value), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using no types" in {
        val json  = jsonContentOf("/resources/write-response-notypes.json")
        val model = ResourceMetadata(id, 1L, Set(), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
    }
  }
} 
Example 83
Source File: TaggingAdapterSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.io

import java.time.Instant

import akka.persistence.journal.Tagged
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent.AclDeleted
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent.PermissionsDeleted
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent.RealmDeprecated
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.Label
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.util.EitherValues
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  private val pd = PermissionsDeleted(2L, Instant.EPOCH, Anonymous)
  private val ad = AclDeleted(Path("/a/b/c").rightValue, 2L, Instant.EPOCH, Anonymous)
  private val rd = RealmDeprecated(Label.unsafe("blah"), 2L, Instant.EPOCH, Anonymous)

  private val data = Map[AnyRef, (String, AnyRef)](
    pd  -> ("permissions-event" -> Tagged(pd, Set("permissions", "event"))),
    ad  -> ("acl-event"         -> Tagged(ad, Set("acl", "event"))),
    rd  -> ("realm-event"       -> Tagged(rd, Set("realm", "event"))),
    "a" -> (""                  -> "a")
  )

  "A TaggingAdapter" should {
    val adapter = new TaggingAdapter
    "return the correct manifests" in {
      forAll(data.toList) {
        case (event, (manifest, _)) => adapter.manifest(event) shouldEqual manifest
      }
    }
    "return the correct transformed event" in {
      forAll(data.toList) {
        case (event, (_, transformed)) => adapter.toJournal(event) shouldEqual transformed
      }
    }
  }

} 
Example 84
Source File: InfluxPoint.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.clients
import java.nio.charset.StandardCharsets
import java.time.Instant
import java.util.concurrent.TimeUnit.SECONDS

import ch.epfl.bluebrain.nexus.cli.config.influx.TypeConfig
import fs2.Chunk
import org.http4s.headers.`Content-Type`
import org.http4s.{EntityEncoder, MediaType}

import scala.util.Try


  def fromSparqlResults(
      results: SparqlResults,
      tc: TypeConfig
  ): List[InfluxPoint] =
    results.results.bindings.flatMap { bindings =>
      val values = tc.values.flatMap(value => bindings.get(value).map(value -> _.value)).toMap
      Option.when(values.nonEmpty) {
        val tags      = bindings.view
          .filterKeys(key => !tc.values(key) && key != tc.timestamp)
          .mapValues(_.value)
        val timestamp = bindings.get(tc.timestamp).flatMap(binding => Try(Instant.parse(binding.value)).toOption)
        InfluxPoint(tc.measurement, tags.toMap, values, timestamp)
      }
    }
} 
Example 85
Source File: Event.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.sse

import java.time.Instant
import java.util.UUID

import ch.epfl.bluebrain.nexus.cli.utils.Codecs
import io.circe.generic.semiauto.deriveDecoder
import io.circe.{Decoder, Json}
import org.http4s.Uri


final case class Event(
    eventType: EventType,
    resourceId: Uri,
    rev: Long,
    organization: OrgUuid,
    project: ProjectUuid,
    resourceTypes: Set[Uri],
    instant: Instant,
    raw: Json
)

object Event extends Codecs {

  final private[Event] case class APIEvent(
      `_organizationUuid`: UUID,
      `_projectUuid`: UUID,
      `@type`: EventType,
      `_types`: Option[Set[Uri]],
      `_resourceId`: Uri,
      `_rev`: Option[Long],
      `_instant`: Instant
  ) {
    def asEvent(raw: Json): Event =
      Event(
        `@type`,
        `_resourceId`,
        `_rev`.getOrElse(1L),
        OrgUuid(`_organizationUuid`),
        ProjectUuid(`_projectUuid`),
        `_types`.getOrElse(Set.empty[Uri]),
        `_instant`,
        raw
      )
  }

  private[Event] object APIEvent {
    implicit val apiEventDecoder: Decoder[APIEvent] = deriveDecoder[APIEvent]
  }

  implicit final val eventDecoder: Decoder[Event] =
    Decoder.instance { cursor => cursor.as[APIEvent].map(_.asEvent(cursor.value)) }

} 
Example 86
Source File: InfluxPointSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.clients

import java.time.Instant
import java.util.regex.Pattern.quote

import cats.effect.IO
import cats.implicits._
import ch.epfl.bluebrain.nexus.cli.config.influx.TypeConfig
import ch.epfl.bluebrain.nexus.cli.utils.{Resources, TimeTransformation}
import fs2._
import fs2.text._
import org.http4s.EntityEncoder
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class InfluxPointSpec extends AnyWordSpecLike with Matchers with Resources with Inspectors with TimeTransformation {

  private def writeToString[A](a: A)(implicit W: EntityEncoder[IO, A]): String =
    Stream
      .emit(W.toEntity(a))
      .covary[IO]
      .flatMap(_.body)
      .through(utf8Decode)
      .foldMonoid
      .compile
      .last
      .map(_.getOrElse(""))
      .unsafeRunSync

  "An InfluxPoint" should {

    val created = Instant.now()
    val updated = created.plusSeconds(5)

    "be created from SparqlResults" in {

      val sparqlResults = jsonContentOf(
        "/templates/sparql-results-influx.json",
        Map(
          quote("{created}") -> created.toString,
          quote("{updated}") -> updated.toString,
          quote("{bytes}")   -> 1234.toString,
          quote("{project}") -> "myorg/myproject"
        )
      ).as[SparqlResults].getOrElse(throw new IllegalArgumentException)

      val typeConfig = TypeConfig("https://neuroshapes.org/Subject", "", "datastats", Set("bytes"), "updated")

      val expected = InfluxPoint(
        "datastats",
        Map("created" -> created.toString, "project" -> "myorg/myproject", "deprecated" -> "false"),
        Map("bytes"   -> "1234"),
        Some(updated)
      )

      InfluxPoint.fromSparqlResults(sparqlResults, typeConfig) shouldEqual
        List(expected)

    }

    "converted to string" in {
      val point      = InfluxPoint(
        "m1",
        Map("created" -> created.toString, "project" -> "org/proj", "deprecated" -> "false"),
        Map("bytes"   -> "1234"),
        Some(updated)
      )
      val pointNoTag = InfluxPoint(
        "m2",
        Map.empty,
        Map("bytes" -> "2345"),
        Some(updated)
      )

      val list = List(
        point      -> s"m1,created=${created.toString},project=org/proj,deprecated=false bytes=1234 ${toNano(updated)}",
        pointNoTag -> s"m2 bytes=2345 ${toNano(updated)}"
      )

      forAll(list) {
        case (point, str) => writeToString(point) shouldEqual str
      }
    }
  }

} 
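The expected line-protocol strings end in toNano(updated), mixed in from the project's TimeTransformation trait; presumably it turns an Instant into nanoseconds since the epoch, which is what InfluxDB expects for timestamps. A standalone sketch of that conversion (the helper below is an assumption, not the project's code):

import java.time.Instant

object InstantToNanos extends App {
  // Nanoseconds since the epoch, as InfluxDB line protocol expects for timestamps
  def toNano(instant: Instant): Long =
    instant.getEpochSecond * 1000000000L + instant.getNano

  println(toNano(Instant.now()))
}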
Example 87
Source File: TezosTypesTest.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.common.tezos

import java.time.Instant

import org.scalatest.{EitherValues, Matchers, OptionValues, WordSpec}
import tech.cryptonomic.conseil.common.tezos.TezosTypes._

class TezosTypesTest extends WordSpec with Matchers with OptionValues with EitherValues {

  val sut = TezosTypes

  "The Base58Check verifier" should {
      "accept an empty string" in {
        sut.isBase58Check("") shouldBe true
      }

      "accept a correctly encoded string" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRsWDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe true
      }

      "reject a string with forbidden chars" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzulJNGzRRsWDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "$signiRfcqmbGc6UtW1WzulJNGzRRsWDpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzulJNGzRRsWDpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf*"
        ) shouldBe false
      }

      "reject a string with spaces" in {
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRs DLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          " signiRfcqmbGc6UtW1WzuJNGzRRsDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf"
        ) shouldBe false
        sut.isBase58Check(
          "signiRfcqmbGc6UtW1WzuJNGzRRsDLpafxZZPwwTMntFwup8rTxXEgcLD5UBWkYmMqZECVEr33Xw5sh9NVi45c4FVAXvQSf "
        ) shouldBe false
      }

    }

  "The Syntax import" should {
      "allow building Block-tagged generic data" in {
        import TezosTypes.Syntax._
        val someTime = Some(Instant.ofEpochMilli(0))
        val content = "A content string"
        val (hash, level) = (BlockHash("hash"), 1)

        content.taggedWithBlock(hash, level, someTime, None, None) shouldEqual BlockTagged(
          hash,
          level,
          someTime,
          None,
          None,
          content
        )
      }
    }

  "The BlockTagged wrapper" should {
      "convert to a tuple" in {
        val someTime = Some(Instant.ofEpochMilli(0))
        val content = "A content string"
        val (hash, level) = (BlockHash("hash"), 1)

        BlockTagged(hash, level, someTime, None, None, content).asTuple shouldEqual (hash, level, someTime, None, None, content)
      }
    }

} 
Example 88
Source File: Record.scala    From kinesis-stream   with MIT License 5 votes vote down vote up
package px.kinesis.stream.consumer

import java.time.Instant

import akka.Done
import akka.util.ByteString
import px.kinesis.stream.consumer.checkpoint.CheckpointTracker
import software.amazon.kinesis.retrieval.KinesisClientRecord
import software.amazon.kinesis.retrieval.kpl.ExtendedSequenceNumber

import scala.concurrent.Future

case class Record(
  key: String,
  data: ByteString,
  sequenceNumber: String,
  subSequenceNumber: Long,
  shardId: String,
  approximateArrivalTimestamp: Instant,
  markProcessed: () => Future[Done]
) {
  def extendedSequenceNumber = new ExtendedSequenceNumber(sequenceNumber, subSequenceNumber)
}

object Record {
  def from(kinesisRecord: KinesisClientRecord, shardId: String, tracker: CheckpointTracker): Record = {
    val extendedSequenceNumber =
      new ExtendedSequenceNumber(
        kinesisRecord.sequenceNumber(),
        kinesisRecord.subSequenceNumber()
      )

    val markProcessed: () => Future[Done] =
      () => tracker.process(shardId, extendedSequenceNumber)

    Record(
      kinesisRecord.partitionKey(),
      ByteString(kinesisRecord.data()),
      kinesisRecord.sequenceNumber(),
      kinesisRecord.subSequenceNumber(),
      shardId,
      kinesisRecord.approximateArrivalTimestamp(),
      markProcessed
    )
  }

} 
Example 89
Source File: FileDownload.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.publish.download

import java.nio.file.{Files, Path}
import java.time.Instant

import coursier.core.Authentication
import coursier.publish.download.logger.DownloadLogger
import coursier.util.Task

import scala.util.control.NonFatal


final case class FileDownload(base: Path) extends Download {
  private val base0 = base.normalize()
  def downloadIfExists(
    url: String,
    authentication: Option[Authentication],
    logger: DownloadLogger
  ): Task[Option[(Option[Instant], Array[Byte])]] = {

    val p = base0.resolve(url).normalize()
    if (p.startsWith(base0))
      Task.delay {
        logger.downloadingIfExists(url)
        val res = try {
          if (Files.isRegularFile(p)) {
            val lastModified = Files.getLastModifiedTime(p).toInstant
            Right(Some((Some(lastModified), Files.readAllBytes(p))))
          } else
            Right(None)
        } catch {
          case NonFatal(e) =>
            Left(e)
        }
        logger.downloadedIfExists(
          url,
          res.toOption.flatMap(_.map(_._2.length)),
          res.left.toOption.map(e => new Download.Error.FileException(e))
        )

        Task.fromEither(res)
      }.flatMap(identity)
    else
      Task.fail(new Exception(s"Invalid path: $url (base: $base0, p: $p)"))
  }
} 
Example 90
Source File: OkhttpDownload.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.publish.download

import java.time.Instant
import java.util.concurrent.ExecutorService

import coursier.cache.CacheUrl
import coursier.core.Authentication
import coursier.publish.download.logger.DownloadLogger
import coursier.util.Task
import okhttp3.internal.http.HttpDate
import okhttp3.{OkHttpClient, Request, Response}

import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}

final case class OkhttpDownload(client: OkHttpClient, pool: ExecutorService) extends Download {

  import OkhttpDownload.TryOps

  def downloadIfExists(url: String, authentication: Option[Authentication], logger: DownloadLogger): Task[Option[(Option[Instant], Array[Byte])]] = {

    // FIXME Some duplication with upload below…

    val request = {
      val b = new Request.Builder()
        .url(url)
        .get()

      // Handling this ourselves rather than via client.setAuthenticator / com.squareup.okhttp.Authenticator
      for (auth <- authentication; (k, v) <- auth.allHttpHeaders)
        b.addHeader(k, v)

      b.build()
    }

    Task.schedule(pool) {
      logger.downloadingIfExists(url)

      val res = Try {
        var response: Response = null

        try {
          response = client.newCall(request).execute()

          if (response.isSuccessful) {
            val lastModifiedOpt = Option(response.header("Last-Modified")).map { s =>
              HttpDate.parse(s).toInstant
            }
            Right(Some((lastModifiedOpt, response.body().bytes())))
          } else {
            val code = response.code()
            if (code / 100 == 4)
              Right(None)
            else {
              val content = Try(response.body().string()).getOrElse("")
              Left(new Download.Error.HttpError(url, code, response.headers().toMultimap.asScala.mapValues(_.asScala.toList).iterator.toMap, content))
            }
          }
        } finally {
          if (response != null)
            response.body().close()
        }
      }.toEither.flatMap(identity)

      logger.downloadedIfExists(
        url,
        res.toOption.flatMap(_.map(_._2.length)),
        res.left.toOption.map(e => new Download.Error.DownloadError(url, e))
      )

      Task.fromEither(res)
    }.flatMap(identity)
  }

}

object OkhttpDownload {

  // for 2.11
  private[publish] implicit class TryOps[T](private val t: Try[T]) {
    def toEither: Either[Throwable, T] =
      t match {
        case Success(t) => Right(t)
        case Failure(e) => Left(e)
      }
  }

  def create(pool: ExecutorService): Download = {
    // Seems we can't even create / shutdown the client thread pool (via its Dispatcher)…
    OkhttpDownload(new OkHttpClient, pool)
  }
} 
Example 91
Source File: Content.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.publish

import java.nio.file.{Files, Path}
import java.time.Instant

import coursier.util.Task


sealed abstract class Content extends Product with Serializable {
  def lastModifiedTask: Task[Instant]
  // TODO Support chunked reading
  def contentTask: Task[Array[Byte]]

  def pathOpt: Option[Path] = None
}

object Content {

  final case class File(path: Path) extends Content {
    def lastModifiedTask: Task[Instant] =
      Task.delay {
        Files.getLastModifiedTime(path)
          .toInstant
      }
    def contentTask: Task[Array[Byte]] =
      Task.delay {
        Files.readAllBytes(path)
      }
    override def pathOpt: Option[Path] =
      Some(path)
  }

  final case class InMemory(lastModified: Instant, content: Array[Byte]) extends Content {
    def lastModifiedTask: Task[Instant] =
      Task.point(lastModified)
    def contentTask: Task[Array[Byte]] =
      Task.point(content)
  }

} 
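Content.File above obtains the last-modified time through java.nio and converts it to an Instant; the same conversion without the Task wrapper looks like this (the file path is only an illustration):

import java.nio.file.{Files, Path, Paths}
import java.time.Instant

object LastModifiedExample extends App {
  val path: Path = Paths.get("build.sbt") // any existing file
  if (Files.isRegularFile(path)) {
    val lastModified: Instant = Files.getLastModifiedTime(path).toInstant
    println(s"$path last modified at $lastModified")
  }
}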
Example 92
Source File: NopSigner.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.publish.signing

import java.time.Instant

import coursier.publish.Content
import coursier.publish.fileset.{FileSet, Path}
import coursier.publish.signing.logger.SignerLogger
import coursier.util.Task

object NopSigner extends Signer {
  def sign(content: Content): Task[Either[String, String]] =
    Task.point(Right(""))

  override def signatures(
    fileSet: FileSet,
    now: Instant,
    dontSignExtensions: Set[String],
    dontSignFiles: Set[String],
    logger: => SignerLogger
  ): Task[Either[(Path, Content, String), FileSet]] =
    Task.point(Right(FileSet.empty))
} 
Example 93
Source File: Update.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cli.install

import java.time.Instant

import caseapp.core.app.CaseApp
import caseapp.core.RemainingArgs
import coursier.install.{Channels, InstallDir, Updatable}
import coursier.util.{Sync, Task}

object Update extends CaseApp[UpdateOptions] {
  def run(options: UpdateOptions, args: RemainingArgs): Unit = {

    val params = UpdateParams(options).toEither match {
      case Left(errors) =>
        for (err <- errors.toList)
          System.err.println(err)
        sys.exit(1)
      case Right(p) => p
    }

    val names =
      if (args.all.isEmpty)
        Updatable.list(params.shared.dir)
      else
        args.all

    val now = Instant.now()

    val pool = Sync.fixedThreadPool(params.cache.parallel)
    val cache = params.cache.cache(pool, params.output.logger())

    val graalvmHome = { version: String =>
      params.sharedJava.javaHome(cache, params.output.verbosity)
        .get(s"graalvm:$version")
    }

    val installDir = params.shared.installDir(cache)
        .withVerbosity(params.output.verbosity)
        .withNativeImageJavaHome(Some(graalvmHome))

    val tasks = names.map { name =>
      installDir.maybeUpdate(
        name,
        source => Channels(Seq(source.channel), params.selectedRepositories(source.repositories), cache)
          .find(source.id)
          .map(_.map { case (_, path, descBytes) => (path, descBytes) }),
        now,
        params.force
      ).map {
        case None =>
          if (params.output.verbosity >= 0)
            System.err.println(s"Could not update $name (concurrent operation ongoing)")
        case Some(true) =>
          if (params.output.verbosity >= 0)
            System.err.println(s"Updated $name")
        case Some(false) =>
      }
    }

    val task = tasks.foldLeft(Task.point(())) { (acc, t) =>
      for (_ <- acc; _ <- t) yield ()
    }

    try task.unsafeRun()(cache.ec)
    catch {
      case e: InstallDir.InstallDirException =>
        System.err.println(e.getMessage)
        if (params.output.verbosity >= 2)
          throw e
        else
          sys.exit(1)
    }
  }
} 
Example 94
Source File: DateFormatter.scala    From delta   with Apache License 2.0 5 votes vote down vote up
// Imports for the java.time / java.util types used below; DateTimeFormatterHelper
// and DateTimeUtils come from the surrounding Delta/Spark utility package.
import java.time.{Instant, ZoneId}
import java.util.Locale

sealed trait DateFormatter extends Serializable {
  def parse(s: String): Int // returns days since epoch
  def format(days: Int): String
}

class Iso8601DateFormatter(
    pattern: String,
    locale: Locale) extends DateFormatter with DateTimeFormatterHelper {

  @transient
  private lazy val formatter = getOrCreateFormatter(pattern, locale)
  private val UTC = ZoneId.of("UTC")

  private def toInstant(s: String): Instant = {
    val temporalAccessor = formatter.parse(s)
    toInstantWithZoneId(temporalAccessor, UTC)
  }

  override def parse(s: String): Int = instantToDays(toInstant(s))

  override def format(days: Int): String = {
    val instant = Instant.ofEpochSecond(days * DateTimeUtils.SECONDS_PER_DAY)
    formatter.withZone(UTC).format(instant)
  }
}

object DateFormatter {
  val defaultPattern: String = "yyyy-MM-dd"
  val defaultLocale: Locale = Locale.US

  def apply(format: String, locale: Locale): DateFormatter = {
    new Iso8601DateFormatter(format, locale)
  }

  def apply(format: String): DateFormatter = apply(format, defaultLocale)

  def apply(): DateFormatter = apply(defaultPattern)
} 
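The formatter above maps between date strings and days since the epoch. A standalone sketch of the same idea using only java.time (LocalDate.toEpochDay), without the Spark/Delta helper traits assumed above:

import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDate, ZoneOffset}
import java.util.Locale

object DaysSinceEpochExample extends App {
  val pattern = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.US)

  // String -> days since 1970-01-01
  def parse(s: String): Int = LocalDate.parse(s, pattern).toEpochDay.toInt

  // Days since epoch -> formatted string, resolved in UTC
  def format(days: Int): String = {
    val instant = Instant.ofEpochSecond(days.toLong * 24 * 60 * 60)
    pattern.withZone(ZoneOffset.UTC).format(instant)
  }

  println(parse("2015-11-10"))          // 16749 days since 1970-01-01
  println(format(parse("2015-11-10")))  // 2015-11-10
}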
Example 95
Source File: SharedAuth.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.shared.authorization

import java.time.Instant

import com.typesafe.config.Config
import cool.graph.DataItem
import cool.graph.shared.models._
import pdi.jwt
import pdi.jwt.{Jwt, JwtAlgorithm, JwtClaim, JwtOptions}
import spray.json._

import scala.concurrent.Future
import scala.util.{Failure, Success}

case class JwtUserData[T](projectId: String, userId: String, authData: Option[T], modelName: String = "User")
case class JwtCustomerData(clientId: String)
case class JwtPermanentAuthTokenData(clientId: String, projectId: String, permanentAuthTokenId: String)

object JwtClaimJsonProtocol extends DefaultJsonProtocol {
  implicit val formatClientModel              = jsonFormat(JwtCustomerData, "clientId")
  implicit def formatUserModel[T: JsonFormat] = jsonFormat(JwtUserData.apply[T], "projectId", "userId", "authData", "modelName")
  implicit val formatPermanentAuthTokenModel  = jsonFormat(JwtPermanentAuthTokenData, "clientId", "projectId", "permanentAuthTokenId")
}

trait SharedAuth {
  import JwtClaimJsonProtocol._

  val config: Config
  lazy val jwtSecret: String = config.getString("jwtSecret")
  val expiringSeconds: Int   = 60 * 60 * 24 * 30

  case class Expiration(exp: Long)
  implicit val formatExpiration = jsonFormat(Expiration, "exp")

  def loginUser[T: JsonFormat](project: Project, user: DataItem, authData: Option[T]): Future[String] = {
    val claimPayload = JwtUserData(projectId = project.id, userId = user.id, authData = authData).toJson.compactPrint
    val sessionToken = Jwt.encode(JwtClaim(claimPayload).issuedNow.expiresIn(expiringSeconds), jwtSecret, JwtAlgorithm.HS256)

    Future.successful(sessionToken)
  }

  
  protected def isExpired(sessionToken: String): Boolean = {
    Jwt
      .decodeRaw(sessionToken, JwtOptions(signature = false, expiration = false))
      .map(_.parseJson.convertTo[Expiration])
      .map(_.exp) match {
      case Success(expiration) =>
        (expiration * 1000) < Instant.now().toEpochMilli

      case Failure(e) => {
        // todo: instead of returning false when there is no exp, make sure all tokens have exp
        println("token-had-no-exp-claim")
        false
      }
    }
  }

  protected def parseTokenAsClientData(sessionToken: String): Option[JwtCustomerData] = {
    Jwt
      .decodeRaw(sessionToken, config.getString("jwtSecret"), Seq(JwtAlgorithm.HS256))
      .map(_.parseJson.convertTo[JwtCustomerData])
      .map(Some(_))
      .getOrElse(None)
  }

  def parseTokenAsTemporaryRootToken(token: String): Option[JwtPermanentAuthTokenData] = {
    Jwt
      .decodeRaw(token, config.getString("jwtSecret"), Seq(JwtAlgorithm.HS256))
      .map(_.parseJson.convertTo[JwtPermanentAuthTokenData])
      .map(Some(_))
      .getOrElse(None)
  }

  def isValidTemporaryRootToken(project: Project, token: String): Boolean = {
    parseTokenAsTemporaryRootToken(token) match {
      case Some(rootToken) => !isExpired(token) && rootToken.projectId == project.id
      case None            => false
    }
  }

  def generateRootToken(clientId: String, projectId: String, id: String, expiresInSeconds: Option[Long]): String = {
    val claim = JwtClaim(JwtPermanentAuthTokenData(clientId = clientId, projectId = projectId, permanentAuthTokenId = id).toJson.compactPrint).issuedNow
    val claimToEncode = expiresInSeconds match {
      case Some(expiration) => claim.expiresIn(expiration)
      case None             => claim
    }

    Jwt.encode(
      claimToEncode,
      config.getString("jwtSecret"),
      jwt.JwtAlgorithm.HS256
    )
  }
} 
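isExpired above compares the token's exp claim (Unix seconds) against Instant.now(); a minimal standalone sketch of that comparison:

import java.time.Instant

object ExpirationCheckExample extends App {
  // exp is a Unix timestamp in seconds, as in a JWT `exp` claim
  def isExpired(exp: Long): Boolean =
    (exp * 1000) < Instant.now().toEpochMilli

  println(isExpired(0L))                                            // true: 1970 is long past
  println(isExpired(Instant.now().plusSeconds(60).getEpochSecond))  // false: one minute from now
}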
Example 96
Source File: SlickDaoSupport.scala    From scala-ddd-base   with MIT License 5 votes vote down vote up
package com.github.j5ik2o.dddbase.slick

import java.time.{ Instant, ZoneId, ZonedDateTime }

trait SlickDaoSupport {

  val profile: slick.jdbc.JdbcProfile

  import profile.api._

  implicit val zonedDateTimeColumnType =
    MappedColumnType.base[ZonedDateTime, java.sql.Timestamp](
      { zdt =>
        new java.sql.Timestamp(zdt.toInstant.toEpochMilli)
      }, { ts =>
        val instant = Instant.ofEpochMilli(ts.getTime)
        ZonedDateTime.ofInstant(instant, ZoneId.systemDefault())
      }
    )

  trait Record

  trait SoftDeletableRecord extends Record {
    val status: String
  }

  abstract class TableBase[T](_tableTag: Tag, _tableName: String, _schemaName: Option[String] = None)
      extends Table[T](_tableTag, _schemaName, _tableName)

  trait SoftDeletableTableSupport[T] { this: TableBase[T] =>
    def status: Rep[String]
  }
} 
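The Slick column type above round-trips ZonedDateTime through java.sql.Timestamp via epoch milliseconds; the conversion itself needs nothing from Slick:

import java.time.{Instant, ZoneId, ZonedDateTime}

object ZonedDateTimeRoundTrip extends App {
  val zdt = ZonedDateTime.now()

  // ZonedDateTime -> java.sql.Timestamp
  val ts = new java.sql.Timestamp(zdt.toInstant.toEpochMilli)

  // java.sql.Timestamp -> ZonedDateTime (system default zone, as in the mapping above)
  val restored = ZonedDateTime.ofInstant(Instant.ofEpochMilli(ts.getTime), ZoneId.systemDefault())

  println(zdt)
  println(restored) // equal up to millisecond precision
}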
Example 97
Source File: Schema.scala    From osmesa   with Apache License 2.0 5 votes vote down vote up
package osmesa.analytics.updater

import java.sql.Timestamp
import java.time.Instant

import geotrellis.vectortile.Layer
import org.apache.log4j.Logger
import osmesa.analytics.updater.Implicits._

trait Schema {
  val layer: Layer
  val features: Map[String, (Option[AugmentedDiffFeature], AugmentedDiffFeature)]

  val newFeatures: Seq[VTFeature]
  lazy val replacementFeatures: Seq[VTFeature] = Seq.empty[VTFeature]
  lazy val retainedFeatures: Seq[VTFeature] = Seq.empty[VTFeature]

  protected lazy val logger: Logger = Logger.getLogger(getClass)

  protected lazy val touchedFeatures: Map[String, Seq[VTFeature]] =
    Map.empty[String, Seq[VTFeature]]

  protected lazy val versionInfo: Map[String, (Int, Int, Timestamp)] =
    touchedFeatures
      .mapValues(_.last)
      .mapValues(
        f =>
          (
            f.data("__version").toInt,
            f.data("__minorVersion").toInt,
            Timestamp.from(Instant.ofEpochMilli(f.data("__updated")))
        ))

  protected lazy val minorVersions: Map[String, Int] =
    features
      .mapValues {
        case (_, curr) => curr.data
      }
      .map {
        case (id, f) =>
          versionInfo.get(id) match {
            case Some((prevVersion, _, _)) if prevVersion < f.version => (id, 0)
            case Some((prevVersion, prevMinorVersion, _)) if prevVersion == f.version =>
              (id, prevMinorVersion + 1)
            case _ => (id, 0)
          }
      }
}

trait SchemaBuilder {
  val layerName: String

  def apply(layer: Layer,
            features: Map[String, (Option[AugmentedDiffFeature], AugmentedDiffFeature)]): Schema
} 
Example 98
Source File: KnowledgeGraphSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.graphx

import java.time.Instant

import org.scalatest._

class KnowledgeGraphSpec extends FlatSpec with Matchers {
  "Quantity" should "handle date 2015-11-10T10:15:30.00Z" in {
    val nov10 = Quantity[Instant]("November 10th",Instant.parse("2015-11-10T10:15:30.00Z"))
    nov10.lang shouldBe ("en")
  }
  "NamedEntity" should "person" in {
    val kerry = NamedEntity("Senator John Kerry","John Kerry, U.S. Senator and Secretary of State","person")
    kerry.lang shouldBe ("en")
  }
  "Concept" should "handle invitation to meet" in {
    val meet =  Concept("invitation to meet","meet")
    meet.lang shouldBe ("en")
  }
} 
Example 99
Source File: AuditSerialiser.scala    From play-auditing   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.audit.serialiser

import play.api.libs.json.{JsString, JsValue, Json, Writes}
import uk.gov.hmrc.play.audit.model.{DataCall, DataEvent, ExtendedDataEvent, MergedDataEvent}
import java.time.{Instant, ZoneId}
import java.time.format.DateTimeFormatter

object DateWriter {
  // Datastream does not support the default offset rendering where a zero offset is written as `Z` (it must be `+0000`)
  implicit def instantWrites = new Writes[Instant] {
    private val dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

    def writes(instant: Instant): JsValue =
      JsString(dateFormat.withZone(ZoneId.of("UTC")).format(instant))
  }
}

trait AuditSerialiserLike {
  def serialise(event: DataEvent): JsValue
  def serialise(event: ExtendedDataEvent): JsValue
  def serialise(event: MergedDataEvent): JsValue
}

class AuditSerialiser extends AuditSerialiserLike {
  private implicit val dateWriter: Writes[Instant] = DateWriter.instantWrites
  private implicit val dataEventWriter: Writes[DataEvent] = Json.writes[DataEvent]
  private implicit val dataCallWriter: Writes[DataCall] = Json.writes[DataCall]
  private implicit val extendedDataEventWriter: Writes[ExtendedDataEvent] = Json.writes[ExtendedDataEvent]
  private implicit val mergedDataEventWriter: Writes[MergedDataEvent] = Json.writes[MergedDataEvent]

  override def serialise(event: DataEvent): JsValue =
    Json.toJson(event)

  override def serialise(event: ExtendedDataEvent): JsValue =
    Json.toJson(event)

  override def serialise(event: MergedDataEvent): JsValue =
    Json.toJson(event)
}

object AuditSerialiser extends AuditSerialiser 
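DateWriter above formats instants with an explicit +0000 offset instead of the ISO-8601 Z. A standalone sketch showing the difference between the default rendering and the pattern used here:

import java.time.format.DateTimeFormatter
import java.time.{Instant, ZoneId}

object OffsetFormatting extends App {
  val instant = Instant.parse("2007-12-03T10:15:30.123Z")

  // Default ISO-8601 rendering uses `Z` for the zero offset
  println(instant.toString) // 2007-12-03T10:15:30.123Z

  // The Datastream-friendly pattern renders the offset as +0000
  val dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
  println(dateFormat.withZone(ZoneId.of("UTC")).format(instant)) // 2007-12-03T10:15:30.123+0000
}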
Example 100
Source File: DataEvent.scala    From play-auditing   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.audit.model

import java.util.UUID
import java.time.Instant

import play.api.libs.json._

case class DataEvent(
  auditSource: String,
  auditType  : String,
  eventId    : String              = UUID.randomUUID().toString,
  tags       : Map[String, String] = Map.empty,
  detail     : Map[String, String] = Map.empty,
  generatedAt: Instant             = Instant.now
)

case class ExtendedDataEvent(
  auditSource: String,
  auditType  : String,
  eventId    : String              = UUID.randomUUID().toString,
  tags       : Map[String, String] = Map.empty,
  detail     : JsValue             = JsString(""),
  generatedAt: Instant             = Instant.now
)

case class DataCall(
  tags       : Map[String, String],
  detail     : Map[String, String],
  generatedAt: Instant
)

case class MergedDataEvent(
  auditSource: String,
  auditType  : String,
  eventId    : String = UUID.randomUUID().toString,
  request    : DataCall,
  response   : DataCall
) 
Example 101
Source File: AuditSerialiserSpec.scala    From play-auditing   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.audit.serialiser

import java.time.Instant

import play.api.libs.json.JsString
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import uk.gov.hmrc.play.audit.model.{DataCall, DataEvent, ExtendedDataEvent, MergedDataEvent}

class AuditSerialiserSpec extends AnyWordSpecLike with Matchers {

  "AuditSerialiser" should {
    "serialise DataEvent" in {
      AuditSerialiser.serialise(DataEvent(
        auditSource = "myapp",
        auditType   = "RequestReceived",
        eventId     = "cb5ebe82-cf3c-4f15-bd92-39a6baa1f929",
        tags        = Map("tagkey" -> "tagval"),
        detail      = Map("detailkey" -> "detailval"),
        generatedAt = Instant.parse("2007-12-03T10:15:30.123Z")
      )).toString shouldBe """{"auditSource":"myapp","auditType":"RequestReceived","eventId":"cb5ebe82-cf3c-4f15-bd92-39a6baa1f929","tags":{"tagkey":"tagval"},"detail":{"detailkey":"detailval"},"generatedAt":"2007-12-03T10:15:30.123+0000"}"""
    }

    "serialise ExtendedDataEvent" in {
      AuditSerialiser.serialise(ExtendedDataEvent(
        auditSource = "myapp",
        auditType   = "RequestReceived",
        eventId     = "cb5ebe82-cf3c-4f15-bd92-39a6baa1f929",
        tags        = Map("tagkey" -> "tagval"),
        detail      = JsString("detail"),
        generatedAt = Instant.parse("2007-12-03T10:15:30.123Z")
      )).toString shouldBe """{"auditSource":"myapp","auditType":"RequestReceived","eventId":"cb5ebe82-cf3c-4f15-bd92-39a6baa1f929","tags":{"tagkey":"tagval"},"detail":"detail","generatedAt":"2007-12-03T10:15:30.123+0000"}"""
    }

    "serialise MergedDataEvent" in {
      AuditSerialiser.serialise(MergedDataEvent(
        auditSource = "myapp",
        auditType   = "RequestReceived",
        eventId     = "cb5ebe82-cf3c-4f15-bd92-39a6baa1f929",
        request     = DataCall(
                        tags   = Map("requesttagkey" -> "requesttagval"),
                        detail = Map("requestdetailkey" -> "requestdetailval"),
                        generatedAt = Instant.parse("2007-12-03T10:15:30.123Z")
                      ),
        response    = DataCall(
                        tags   = Map("responsetagkey" -> "responsetagval"),
                        detail = Map("responsedetailkey" -> "responsedetailval"),
                        generatedAt = Instant.parse("2007-12-03T10:16:31.123Z")
                      )
      )).toString shouldBe """{"auditSource":"myapp","auditType":"RequestReceived","eventId":"cb5ebe82-cf3c-4f15-bd92-39a6baa1f929","request":{"tags":{"requesttagkey":"requesttagval"},"detail":{"requestdetailkey":"requestdetailval"},"generatedAt":"2007-12-03T10:15:30.123+0000"},"response":{"tags":{"responsetagkey":"responsetagval"},"detail":{"responsedetailkey":"responsedetailval"},"generatedAt":"2007-12-03T10:16:31.123+0000"}}"""
    }
  }
} 
Example 102
Source File: AkkaPersistenceRuntimeWirings.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking

import java.time.Instant
import java.util.concurrent.TimeUnit

import aecor.data.{ EitherK, Enriched }
import aecor.runtime.akkapersistence.serialization.{
  DecodingFailure,
  PersistentDecoder,
  PersistentEncoder,
  PersistentRepr
}
import aecor.runtime.akkapersistence.{ AkkaPersistenceRuntime, CassandraJournalAdapter }
import akka.actor.ActorSystem
import cats.effect._
import cats.implicits._
import ru.pavkin.booking.common.models.BookingKey
import ru.pavkin.booking.booking.entity._
import ru.pavkin.booking.booking.entity.BookingWireCodecs._
import ru.pavkin.booking.booking.serialization.BookingEventSerializer
import ru.pavkin.booking.common.effect.TimedOutBehaviour

import scala.concurrent.duration._


final class AkkaPersistenceRuntimeWirings[F[_]](
  val bookings: BookingKey => EitherK[Booking, BookingCommandRejection, F]
)

object AkkaPersistenceRuntimeWirings {
  def apply[F[_]: ConcurrentEffect: Timer](system: ActorSystem,
                                           clock: Clock[F]): F[AkkaPersistenceRuntimeWirings[F]] = {

    val journalAdapter = CassandraJournalAdapter(system)
    val runtime = AkkaPersistenceRuntime(system, journalAdapter)

    implicit val eventEncoder: PersistentEncoder[Enriched[EventMetadata, BookingEvent]] =
      PersistentEncoder.instance { evt =>
        val (manifest, bytes) = BookingEventSerializer.serialize(evt)
        PersistentRepr(manifest, bytes)
      }

    implicit val eventDecoder: PersistentDecoder[Enriched[EventMetadata, BookingEvent]] =
      PersistentDecoder.instance { repr =>
        BookingEventSerializer
          .deserialize(repr.manifest, repr.payload)
          .leftMap(ex => DecodingFailure(ex.getMessage, Some(ex)))
      }

    val generateTimestamp: F[EventMetadata] =
      clock.realTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli).map(EventMetadata)

    val bookingsBehavior =
      TimedOutBehaviour(
        EventsourcedBooking.behavior[F](clock).enrich[EventMetadata](generateTimestamp)
      )(2.seconds)

    val bookings: F[BookingKey => EitherK[Booking, BookingCommandRejection, F]] = runtime
      .deploy(EventsourcedBooking.entityName, bookingsBehavior, EventsourcedBooking.tagging)

    bookings.map(new AkkaPersistenceRuntimeWirings(_))
  }
} 
Example 103
Source File: EntityWirings.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking

import java.time.Instant
import java.util.concurrent.TimeUnit

import aecor.data.EitherK
import aecor.runtime.Eventsourced
import aecor.runtime.akkageneric.{ GenericAkkaRuntime, GenericAkkaRuntimeSettings }
import akka.actor.ActorSystem
import cats.effect._
import cats.implicits._
import ru.pavkin.booking.booking.booking.Bookings
import ru.pavkin.booking.common.models.BookingKey
import ru.pavkin.booking.booking.entity.{
  Booking,
  BookingCommandRejection,
  EventMetadata,
  EventsourcedBooking
}
import ru.pavkin.booking.booking.entity.BookingWireCodecs._
import ru.pavkin.booking.common.effect.TimedOutBehaviour

import scala.concurrent.duration._
final class EntityWirings[F[_]](val bookings: Bookings[F])

object EntityWirings {
  def apply[F[_]: ConcurrentEffect: Timer](
    system: ActorSystem,
    clock: Clock[F],
    postgresWirings: PostgresWirings[F]
  ): F[EntityWirings[F]] = {
    val genericAkkaRuntime = GenericAkkaRuntime(system)

    val generateTimestamp: F[EventMetadata] =
      clock.realTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli).map(EventMetadata)

    val bookingsBehavior =
      TimedOutBehaviour(
        EventsourcedBooking.behavior[F](clock).enrich[EventMetadata](generateTimestamp)
      )(2.seconds)

    val createBehavior: BookingKey => F[EitherK[Booking, BookingCommandRejection, F]] =
      Eventsourced(
        entityBehavior = bookingsBehavior,
        journal = postgresWirings.bookingsJournal,
        snapshotting = None
      )

    val bookings: F[Bookings[F]] = genericAkkaRuntime
      .runBehavior(
        typeName = EventsourcedBooking.entityName,
        createBehavior = createBehavior,
        settings = GenericAkkaRuntimeSettings.default(system)
      )
      .map(Eventsourced.Entities.fromEitherK(_))

    bookings.map(new EntityWirings(_))
  }

} 
Example 104
Source File: TypeMappers.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.common.protobuf

import java.time.{ Duration, Instant }

import ru.pavkin.booking.common.models._
import scalapb.TypeMapper
import shapeless._

import scala.util.Try

trait AnyValTypeMapper {

  implicit def anyValTypeMapper[V, U](implicit ev: V <:< AnyVal,
                                      V: Unwrapped.Aux[V, U]): TypeMapper[U, V] = {
    val _ = ev
    TypeMapper[U, V](V.wrap)(V.unwrap)
  }

}

trait CaseClassTypeMapper {

  implicit def caseClassTypeMapper[A, B, Repr <: HList](
    implicit aGen: Generic.Aux[A, Repr],
    bGen: Generic.Aux[B, Repr]
  ): TypeMapper[A, B] =
    TypeMapper { x: A =>
      bGen.from(aGen.to(x))
    } { x =>
      aGen.from(bGen.to(x))
    }

}

trait BaseTypeMapper {

  implicit val bigDecimal: TypeMapper[String, BigDecimal] =
    TypeMapper[String, BigDecimal] { x =>
      val value = if (x.isEmpty) "0" else x
      BigDecimal(value)
    }(_.toString())

  implicit val instant: TypeMapper[Long, Instant] =
    TypeMapper[Long, Instant](Instant.ofEpochMilli)(_.toEpochMilli)

  implicit val instantOpt: TypeMapper[Long, Option[Instant]] =
    instant.map2(i => if (i.toEpochMilli == 0) None else Some(i))(
      _.getOrElse(Instant.ofEpochMilli(0))
    )

  implicit val duration: TypeMapper[String, java.time.Duration] =
    TypeMapper[String, Duration] { s =>
      Try(Duration.parse(s)).getOrElse(Duration.ZERO)
    } {
      _.toString
    }

}

trait TypeMapperInstances extends BaseTypeMapper with AnyValTypeMapper with CaseClassTypeMapper {

  implicit class TypeMapperOps[A <: Any](a: A) {
    def toCustom[B](implicit tm: TypeMapper[A, B]): B = tm.toCustom(a)
    def toBase[B](implicit tm: TypeMapper[B, A]): B = tm.toBase(a)
  }

}

object TypeMappers extends TypeMapperInstances {

  implicit val money: TypeMapper[String, Money] =
    bigDecimal.map2(Money(_))(_.amount)
} 
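instantOpt above encodes a missing Instant as 0 milliseconds; a standalone sketch of that convention without scalapb:

import java.time.Instant

object InstantProtobufMapping extends App {
  // Encode: None becomes 0, Some(i) becomes epoch millis
  def toBase(i: Option[Instant]): Long =
    i.getOrElse(Instant.ofEpochMilli(0)).toEpochMilli

  // Decode: 0 is treated as "no timestamp"
  def toCustom(millis: Long): Option[Instant] =
    if (millis == 0) None else Some(Instant.ofEpochMilli(millis))

  println(toCustom(0L))                          // None
  println(toCustom(toBase(Some(Instant.now())))) // Some(...)
}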
Example 105
Source File: BookingState.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.entity

import java.time.Instant

import aecor.data.Folded
import cats.data.NonEmptyList
import ru.pavkin.booking.common.models._
import aecor.data.Folded.syntax._

case class BookingState(clientId: ClientId,
                        concertId: ConcertId,
                        seats: NonEmptyList[Seat],
                        tickets: Option[NonEmptyList[Ticket]],
                        status: BookingStatus,
                        expiresAt: Option[Instant],
                        paymentId: Option[PaymentId]) {

  def handleEvent(e: BookingEvent): Folded[BookingState] = e match {
    case _: BookingPlaced => impossible
    case e: BookingConfirmed =>
      copy(tickets = Some(e.tickets), expiresAt = e.expiresAt, status = BookingStatus.Confirmed).next
    case _: BookingDenied | _: BookingCancelled => copy(status = BookingStatus.Canceled).next
    case BookingExpired                         => copy(status = BookingStatus.Canceled).next
    case e: BookingPaid                         => copy(paymentId = Some(e.paymentId)).next
    case BookingSettled                         => copy(status = BookingStatus.Settled).next
  }

}

object BookingState {

  def init(e: BookingEvent): Folded[BookingState] = e match {
    case e: BookingPlaced =>
      BookingState(
        e.clientId,
        e.concertId,
        e.seats,
        None,
        BookingStatus.AwaitingConfirmation,
        None,
        None
      ).next
    case _ => impossible
  }
} 
Example 106
Source File: EventsourcedBookingWithoutExpiration.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.entity

import java.time.Instant

import aecor.MonadActionReject
import aecor.data._
import cats.Monad
import cats.data.EitherT._
import cats.data.NonEmptyList
import cats.syntax.all._
import ru.pavkin.booking.common.models.BookingStatus._
import ru.pavkin.booking.common.models._

// Just an example, it isn't used
class EventsourcedBookingWithoutExpiration[F[_]](
  implicit F: MonadActionReject[F, Option[BookingState], BookingEvent, BookingCommandRejection]
) extends Booking[F] {

  import F._

  val ignore: F[Unit] = unit

  def place(client: ClientId, concert: ConcertId, seats: NonEmptyList[Seat]): F[Unit] =
    read.flatMap {
      case Some(_) => reject(BookingAlreadyExists)
      case None =>
        if (seats.distinct =!= seats) reject(DuplicateSeats)
        else if (seats.size > 10) reject(TooManySeats)
        else append(BookingPlaced(client, concert, seats))
    }

  def confirm(tickets: NonEmptyList[Ticket], expiresAt: Option[Instant]): F[Unit] =
    status.flatMap {
      case AwaitingConfirmation =>
        append(BookingConfirmed(tickets, None)) >>
          whenA(tickets.foldMap(_.price).amount <= 0)(append(BookingSettled))

      case Confirmed | Settled => ignore
      case Denied              => reject(BookingIsDenied)
      case Canceled            => reject(BookingIsAlreadyCanceled)
    }

  def expire: F[Unit] = ???

  def deny(reason: String): F[Unit] =
    status.flatMap {
      case AwaitingConfirmation =>
        append(BookingDenied(reason))
      case Denied              => ignore
      case Confirmed | Settled => reject(BookingIsAlreadyConfirmed)
      case Canceled            => reject(BookingIsAlreadyCanceled)
    }

  def cancel(reason: String): F[Unit] =
    status.flatMap {
      case AwaitingConfirmation | Confirmed =>
        append(BookingCancelled(reason))
      case Canceled | Denied => ignore
      case Settled           => reject(BookingIsAlreadySettled)
    }

  def receivePayment(paymentId: PaymentId): F[Unit] =
    status.flatMap {
      case AwaitingConfirmation        => reject(BookingIsNotConfirmed)
      case Canceled | Denied | Settled => reject(BookingIsAlreadySettled)
      case Confirmed                   => append(BookingPaid(paymentId)) >> append(BookingSettled)
    }

  def status: F[BookingStatus] = read.flatMap {
    case Some(s) => pure(s.status)
    case _       => reject(BookingNotFound)
  }

  def tickets: F[Option[NonEmptyList[Ticket]]] = read.map(_.flatMap(_.tickets))
}

object EventsourcedBookingWithoutExpiration {

  def behavior[F[_]: Monad]: EventsourcedBehavior[
    EitherK[Booking, BookingCommandRejection, ?[_]],
    F,
    Option[BookingState],
    BookingEvent
  ] =
    EventsourcedBehavior
      .optionalRejectable(
        new EventsourcedBookingWithoutExpiration(),
        BookingState.init,
        _.handleEvent(_)
      )
} 
Example 107
Source File: BookingEvent.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.entity

import java.time.Instant

import cats.data.NonEmptyList
import ru.pavkin.booking.common.models._

sealed trait BookingEvent extends Product with Serializable

case class BookingPlaced(clientId: ClientId, concertId: ConcertId, seats: NonEmptyList[Seat])
    extends BookingEvent
case class BookingConfirmed(tickets: NonEmptyList[Ticket], expiresAt: Option[Instant])
    extends BookingEvent
case class BookingDenied(reason: String) extends BookingEvent
case class BookingCancelled(reason: String) extends BookingEvent
case object BookingExpired extends BookingEvent
case class BookingPaid(paymentId: PaymentId) extends BookingEvent
case object BookingSettled extends BookingEvent 
Example 108
Source File: Booking.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.entity

import java.time.Instant

import aecor.macros.boopickleWireProtocol
import cats.data.NonEmptyList
import cats.tagless.autoFunctorK
import ru.pavkin.booking.common.models._
import boopickle.Default._
import BookingWireCodecs._

@autoFunctorK(false)
@boopickleWireProtocol
trait Booking[F[_]] {

  def place(client: ClientId, concert: ConcertId, seats: NonEmptyList[Seat]): F[Unit]
  def confirm(tickets: NonEmptyList[Ticket], expiresAt: Option[Instant]): F[Unit]
  def deny(reason: String): F[Unit]
  def cancel(reason: String): F[Unit]
  def receivePayment(paymentId: PaymentId): F[Unit]
  def expire: F[Unit]
  def status: F[BookingStatus]
}

object Booking 
Example 109
Source File: BookingWireCodecs.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.entity

import java.time.Instant

import boopickle.Default._
import scodec.Codec

object BookingWireCodecs {

  implicit val instantPickler: boopickle.Pickler[Instant] =
    boopickle.DefaultBasic.longPickler.xmap(Instant.ofEpochMilli)(_.toEpochMilli)

  implicit val rejectionPickler: boopickle.Pickler[BookingCommandRejection] =
    compositePickler[BookingCommandRejection]
      .addConcreteType[BookingAlreadyExists.type]
      .addConcreteType[BookingNotFound.type]
      .addConcreteType[TooManySeats.type]
      .addConcreteType[DuplicateSeats.type]
      .addConcreteType[BookingIsNotConfirmed.type]
      .addConcreteType[BookingIsAlreadyCanceled.type]
      .addConcreteType[BookingIsAlreadyConfirmed.type]
      .addConcreteType[BookingIsAlreadySettled.type]
      .addConcreteType[BookingIsDenied.type]

  implicit val rejectionCodec: Codec[BookingCommandRejection] =
    aecor.macros.boopickle.BoopickleCodec.codec[BookingCommandRejection]

} 
Example 110
Source File: PostgresBookingViewRepository.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.view

import java.sql.Timestamp
import java.time.Instant

import cats.Monad
import cats.implicits._
import doobie._
import doobie.implicits._
import doobie.util.transactor.Transactor
import io.circe.{ Decoder, Encoder, Json }
import io.circe.parser._
import org.postgresql.util.PGobject
import ru.pavkin.booking.common.models._

class PostgresBookingViewRepository[F[_]: Monad](transactor: Transactor[F],
                                                 tableName: String = "bookings")
    extends BookingViewRepository[F] {

  implicit val jsonMeta: Meta[Json] =
    Meta.Advanced
      .other[PGobject]("json")
      .timap[Json](a => parse(a.getValue).leftMap[Json](e => throw e).merge)(a => {
        val o = new PGobject
        o.setType("json")
        o.setValue(a.noSpaces)
        o
      })

  implicit val seatsMeta: Meta[List[Seat]] = jsonMeta.timap(
    j => Decoder[List[Seat]].decodeJson(j).right.get
  )(s => Encoder[List[Seat]].apply(s))

  implicit val ticketsMeta: Meta[List[Ticket]] = jsonMeta.timap(
    j => Decoder[List[Ticket]].decodeJson(j).right.get
  )(s => Encoder[List[Ticket]].apply(s))

  implicit val instantMeta: Meta[Instant] =
    Meta[Timestamp].timap(_.toInstant)(Timestamp.from)

  implicit val bookingStatusMeta: Meta[BookingStatus] =
    Meta[String].timap(BookingStatus.withName)(_.entryName)

  def get(bookingId: BookingKey): F[Option[BookingView]] =
    queryView(bookingId).option.transact(transactor)

  def byClient(clientId: ClientId): F[List[BookingView]] =
    queryForClient(clientId).to[List].transact(transactor)

  def set(view: BookingView): F[Unit] =
    Update[BookingView](setViewQuery).run(view).transact(transactor).void

  def expired(now: Instant): fs2.Stream[F, BookingKey] =
    queryExpired(now).stream.transact(transactor)

  def createTable: F[Unit] = createTableQuery.transact(transactor).void

  private val setViewQuery =
    s"""INSERT INTO $tableName
    (booking_id, client_id, concert_id, seats, tickets, status, confirmed_at, expires_at, version)
    VALUES (?,?,?,?,?,?,?,?,?)
    ON CONFLICT (booking_id)
    DO UPDATE SET
     tickets = EXCLUDED.tickets,
     status = EXCLUDED.status,
     confirmed_at = EXCLUDED.confirmed_at,
     expires_at = EXCLUDED.expires_at,
     version = EXCLUDED.version;"""

  private def queryView(bookingId: BookingKey) =
    (fr"SELECT * FROM " ++ Fragment.const(tableName) ++
      fr"WHERE booking_id = $bookingId;")
      .query[BookingView]

  private def queryExpired(now: Instant) =
    (fr"SELECT booking_id FROM " ++ Fragment.const(tableName) ++
      fr"WHERE status = ${BookingStatus.Confirmed: BookingStatus} AND expires_at < $now;")
      .query[BookingKey]

  private def queryForClient(clientId: ClientId) =
    (fr"SELECT * FROM " ++ Fragment.const(tableName) ++
      fr"WHERE client_id = $clientId;")
      .query[BookingView]

  private val createTableQuery = (fr"""
    CREATE TABLE IF NOT EXISTS """ ++ Fragment.const(tableName) ++
    fr""" (
    booking_id    text      NOT NULL PRIMARY KEY,
    client_id     text      NOT NULL,
    concert_id    text      NOT NULL,
    seats         json      NOT NULL,
    tickets       json      NOT NULL,
    status        text      NOT NULL,
    confirmed_at  timestamptz,
    expires_at    timestamptz,
    version       bigint    NOT NULL
    );
  """).update.run

} 
Example 111
Source File: BookingView.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.view

import java.time.Instant

import io.circe._
import io.circe.generic.semiauto._
import ru.pavkin.booking.common.models.{ BookingKey, _ }
import ru.pavkin.booking.common.json.AnyValCoders._

case class BookingView(bookingId: BookingKey,
                       clientId: ClientId,
                       concertId: ConcertId,
                       seats: List[Seat],
                       tickets: List[Ticket],
                       status: BookingStatus,
                       confirmedAt: Option[Instant],
                       expiresAt: Option[Instant],
                       version: Long)

object BookingView {
  implicit val decoder: Decoder[BookingView] = deriveDecoder
  implicit val encoder: Encoder[BookingView] = deriveEncoder
} 
Example 112
Source File: StubConfirmationService.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.service

import java.time.temporal.ChronoUnit
import java.time.{ Duration, Instant }
import java.util.concurrent.TimeUnit

import cats.Monad
import cats.data.NonEmptyList
import cats.effect.{ Clock, Sync }
import cats.effect.concurrent.Ref
import cats.implicits._
import ru.pavkin.booking.booking.service.TicketReservationService._
import ru.pavkin.booking.booking.service.StubConfirmationService.ConcertState
import ru.pavkin.booking.common.models._

class StubConfirmationService[F[_]: Monad](clock: Clock[F],
                                           state: Ref[F, Map[ConcertId, ConcertState]])
    extends TicketReservationService[F] {

  val expireAfter: Duration = Duration.of(6, ChronoUnit.HOURS)

  def reserve(bookingId: BookingKey,
           concertId: ConcertId,
           seats: NonEmptyList[Seat]): F[Either[ReservationFailure, Reservation]] =
    clock
      .realTime(TimeUnit.MILLISECONDS)
      .map(Instant.ofEpochMilli)
      .flatMap(
        now =>
          state.modify[Either[ReservationFailure, Reservation]](
            concerts =>
              concerts.get(concertId) match {
                case None => concerts -> Left(UnknownSeats)
                case Some(concertState) =>
                  concertState
                    .book(bookingId, seats)
                    .fold(e => concerts -> Left(e), {
                      case (c, t) =>
                        concerts.updated(concertId, c) -> Right(
                          Reservation(t, Some(now.plus(expireAfter)))
                        )
                    })

            }
        )
      )

  def release(bookingId: BookingKey): F[Either[ReleaseFailure, Unit]] =
    state.modify[Either[ReleaseFailure, Unit]](
      concerts =>
        Either
          .fromOption(concerts.find(_._2.bookedSeats.contains(bookingId)), UnknownBooking)
          .flatMap {
            case (concertId, concertState) =>
              concertState.release(bookingId).map(concertId -> _)
          } match {
          case Left(value)                  => concerts -> Left(value)
          case Right((concertId, newState)) => concerts.updated(concertId, newState) -> Right(())
      }
    )
}

object StubConfirmationService {

  def apply[F[_]: Sync](clock: Clock[F],
                        initial: Map[ConcertId, ConcertState]): F[StubConfirmationService[F]] =
    Ref.of(initial).map(new StubConfirmationService(clock, _))

  case class ConcertState(prices: Map[Seat, Money],
                          availableSeats: Set[Seat],
                          bookedSeats: Map[BookingKey, NonEmptyList[Seat]]) {

    def book(
      bookingId: BookingKey,
      seats: NonEmptyList[Seat]
    ): Either[ReservationFailure, (ConcertState, NonEmptyList[Ticket])] =
      if (bookedSeats.contains(bookingId)) Left(SeatsAlreadyBooked)
      else if (!seats.forall(availableSeats)) Left(SeatsAlreadyBooked)
      else if (!seats.forall(prices.contains)) Left(UnknownSeats)
      else
        Right(
          copy(
            availableSeats = availableSeats.diff(seats.toList.toSet),
            bookedSeats = bookedSeats.updated(bookingId, seats)
          ) -> seats.map(s => Ticket(s, prices(s)))
        )

    def release(bookingId: BookingKey): Either[ReleaseFailure, ConcertState] =
      bookedSeats.get(bookingId) match {
        case Some(booked) =>
          Right(
            copy(
              availableSeats = availableSeats ++ booked.toList.toSet,
              bookedSeats = bookedSeats - bookingId
            )
          )
        case None => Left(UnknownBooking)
      }
  }

} 
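StubConfirmationService derives the reservation expiry by adding a fixed Duration to the clock's current instant; a standalone sketch of that calculation with java.time only:

import java.time.temporal.ChronoUnit
import java.time.{Duration, Instant}

object ReservationExpiry extends App {
  val expireAfter: Duration = Duration.of(6, ChronoUnit.HOURS)

  val now: Instant       = Instant.now()
  val expiresAt: Instant = now.plus(expireAfter)

  println(s"now:       $now")
  println(s"expiresAt: $expiresAt") // six hours later
}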
Example 113
Source File: TicketReservationService.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.service

import java.time.Instant

import cats.data.NonEmptyList
import ru.pavkin.booking.booking.service.TicketReservationService.{
  Reservation,
  ReservationFailure,
  ReleaseFailure
}
import ru.pavkin.booking.common.models.{ BookingKey, ConcertId, Seat, Ticket }

trait TicketReservationService[F[_]] {

  def reserve(bookingId: BookingKey,
              concertId: ConcertId,
              seats: NonEmptyList[Seat]): F[Either[ReservationFailure, Reservation]]

  def release(bookingId: BookingKey): F[Either[ReleaseFailure, Unit]]
}

object TicketReservationService {

  case class Reservation(tickets: NonEmptyList[Ticket], expiresAt: Option[Instant])

  sealed trait ReservationFailure
  case object SeatsAlreadyBooked extends ReservationFailure
  case object UnknownSeats extends ReservationFailure
  case object DuplicateSeats extends ReservationFailure

  sealed trait ReleaseFailure
  case object UnknownBooking extends ReleaseFailure
} 
Example 114
Source File: BookingExpirationProcess.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.process

import java.time.Instant

import cats.effect.Sync
import cats.implicits._
import ru.pavkin.booking.booking.booking.Bookings
import ru.pavkin.booking.booking.view.BookingViewRepository

class BookingExpirationProcess[F[_]: Sync](bookings: Bookings[F],
                                            bookingView: BookingViewRepository[F])
    extends (Instant => F[Unit]) {

  def apply(now: Instant): F[Unit] =
    bookingView
      .expired(now)
      .evalMap(k => bookings(k).expire.void)
      .compile
      .drain

} 
Example 115
Source File: BookingExpirationProcessWiring.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.booking.process

import java.time.Instant
import java.util.concurrent.TimeUnit

import aecor.distributedprocessing.DistributedProcessing
import cats.effect.{Clock, ConcurrentEffect, Timer}
import cats.implicits._
import ru.pavkin.booking.common.streaming.Fs2Process

import scala.concurrent.duration.FiniteDuration

class BookingExpirationProcessWiring[F[_]: ConcurrentEffect: Timer](clock: Clock[F],
                                                                    frequency: FiniteDuration,
                                                                    process: Instant => F[Unit]) {

  val processStream: fs2.Stream[F, Unit] =
    fs2.Stream
      .fixedDelay[F](frequency)
      .evalMap(_ => clock.realTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli))
      .evalMap(process)

  def processes: List[DistributedProcessing.Process[F]] =
    List(Fs2Process(processStream))

} 
Example 116
Source File: CmWellConsumeHandler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.tools.neptune.export

import java.net.{URL, URLDecoder, URLEncoder}
import java.time.Instant

import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
import org.apache.http.impl.client.DefaultHttpClient
import org.apache.http.util.EntityUtils
import org.slf4j.LoggerFactory

object CmWellConsumeHandler {

  protected lazy val logger = LoggerFactory.getLogger("cm_well_consumer")
  val maxRetry = 5

  private val sleepTimeout = 10000

  def bulkConsume(cluster: String, position: String, format: String, updateMode:Boolean, retryCount:Int= 0): CloseableHttpResponse = {
    val withMeta = if(updateMode) "&with-meta" else ""
    val url = "http://" + cluster + "/_bulk-consume?position=" + position + "&format=" + format + withMeta
    val client = new DefaultHttpClient
    client.setHttpRequestRetryHandler(new CustomHttpClientRetryHandler())
    val get = new HttpGet(url)
    logger.info("Going to bulk consume,url= " + url)
    val response = client.execute(get)
    val statusCode = response.getStatusLine.getStatusCode
    if (statusCode != 200 && statusCode != 204) {
      if(statusCode == 503) {
        logger.error("Failed to bulk consume, error status code=" + statusCode + "response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...")
        Thread.sleep(sleepTimeout)
        bulkConsume(cluster, position, format, updateMode)
      }
      else{
        if (retryCount < maxRetry) {
          logger.error("Failed to bulk consume, error status code=" + statusCode + "response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...,retry count=" + retryCount)
          Thread.sleep(sleepTimeout)
          bulkConsume(cluster, position, format, updateMode, retryCount + 1)
        } else {
          throw new Throwable("Failed to consume from cm-well, error code status=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity))
        }
      }
    }
    response
  }

  def retrivePositionFromCreateConsumer(cluster: String, lengthHint: Int, qp: Option[String], updateMode:Boolean, automaticUpdateMode:Boolean, toolStartTime:Instant, retryCount:Int = 0): String = {
    val withDeletedParam = if(updateMode || automaticUpdateMode) "&with-deleted" else ""
    //initial mode
    val qpTillStartTime = if(!updateMode && !automaticUpdateMode)  URLEncoder.encode(",system.lastModified<") + toolStartTime.toString else ""
    //automatic update mode
    val qpAfterStartTime = if(!updateMode && automaticUpdateMode) URLEncoder.encode(",system.lastModified>>" )+ toolStartTime.toString else ""
    val createConsumerUrl = "http://" + cluster + "/?op=create-consumer&qp=-system.parent.parent_hierarchy:/meta/" + qp.getOrElse("") + qpTillStartTime + qpAfterStartTime + "&recursive&length-hint=" + lengthHint + withDeletedParam
    logger.info("create-consumer-url=" + createConsumerUrl)
    val get = new HttpGet(createConsumerUrl)
    val client = new DefaultHttpClient
    client.setHttpRequestRetryHandler(new CustomHttpClientRetryHandler())
    val response = client.execute(get)
    val res = response.getAllHeaders.find(_.getName == "X-CM-WELL-POSITION").map(_.getValue).getOrElse("")
    logger.info("create-Consumer http status=" + response.getStatusLine.getStatusCode)
    val statusCode = response.getStatusLine.getStatusCode
    if (statusCode != 200) {
      if(statusCode == 503){
        logger.error("Failed to retrieve position via create-consumer api,error status code=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...")
        Thread.sleep(sleepTimeout)
        retrivePositionFromCreateConsumer(cluster, lengthHint, qp, updateMode, automaticUpdateMode, toolStartTime)
      }else {
        if (retryCount < maxRetry) {
          logger.error("Failed to retrieve position via create-consumer api,error status code=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry..., retry count=" + retryCount)
          Thread.sleep(sleepTimeout)
          retrivePositionFromCreateConsumer(cluster, lengthHint, qp, updateMode, automaticUpdateMode, toolStartTime, retryCount+1)
        }
        else {
          throw new Throwable("Failed to consume from cm-well, error code status=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity))
        }
      }
    }
    res
  }


} 
Example 117
Source File: EventHubReceiver.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.time.{Duration, Instant}
import java.util.concurrent.Executors

import com.microsoft.azure.eventhubs.{EventHubClient, EventPosition, PartitionReceiver}

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer

class EventHubReceiver(val connectionString: String, val receiverConsumerGroup: String, val partition: String,
    var offset: Option[String], val startTime: Option[Instant], val receiveTimeout: Duration) extends DataReceiver {

  private[this] var isClosing = false

  private val executorService = Executors.newSingleThreadExecutor()
  private val eventHubClient = EventHubClient.createSync(connectionString, executorService)
  if (eventHubClient == null) {
    throw new IllegalArgumentException("Unable to create EventHubClient from the input parameters.")
  }

  private val eventPosition = if (startTime.isDefined) {
    EventPosition.fromEnqueuedTime(startTime.get)
  } else {
    EventPosition.fromOffset(offset.get)
  }
  private val eventHubReceiver: PartitionReceiver = eventHubClient.createReceiverSync(
    receiverConsumerGroup, partition.toString, eventPosition)
  if (this.eventHubReceiver == null) {
    throw new IllegalArgumentException("Unable to create PartitionReceiver from the input parameters.")
  }
  this.eventHubReceiver.setReceiveTimeout(receiveTimeout)

  override def close(): Unit = {
    if (this.eventHubReceiver != null) {
      this.eventHubReceiver.synchronized {
        this.isClosing = true
        eventHubReceiver.close().join()
      }
    }
  }

  override def receiveData(batchSize: Int): Iterable[IotMessage] = {
    val iotMessages = ListBuffer.empty[IotMessage]
    var curBatchSize = batchSize
    var endReached = false
    // Synchronize on the eventHubReceiver object, and make sure the task is not closing,
    // in which case, the eventHubReceiver might be closed.
    while (curBatchSize > 0 && !endReached && !this.isClosing) {
      this.eventHubReceiver.synchronized {
        if (!this.isClosing) {
          val batch = this.eventHubReceiver.receiveSync(curBatchSize)
          if (batch != null) {
            val batchIterable = batch.asScala
            iotMessages ++= batchIterable.map { e =>
              val content = new String(e.getBytes)
              IotMessage(content, e.getSystemProperties.asScala, e.getProperties.asScala)
            }
            curBatchSize -= batchIterable.size
          } else {
            endReached = true
          }
        }
      }
    }
    iotMessages
  }
} 
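A hedged usage sketch for the receiver above; the connection string, consumer group and partition id are placeholders. Note that when startTime is defined it takes precedence and any offset is ignored.

import java.time.{Duration, Instant}

import com.microsoft.azure.iot.kafka.connect.source.EventHubReceiver

object EventHubReceiverUsageSketch extends App {
  // Hypothetical wiring; all connection details below are placeholders.
  val receiver = new EventHubReceiver(
    connectionString      = "Endpoint=sb://<namespace>.servicebus.windows.net/;SharedAccessKeyName=...;SharedAccessKey=...",
    receiverConsumerGroup = "$Default",
    partition             = "0",
    offset                = None,                                        // ignored because startTime is defined
    startTime             = Some(Instant.parse("2016-12-10T00:00:00Z")),
    receiveTimeout        = Duration.ofSeconds(5)
  )

  try receiver.receiveData(batchSize = 10).foreach(m => println(m.content))
  finally receiver.close()
}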
Example 118
Source File: IotMessageConverter.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.time.Instant
import java.util.Date

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}

import scala.collection.JavaConverters._
import scala.reflect.ClassTag

object IotMessageConverter {

  val offsetKey = "offset"

  private val schemaName          = "iothub.kafka.connect"
  private val schemaVersion       = 1
  private val deviceIdKey         = "deviceId"
  private val contentTypeKey      = "contentType"
  private val sequenceNumberKey   = "sequenceNumber"
  private val enqueuedTimeKey     = "enqueuedTime"
  private val contentKey          = "content"
  private val systemPropertiesKey = "systemProperties"
  private val propertiesKey       = "properties"
  private val deviceIdIotHubKey   = "iothub-connection-device-id"

  // Public for testing purposes
  lazy val schema: Schema = SchemaBuilder.struct()
    .name(schemaName)
    .version(schemaVersion)
    .field(deviceIdKey, Schema.STRING_SCHEMA)
    .field(offsetKey, Schema.STRING_SCHEMA)
    .field(contentTypeKey, Schema.OPTIONAL_STRING_SCHEMA)
    .field(enqueuedTimeKey, Schema.STRING_SCHEMA)
    .field(sequenceNumberKey, Schema.INT64_SCHEMA)
    .field(contentKey, Schema.STRING_SCHEMA)
    .field(systemPropertiesKey, propertiesMapSchema)
    .field(propertiesKey, propertiesMapSchema)

  private lazy val propertiesMapSchema: Schema = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA)

  def getIotMessageStruct(iotMessage: IotMessage): Struct = {

    val systemProperties = iotMessage.systemProperties
    val deviceId: String = getOrDefaultAndRemove(systemProperties, deviceIdIotHubKey, "")
    val offset: String = getOrDefaultAndRemove(systemProperties, AmqpConstants.OFFSET_ANNOTATION_NAME, "")
    val sequenceNumber: Long = getOrDefaultAndRemove(systemProperties, AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME, 0)
    val enqueuedTime: Option[Instant] = getEnqueuedTime(systemProperties)
    val enqueuedTimeStr = if(enqueuedTime.isDefined) enqueuedTime.get.toString else ""

    val properties = iotMessage.properties
    val contentType: String = getOrDefaultAndRemove(properties, contentTypeKey, "")

    val systemPropertiesMap = systemProperties.map(i => (i._1, i._2.toString))

    new Struct(schema)
      .put(deviceIdKey, deviceId)
      .put(offsetKey, offset)
      .put(contentTypeKey, contentType)
      .put(enqueuedTimeKey, enqueuedTimeStr)
      .put(sequenceNumberKey, sequenceNumber)
      .put(contentKey, iotMessage.content)
      .put(systemPropertiesKey, systemPropertiesMap.asJava)
      .put(propertiesKey, properties.asJava)
  }

  private def getEnqueuedTime(map: scala.collection.mutable.Map[String, Object]): Option[Instant] = {
    val enqueuedTimeValue: Date = getOrDefaultAndRemove(map, AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME, null)
    if (enqueuedTimeValue != null) Some(enqueuedTimeValue.toInstant) else None
  }

  private def getOrDefaultAndRemove[T: ClassTag, S: ClassTag](map: scala.collection.mutable.Map[String, S],
      key: String, defaultVal: T): T = {

    if (map.contains(key)) {
      val retVal: T = map(key).asInstanceOf[T]
      map.remove(key)
      retVal
    } else {
      defaultVal
    }
  }
} 
Example 119
Source File: C2DMessageConverterTest.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
package com.microsoft.azure.iot.kafka.connect.sink

import java.time.Instant
import java.util.Date

import com.microsoft.azure.iot.kafka.connect.sink.testhelpers.{TestSchemas, TestSinkRecords}
import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import org.apache.kafka.connect.errors.ConnectException
import org.scalatest.{FlatSpec, GivenWhenThen}

class C2DMessageConverterTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "C2DMessageConverter" should "validate the schema of a struct record against the expected schema" in {
    Given("A valid record schema")
    var schema = TestSchemas.validSchema
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A valid record schema")
    schema = TestSchemas.validSchemaWithMissingOptionalField
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A schema with an invalid type")
    schema = TestSchemas.invalidSchemaTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with an invalid field type")
    schema = TestSchemas.invalidFieldTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with a missing field")
    schema = TestSchemas.missingFieldSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }
  }

  "C2DMessageConverter" should "deserialize sink records of String schema and return the C2D Message" in {
    Given("A valid record of string schema")
    var record = TestSinkRecords.getStringSchemaRecord()
    When("DeserializeMessage is called")
    var c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isDefined)
    assert(c2DMessage.expiryTime.get.after(Date.from(Instant.parse("2016-01-01T00:00:00Z"))))

    Given("A valid record of string schema")
    record = TestSinkRecords.getStringSchemaRecord2()
    When("DeserializeMessage is called")
    c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isEmpty)
  }

  "C2DMessageConverter" should "throw an exception if record with string schema has invalid data" in {
    Given("A record of string schema with invalid data")
    val record = TestSinkRecords.getInvalidScringSchemaRecord()
    When("DeserializeMessage is called")
    Then("Then a ConnectException is called")
    intercept[ConnectException] {
      C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    }
  }
} 
Example 120
Source File: IotMessageConverterTest.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.text.SimpleDateFormat
import java.time.Instant

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import com.microsoft.azure.iot.kafka.connect.source.testhelpers.DeviceTemperature
import org.apache.kafka.connect.data.Struct
import org.json4s.jackson.Serialization._
import org.scalatest.{FlatSpec, GivenWhenThen}

import scala.collection.mutable
import scala.util.Random

class IotMessageConverterTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  private val random: Random = new Random

  "IotMessage Converter" should "populate right values for kafka message struct fields" in {

    Given("IotMessage object")
    val deviceTemp = DeviceTemperature(100.01, "F")
    val deviceTempStr = write(deviceTemp)

    val sequenceNumber = random.nextLong()
    val correlationId = random.nextString(10)
    val offset = random.nextString(10)
    val enqueuedDate = new SimpleDateFormat("MM/dd/yyyy").parse("12/01/2016")
    val systemProperties = mutable.Map[String, Object](
      "iothub-connection-device-id" → "device10",
      AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME → sequenceNumber.asInstanceOf[Object],
      AmqpConstants.AMQP_PROPERTY_CORRELATION_ID → correlationId,
      AmqpConstants.OFFSET_ANNOTATION_NAME → offset,
      AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME → enqueuedDate)

    val timestamp = Instant.now().toString
    val messageProperties = mutable.Map[String, Object](
      "timestamp" → timestamp,
      "contentType" → "temperature"
    )

    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)

    When("getIotMessageStruct is called with IotMessage object")
    val kafkaMessageStruct: Struct = IotMessageConverter.getIotMessageStruct(iotMessage)

    Then("The struct has all the expected properties")
    assert(kafkaMessageStruct.getString("deviceId") == "device10")
    assert(kafkaMessageStruct.getString("offset") == offset)
    assert(kafkaMessageStruct.getString("contentType") == "temperature")
    assert(kafkaMessageStruct.getString("enqueuedTime") == enqueuedDate.toInstant.toString)
    assert(kafkaMessageStruct.getInt64("sequenceNumber") == sequenceNumber)
    assert(kafkaMessageStruct.getString("content") == deviceTempStr)

    val structSystemProperties = kafkaMessageStruct.getMap[String, String]("systemProperties")
    assert(structSystemProperties != null)
    assert(structSystemProperties.size == 1)
    assert(structSystemProperties.get(AmqpConstants.AMQP_PROPERTY_CORRELATION_ID) == correlationId)

    val structProperties = kafkaMessageStruct.getMap[String, String]("properties")
    assert(structProperties != null)
    assert(structProperties.size == 1)
    assert(structProperties.get("timestamp") == timestamp)
  }

  it should "use default values for missing properties" in {

    val deviceTemp = DeviceTemperature(100.01, "F")
    val deviceTempStr = write(deviceTemp)

    val systemProperties = mutable.Map.empty[String, Object]
    val messageProperties = mutable.Map.empty[String, Object]

    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)

    When("getIotMessageStruct is called with IotMessage object")
    val kafkaMessageStruct: Struct = IotMessageConverter.getIotMessageStruct(iotMessage)

    Then("The struct has all the expected properties")
    assert(kafkaMessageStruct.getString("deviceId") == "")
    assert(kafkaMessageStruct.getString("offset") == "")
    assert(kafkaMessageStruct.getString("contentType") == "")
    assert(kafkaMessageStruct.getString("enqueuedTime") == "")
    assert(kafkaMessageStruct.getInt64("sequenceNumber") == 0)
    assert(kafkaMessageStruct.getString("content") == deviceTempStr)

    val structSystemProperties = kafkaMessageStruct.getMap[String, String]("systemProperties")
    assert(structSystemProperties != null)
    assert(structSystemProperties.size == 0)

    val structProperties = kafkaMessageStruct.getMap[String, String]("properties")
    assert(structProperties != null)
    assert(structProperties.size == 0)
  }
} 
Example 121
Source File: MockDataReceiver.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source.testhelpers

import java.text.SimpleDateFormat
import java.time.{Duration, Instant}

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import com.microsoft.azure.iot.kafka.connect.source.{DataReceiver, IotMessage, JsonSerialization}
import org.json4s.jackson.Serialization.write

import scala.collection.mutable
import scala.util.Random

class MockDataReceiver(val connectionString: String, val receiverConsumerGroup: String, val partition: String,
    var offset: Option[String], val startTime: Option[Instant], val receiveTimeout: Duration
    ) extends DataReceiver with JsonSerialization {

  private val random: Random = new Random

  override def receiveData(batchSize: Int): Iterable[IotMessage] = {
    val list = scala.collection.mutable.ListBuffer.empty[IotMessage]
    for (i <- 0 until batchSize) {
      list += generateIotMessage(i)
    }
    list
  }

  def generateIotMessage(index: Int): IotMessage = {
    val temp = 70 + random.nextInt(10) + random.nextDouble()
    val deviceTemp = DeviceTemperature(temp, "F")
    val deviceTempStr = write(deviceTemp)

    val systemProperties = mutable.Map[String, Object](
      "iothub-connection-device-id" → s"device$index",
      AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME → index.toLong.asInstanceOf[Object],
      AmqpConstants.AMQP_PROPERTY_CORRELATION_ID → random.nextString(10),
      AmqpConstants.OFFSET_ANNOTATION_NAME → random.nextString(10),
      AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME → new SimpleDateFormat("MM/dd/yyyy").parse("12/01/2016"))

    val messageProperties = mutable.Map[String, Object](
      "timestamp" → Instant.now().toString,
      "contentType" → "temperature"
    )

    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)
    iotMessage
  }

  override def close(): Unit = {}
} 
Example 122
Source File: IotHubSourceTaskTest.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.time.{Duration, Instant}
import java.util

import com.microsoft.azure.iot.kafka.connect.source.testhelpers.{DeviceTemperature, MockDataReceiver, TestConfig, TestIotHubSourceTask}
import org.apache.kafka.connect.data.Struct
import org.json4s.jackson.Serialization.read
import org.scalatest.{FlatSpec, GivenWhenThen}

class IotHubSourceTaskTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "IotHubSourceTask poll" should "return a list of SourceRecords with the right format" in {

    Given("IotHubSourceTask instance")

    val iotHubSourceTask = new TestIotHubSourceTask
    iotHubSourceTask.start(TestConfig.sourceTaskTestProps)

    When("IotHubSourceTask.poll is called")
    val sourceRecords = iotHubSourceTask.poll()

    Then("It returns a list of SourceRecords")
    assert(sourceRecords != null)
    assert(sourceRecords.size() == 15)
    for (i <- 0 until 15) {
      val record = sourceRecords.get(i)
      assert(record.topic() == TestConfig.sourceTaskTestProps.get(IotHubSourceConfig.KafkaTopic))
      assert(record.valueSchema() == IotMessageConverter.schema)
      val messageStruct = record.value().asInstanceOf[Struct]
      assert(messageStruct.getString("deviceId").startsWith("device"))
      assert(messageStruct.getString("contentType") == "temperature")
      val enqueuedTime = Instant.parse(messageStruct.getString("enqueuedTime"))
      assert(enqueuedTime.isAfter(Instant.parse("2016-11-20T00:00:00Z")))

      val systemProperties = messageStruct.getMap[String, String]("systemProperties")
      assert(systemProperties != null)
      assert(systemProperties.get("sequenceNumber") != "")
      assert(systemProperties.get("correlationId") != "")

      val properties = messageStruct.getMap[String, String]("properties")
      assert(properties != null)
      assert(properties.get("timestamp") != "")

      val deviceTemperature = read[DeviceTemperature](messageStruct.get("content").asInstanceOf[String])
      assert(deviceTemperature != null)
      assert(deviceTemperature.unit == "F")
      assert(deviceTemperature.value != 0)
    }
  }

  "IotHubSourceTask start" should "initialize all properties" in {

    Given("A list of properties for IotHubSourceTask")
    val props: util.Map[String, String] = TestConfig.sourceTaskTestProps

    When("IotHubSourceTask is started")
    val task = new TestIotHubSourceTask
    task.start(props)

    Then("Data receiver should be properly initialized")
    assert(task.partitionSources.length == 3)
    assert(!task.partitionSources.exists(s => s.dataReceiver == null))
    for (ps ← task.partitionSources) {
      val dataReceiver = ps.dataReceiver.asInstanceOf[MockDataReceiver]
      assert(dataReceiver.offset.isDefined)
      assert(dataReceiver.startTime.isEmpty)
      assert(dataReceiver.connectionString != "")
      assert(dataReceiver.receiverConsumerGroup != "")
      assert(dataReceiver.receiveTimeout == Duration.ofSeconds(5))
    }
  }

  it should "initialize start time correctly on the data receiver when it is passed in the config" in {

    Given("A list of properties with StartTime for IotHubSourceTask")
    val props: util.Map[String, String] = TestConfig.sourceTaskTestPropsStartTime

    When("IotHubSourceTask is started")
    val task = new TestIotHubSourceTask
    task.start(props)

    Then("Data receiver should be properly initialized, with StartTime, while Offsets value should be ignored")
    assert(task.partitionSources.length == 3)
    assert(!task.partitionSources.exists(s => s.dataReceiver == null))
    for (ps ← task.partitionSources) {
      val dataReceiver = ps.dataReceiver.asInstanceOf[MockDataReceiver]
      assert(dataReceiver.offset.isEmpty)
      assert(dataReceiver.startTime.isDefined)
      assert(dataReceiver.startTime.get == Instant.parse("2016-12-10T00:00:00Z"))
      assert(dataReceiver.connectionString != "")
      assert(dataReceiver.receiverConsumerGroup != "")
    }
  }
} 
Example 123
Source File: ProducerRecord.scala    From skafka   with MIT License 5 votes vote down vote up
package com.evolutiongaming.skafka.producer

import java.time.Instant

import cats.effect.Sync
import cats.implicits._
import com.evolutiongaming.skafka._

final case class ProducerRecord[+K, +V](
  topic: Topic,
  value: Option[V] = None,
  key: Option[K] = None,
  partition: Option[Partition] = None,
  timestamp: Option[Instant] = None,
  headers: List[Header] = Nil)

object ProducerRecord {

  def apply[K, V](topic: Topic, value: V, key: K): ProducerRecord[K, V] = {
    ProducerRecord(topic = topic, value = Some(value), key = Some(key))
  }


  implicit class ProducerRecordOps[K, V](val self: ProducerRecord[K, V]) extends AnyVal {

    def toBytes[F[_] : Sync](implicit
      toBytesK: ToBytes[F, K],
      toBytesV: ToBytes[F, V]
    ): F[ProducerRecord[Bytes, Bytes]] = {
      val topic = self.topic

      for {
        k <- self.key.traverse { toBytesK(_, topic) }
        v <- self.value.traverse { toBytesV(_, topic) }
      } yield {
        self.copy(value = v, key = k)
      }
    }
  }
} 
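A short usage sketch for the record type above; the topic name and payload are placeholders, and only topic, value and key are required while partition, headers and the remaining fields keep their defaults.

import java.time.Instant

import com.evolutiongaming.skafka.producer.ProducerRecord

object ProducerRecordSketch {
  // Build a minimal record and then attach an explicit timestamp via copy.
  val record: ProducerRecord[String, String] =
    ProducerRecord(topic = "example-topic", value = "payload", key = "key-1")
      .copy(timestamp = Some(Instant.now()))
}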
Example 124
Source File: ProducerConverters.scala    From skafka   with MIT License 5 votes vote down vote up
package com.evolutiongaming.skafka.producer

import java.time.Instant

import cats.implicits._
import com.evolutiongaming.catshelper.{ApplicativeThrowable, MonadThrowable}
import com.evolutiongaming.skafka.Converters._
import com.evolutiongaming.skafka.{Offset, Partition, TopicPartition}
import org.apache.kafka.clients.producer.{ProducerRecord => ProducerRecordJ, RecordMetadata => RecordMetadataJ}
import org.apache.kafka.common.record.RecordBatch
import org.apache.kafka.common.requests.ProduceResponse

import scala.jdk.CollectionConverters._

object ProducerConverters {

  implicit class ProducerRecordOps[K, V](val self: ProducerRecord[K, V]) extends AnyVal {

    def asJava: ProducerRecordJ[K, V] = {
      new ProducerRecordJ[K, V](
        self.topic,
        self.partition.fold[java.lang.Integer](null) { a => java.lang.Integer.valueOf(a.value) },
        self.timestamp.fold[java.lang.Long](null) { timestamp => timestamp.toEpochMilli },
        self.key.getOrElse(null.asInstanceOf[K]),
        self.value.getOrElse(null.asInstanceOf[V]),
        self.headers.map { _.asJava }.asJava)
    }
  }


  implicit class JProducerRecordOps[K, V](val self: ProducerRecordJ[K, V]) extends AnyVal {

    def asScala[F[_] : ApplicativeThrowable]: F[ProducerRecord[K, V]] = {

      Option(self.partition)
        .traverse { partition => Partition.of[F](partition.intValue()) }
        .map { partition =>
          ProducerRecord(
            topic = self.topic,
            value = Option(self.value),
            key = Option(self.key),
            partition = partition,
            timestamp = Option(self.timestamp) map { Instant.ofEpochMilli(_) },
            headers = self.headers.asScala.map { _.asScala }.toList)
        }
    }
  }


  implicit class JRecordMetadataOps(val self: RecordMetadataJ) extends AnyVal {

    def asScala[F[_] : MonadThrowable]: F[RecordMetadata] = {
      for {
        partition <- Partition.of[F](self.partition())
        offset    <- (self.offset noneIf ProduceResponse.INVALID_OFFSET).traverse { Offset.of[F] }
      } yield {
        RecordMetadata(
          topicPartition = TopicPartition(self.topic, partition),
          timestamp = (self.timestamp noneIf RecordBatch.NO_TIMESTAMP).map(Instant.ofEpochMilli),
          offset = offset,
          keySerializedSize = self.serializedKeySize noneIf -1,
          valueSerializedSize = self.serializedValueSize noneIf -1)
      }
    }
  }


  implicit class RecordMetadataOps(val self: RecordMetadata) extends AnyVal {

    def asJava: RecordMetadataJ = {
      new RecordMetadataJ(
        self.topicPartition.asJava,
        0,
        self.offset.fold(ProduceResponse.INVALID_OFFSET) { _.value },
        self.timestamp.fold(RecordBatch.NO_TIMESTAMP)(_.toEpochMilli),
        null,
        self.keySerializedSize getOrElse -1,
        self.valueSerializedSize getOrElse -1)
    }
  }
} 
Example 125
Source File: ProducerConvertersSpec.scala    From skafka   with MIT License 5 votes vote down vote up
package com.evolutiongaming.skafka.producer

import java.time.Instant
import java.time.temporal.ChronoUnit

import cats.implicits._
import com.evolutiongaming.skafka.producer.ProducerConverters._
import com.evolutiongaming.skafka.{Header, Offset, Partition, TopicPartition}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.util.Try

class ProducerConvertersSpec extends AnyWordSpec with Matchers {

  "ProducerConverters" should {

    val instant = Instant.now().truncatedTo(ChronoUnit.MILLIS)

    "convert Producer.Record" in {
      val record1 = ProducerRecord[Int, String](topic = "topic", value = Some("value"))
      record1.asJava.asScala[Try] shouldEqual record1.pure[Try]

      val record2 = ProducerRecord[Int, String](
        topic = "topic",
        value = Some("value"),
        key = Some(1),
        partition = Some(Partition.min),
        timestamp = Some(instant),
        headers = List(Header("key", Array[Byte](1, 2, 3))))
      record2.asJava.asScala[Try] shouldEqual record2.pure[Try]
    }

    "convert RecordMetadata" in {
      val topicPartition = TopicPartition("topic", Partition.min)
      val metadata1 = RecordMetadata(topicPartition)
      metadata1.pure[Try] shouldEqual metadata1.asJava.asScala[Try]

      val metadata2 = RecordMetadata(topicPartition, Some(instant), Offset.min.some, 10.some, 100.some)
      metadata2.pure[Try] shouldEqual metadata2.asJava.asScala[Try]
    }
  }
} 
Example 126
Source File: ConsumerConvertersSpec.scala    From skafka   with MIT License 5 votes vote down vote up
package com.evolutiongaming.skafka.consumer

import java.time.Instant
import java.time.temporal.ChronoUnit

import cats.implicits._
import com.evolutiongaming.skafka._
import com.evolutiongaming.skafka.Converters._
import com.evolutiongaming.skafka.consumer.ConsumerConverters._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.util.Try


class ConsumerConvertersSpec extends AnyWordSpec with Matchers {

  val instant: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS)

  "ConsumerConverters" should {

    "convert OffsetAndMetadata" in {
      val value = OffsetAndMetadata(Offset.min, "metadata")
      value.pure[Try] shouldEqual value.asJava.asScala[Try]
    }

    "convert OffsetAndTimestamp" in {
      val value = OffsetAndTimestamp(Offset.min, instant)
      value.pure[Try] shouldEqual value.asJava.asScala[Try]
    }

    for {
      timestampAndType <- List(
        None,
        Some(TimestampAndType(instant, TimestampType.Create)),
        Some(TimestampAndType(instant, TimestampType.Append)))
      key <- List(Some(WithSize("key", 1)), None)
      value <- List(Some(WithSize("value", 1)), None)
    } {
      s"convert ConsumerRecord, key: $key, value: $value, timestampAndType: $timestampAndType" in {
        val consumerRecord = ConsumerRecord(
          topicPartition = TopicPartition("topic", Partition.min),
          offset = Offset.min,
          timestampAndType = timestampAndType,
          key = key,
          value = value,
          headers = List(Header("key", Bytes.empty)))
        consumerRecord.pure[Try] shouldEqual consumerRecord.asJava.asScala[Try]
      }
    }
  }
} 
Example 127
Source File: ApplicationTimer.scala    From Full-Stack-Scala-Starter   with Apache License 2.0 5 votes vote down vote up
package services

import java.time.{Clock, Instant}
import javax.inject._
import play.api.Logger
import play.api.inject.ApplicationLifecycle
import scala.concurrent.Future


@Singleton
class ApplicationTimer @Inject() (clock: Clock, appLifecycle: ApplicationLifecycle) {

  // This code is called when the application starts.
  private val start: Instant = clock.instant
  Logger.info(s"ApplicationTimer demo: Starting application at $start.")

  // When the application starts, register a stop hook with the
  // ApplicationLifecycle object. The code inside the stop hook will
  // be run when the application stops.
  appLifecycle.addStopHook { () =>
    val stop: Instant = clock.instant
    val runningTime: Long = stop.getEpochSecond - start.getEpochSecond
    Logger.info(s"ApplicationTimer demo: Stopping application at ${clock.instant} after ${runningTime}s.")
    Future.successful(())
  }
} 
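This timer only runs if something binds a java.time.Clock and instantiates the singleton eagerly. In the Play starter template that wiring usually lives in a root Guice Module; a sketch of that module follows (assumed here, not part of the example above).

import java.time.Clock

import com.google.inject.AbstractModule
import services.ApplicationTimer

// Hypothetical Guice module; Play picks up a root `Module` class by convention.
class Module extends AbstractModule {
  override def configure(): Unit = {
    // Use the system clock as the default Clock implementation.
    bind(classOf[Clock]).toInstance(Clock.systemDefaultZone())
    // Instantiate the timer eagerly so it starts logging on application start-up.
    bind(classOf[ApplicationTimer]).asEagerSingleton()
  }
}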
Example 128
Source File: Encodings.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra.encoding

import java.time.{ Instant, LocalDate, ZonedDateTime, ZoneId }
import java.util.Date

import com.datastax.driver.core.{ LocalDate => CasLocalDate }
import io.getquill.context.cassandra.CassandraContext

trait Encodings extends CassandraMapperConversions with CassandraTypes {
  this: CassandraContext[_] =>

  protected val zoneId = ZoneId.systemDefault

  implicit val encodeJava8LocalDate: MappedEncoding[LocalDate, CasLocalDate] = MappedEncoding(ld =>
    CasLocalDate.fromYearMonthDay(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))
  implicit val decodeJava8LocalDate: MappedEncoding[CasLocalDate, LocalDate] = MappedEncoding(ld =>
    LocalDate.of(ld.getYear, ld.getMonth, ld.getDay))

  implicit val encodeJava8Instant: MappedEncoding[Instant, Date] = MappedEncoding(Date.from)
  implicit val decodeJava8Instant: MappedEncoding[Date, Instant] = MappedEncoding(_.toInstant)

  implicit val encodeJava8ZonedDateTime: MappedEncoding[ZonedDateTime, Date] = MappedEncoding(zdt =>
    Date.from(zdt.toInstant))
  implicit val decodeJava8ZonedDateTime: MappedEncoding[Date, ZonedDateTime] = MappedEncoding(d =>
    ZonedDateTime.ofInstant(d.toInstant, zoneId))
} 
Example 129
Source File: DiffIgnoreIntTest.scala    From diffx   with Apache License 2.0 5 votes vote down vote up
package com.softwaremill.diffx

import java.time.Instant

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class DiffIgnoreIntTest extends AnyFlatSpec with Matchers {
  val instant: Instant = Instant.now()
  val p1 = Person("p1", 22, instant)
  val p2 = Person("p2", 11, instant)

  it should "allow importing and exporting implicits" in {
    implicit val d: Diff[Person] = Derived[Diff[Person]].value.ignore(_.name)
    compare(p1, p2) shouldBe DiffResultObject(
      "Person",
      Map("name" -> Identical("p1"), "age" -> DiffResultValue(22, 11), "in" -> Identical(instant))
    )
  }

  it should "allow importing and exporting implicits using macro on derived instance" in {
    implicit val d: Diff[Person] = Derived[Diff[Person]].ignore(_.name)
    compare(p1, p2) shouldBe DiffResultObject(
      "Person",
      Map("name" -> Identical("p1"), "age" -> DiffResultValue(22, 11), "in" -> Identical(instant))
    )
  }

  it should "allow calling ignore multiple times" in {
    implicit val d: Diff[Person] = Derived[Diff[Person]].ignore[Person, String](_.name).ignore[Person, Int](_.age)
    compare(p1, p2) shouldBe Identical(p1)
  }
} 
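The test above relies on a Person case class that is not shown in this example. A minimal shape consistent with the assertions (field names name, age and in) would be the following; the real project may define additional fields.

import java.time.Instant

// Assumed shape, inferred from the fields compared in the test above.
case class Person(name: String, age: Int, in: Instant)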
Example 130
Source File: DiscoveryUtilsSpec.scala    From odsc-west-streaming-trends   with GNU General Public License v3.0 5 votes vote down vote up
package com.twilio.open.streaming.trend.discovery

import java.time.Instant
import java.time.format.DateTimeFormatter

import com.twilio.open.streaming.trend.discovery.protocol.{CallEvent, Dimensions}
import org.scalatest.{FlatSpec, Matchers}

class DiscoveryUtilsSpec extends FlatSpec with Matchers {

  // example using java serialization with case class
  "DiscoveryUtils" should " serialize and deserialize a CallEvent object" in {
    val eventTime = Instant.from(DateTimeFormatter.ISO_DATE_TIME.parse("2018-03-08T18:00:00Z"))
    val loggedTime = eventTime.plusSeconds(34)
    //eventTime: Long, loggedTime: Long, eventId: String, eventType: String,dimensions: Dimensions, signalingEvent: Option[SignalingEvent]
    //case class Dimensions(country: Option[String], continent: Option[String], carrier: Option[String],direction: Option[String])
    val ce = CallEvent(eventTime.toEpochMilli, loggedTime.toEpochMilli, "uuid1", "signaling", Dimensions(
      country = Some("us"),
      continent = Some("na"),
      carrier = Some("verizon"),
      direction = Some("inbound")
    ), None)

    val ceSer = DiscoveryUtils.serialize(ce)
    val ceDeser = DiscoveryUtils.deserialize[CallEvent](ceSer)
    // assert the round trip rather than discarding the comparison result
    ce shouldEqual ceDeser
  }
} 
Example 131
Source File: StateInferencer.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.sessionmanager.service.sessionspawner

import java.time.Instant

import com.google.inject.Inject
import com.google.inject.name.Named
import org.apache.spark.launcher.SparkAppHandle

import ai.deepsense.sessionmanager.service.Status
import ai.deepsense.sessionmanager.service.Status.Status

trait StateInferencer {
  def handleHeartbeat(currentTime: Instant): StateInferencer
  def statusForApi(currentTime: Instant, sparkState: SparkAppHandle.State): Status
}

// Factory is needed because of config and guice
class StateInferencerFactory @Inject()(
    @Named("session-manager.executor-first-heartbeat-timeout") val firstHeartbeatTimeout: Int,
    @Named("session-manager.heartbeat-maximum-delay") val heartbeatMaximumDelay: Int
) {

  def newInferencer(startTime: Instant): StateInferencer =
    StateInferencerForClientMode(startTime, None)

  private case class StateInferencerForClientMode(
      startTime: Instant, lastHeartbeat: Option[Instant]
  ) extends StateInferencer {

    def handleHeartbeat(currentTime: Instant): StateInferencer = this.copy(
      lastHeartbeat = Some(currentTime)
    )

    def statusForApi(currentTime: Instant, sparkState: SparkAppHandle.State): Status = {
      val executorIsNotRunning = sparkState.isFinal

      if(executorIsNotRunning) {
        Status.Error
      } else {
        lastHeartbeat match {
          case None => statusBeforeFirstHeartbeat(currentTime)
          case Some(lastHeartbeatTime) => statusAfterFirstHeartbeat(currentTime, lastHeartbeatTime)
        }
      }
    }

    private def statusAfterFirstHeartbeat(currentTime: Instant, lastHeartbeatTime: Instant) = {
      val secondsFromLastHeartbeat = secondsBetween(currentTime, lastHeartbeatTime)
      if (secondsFromLastHeartbeat < heartbeatMaximumDelay) {
        Status.Running
      } else {
        Status.Error
      }
    }

    private def statusBeforeFirstHeartbeat(currentTime: Instant) = {
      val secondsFromStart = secondsBetween(currentTime, startTime)
      if (secondsFromStart < firstHeartbeatTimeout) {
        Status.Creating
      } else {
        Status.Error
      }
    }

    private def secondsBetween(a: Instant, b: Instant) =
      java.time.Duration.between(a, b).abs().getSeconds.toInt

  }

} 
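A brief sketch of how the inferencer above is driven; the timeout values and the Spark state are illustrative, and in the real service the factory arguments are injected from configuration.

import java.time.Instant

import ai.deepsense.sessionmanager.service.sessionspawner.StateInferencerFactory
import org.apache.spark.launcher.SparkAppHandle

object StateInferencerSketch extends App {
  // Illustrative timeouts, in seconds.
  val factory = new StateInferencerFactory(
    firstHeartbeatTimeout = 60, // allowed wait before the first heartbeat arrives
    heartbeatMaximumDelay = 30  // allowed gap between consecutive heartbeats
  )

  val started = factory.newInferencer(Instant.now())

  // No heartbeat yet and still within the timeout window: the session reports Creating.
  println(started.statusForApi(Instant.now(), SparkAppHandle.State.RUNNING))

  // After a heartbeat the session reports Running (until heartbeats stop arriving).
  val afterHeartbeat = started.handleHeartbeat(Instant.now())
  println(afterHeartbeat.statusForApi(Instant.now(), SparkAppHandle.State.RUNNING))
}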
Example 132
Source File: SparkLauncherSessionSpawner.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher

import java.time.Instant

import scalaz.Scalaz._
import scalaz.Validation.FlatMap._
import scalaz._

import com.google.inject.Inject

import ai.deepsense.commons.models.ClusterDetails
import ai.deepsense.commons.utils.Logging
import ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.clusters.SeahorseSparkLauncher
import ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.executor.SessionExecutorArgs
import ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.outputintercepting.OutputInterceptorFactory
import ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.spark.LoggingSparkAppListener
import ai.deepsense.sessionmanager.service.sessionspawner.{ExecutorSession, SessionConfig, SessionSpawner, StateInferencerFactory}

class SparkLauncherSessionSpawner @Inject()(
  private val sparkLauncherConfig: SparkLauncherConfig,
  private val outputInterceptorFactory: OutputInterceptorFactory,
  private val stateInferencerFactory: StateInferencerFactory
) extends SessionSpawner with Logging {

  override def createSession(
      sessionConfig: SessionConfig,
      clusterDetails: ClusterDetails): ExecutorSession = {
    logger.info(s"Creating session for workflow ${sessionConfig.workflowId}")

    val interceptorHandle = outputInterceptorFactory.prepareInterceptorWritingToFiles(
      clusterDetails
    )

    val applicationArgs = SessionExecutorArgs(sessionConfig, sparkLauncherConfig)
    val startedSession = for {
      launcher <- SeahorseSparkLauncher(applicationArgs, sparkLauncherConfig, clusterDetails)
      listener = new LoggingSparkAppListener()
      handle <- handleUnexpectedExceptions {
        interceptorHandle.attachTo(launcher)
        launcher.startApplication(listener)
      }
      stateInferencer = stateInferencerFactory.newInferencer(Instant.now())
      executorSession = ExecutorSession(
        sessionConfig, clusterDetails, Some(handle), stateInferencer, interceptorHandle
      )
    } yield executorSession

    startedSession.fold(error => {
      interceptorHandle.writeOutput(error.getMessage)
      val stateInferencer = stateInferencerFactory.newInferencer(Instant.now())
      ExecutorSession(sessionConfig, clusterDetails, None, stateInferencer, interceptorHandle)
    }, identity)
  }

  private def handleUnexpectedExceptions[T, E <: SparkLauncherError]
      (code: => T): Validation[UnexpectedException, T] =
    try {
      code.success
    } catch {
      case ex: Exception => UnexpectedException(ex).failure
    }

} 
Example 133
Source File: ExecutorSession.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.sessionmanager.service.sessionspawner

import java.time.Instant

import org.apache.spark.launcher.SparkAppHandle

import ai.deepsense.commons.models.ClusterDetails
import ai.deepsense.sessionmanager.service.{Session, Status}
import ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.outputintercepting.OutputInterceptorHandle

case class ExecutorSession(
    sessionConfig: SessionConfig,
    clusterDetails: ClusterDetails,
    private val sparkAppHandleOpt: Option[SparkAppHandle],
    private val state: StateInferencer,
    private val outputInterceptorHandle: OutputInterceptorHandle) {

  def sessionForApi(): Session = {
    val status = sparkAppHandleOpt match {
      case None => Status.Error // no working spark process at all
      case Some(sparkAppHandle) => state.statusForApi(Instant.now(), sparkAppHandle.getState)
    }
    Session(
      sessionConfig.workflowId,
      status,
      clusterDetails
    )
  }

  def handleHeartbeat(): ExecutorSession = this.copy(
    state = state.handleHeartbeat(Instant.now())
  )

  def kill(): Unit = {
    sparkAppHandleOpt.foreach(_.kill())
    outputInterceptorHandle.close()
  }

} 
Example 134
Source File: E2ESpec.scala    From sqs-kafka-connect   with Apache License 2.0 5 votes vote down vote up
package com.hivehome.kafka.connect.sqs

import java.time.Instant

import org.scalatest.{FunSuite, Matchers}
import org.slf4j.LoggerFactory

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


class E2ESpec extends FunSuite with Matchers with SQSSupport {
  val logger = LoggerFactory.getLogger(getClass.getName)
  private val KafkaTopic: String = "connect-test"
  override val queueName = "test-sqs" // kafka connect should be setup with this SQS
  queueUrl = sqs.getQueueUrl(queueName).getQueueUrl

  private val props = Map(
    "bootstrap.servers" -> sys.env.getOrElse("KAFKA", "localhost:9092"),
    "schema.registry.url" -> sys.env.getOrElse("SCHEMA_REGISTRY", "http://localhost:8081"))

  val consumer = KafkaAvroConsumer[String, String](props, topicName = KafkaTopic)

  // Test is ignored because it does not run without dependent services
  ignore("should route message SQS -> Kafka") {
    Future {
      // sleep is required so that the message to SQS
      // is sent after the consumer is listening on the kafka topic
      Thread.sleep(500)
      logger.debug("sending message..")
      sendMessage(Instant.now().toString)
      logger.debug("sent message..")
    }

    val msgs = consumer.poll(1, accept = _ => true)

    msgs should have size 1
  }
} 
Example 135
Source File: TimePeriod.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.stages.impl.feature

import java.time.temporal.WeekFields
import java.time.{Instant, LocalDateTime, ZoneId}

import com.salesforce.op.utils.date.DateTimeUtils
import enumeratum.{Enum, EnumEntry}

case class TimePeriodVal(value: Int, min: Int, max: Int)

sealed abstract class TimePeriod(extractFn: LocalDateTime => TimePeriodVal) extends EnumEntry with Serializable {
  def extractTimePeriodVal(millis: Long): TimePeriodVal = extractFn(
    Instant
      .ofEpochMilli(millis)
      .atZone(ZoneId.of(DateTimeUtils.DefaultTimeZone.toString)).toLocalDateTime)

  def extractIntFromMillis(millis: Long): Int = extractTimePeriodVal(millis).value
}

object TimePeriod extends Enum[TimePeriod] {
  @transient val weekFields = WeekFields.of(java.time.DayOfWeek.MONDAY, 1)

  val values: Seq[TimePeriod] = findValues
  case object DayOfMonth extends TimePeriod(dt => TimePeriodVal(dt.getDayOfMonth, 1, 31))
  case object DayOfWeek extends TimePeriod(dt => TimePeriodVal(dt.getDayOfWeek.getValue, 1, 7))
  case object DayOfYear extends TimePeriod(dt => TimePeriodVal(dt.getDayOfYear, 1, 366))
  case object HourOfDay extends TimePeriod(dt => TimePeriodVal(dt.getHour, 0, 24))
  case object MonthOfYear extends TimePeriod(dt => TimePeriodVal(dt.getMonthValue, 1, 12))
  case object WeekOfMonth extends TimePeriod(dt => TimePeriodVal(dt.get(weekFields.weekOfMonth()), 1, 6))
  case object WeekOfYear extends TimePeriod(dt => TimePeriodVal(dt.get(weekFields.weekOfYear()), 1, 53))
} 
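A short usage sketch for the enum above; the chosen instant is arbitrary and the expected values assume the library's default time zone resolves it to the same calendar day.

import java.time.Instant

import com.salesforce.op.stages.impl.feature.TimePeriod

object TimePeriodSketch extends App {
  val millis = Instant.parse("2018-03-08T18:00:00Z").toEpochMilli

  // 2018-03-08 is a Thursday, so DayOfWeek yields 4 (Monday = 1 ... Sunday = 7),
  // assuming the default time zone keeps this instant on the same calendar day.
  println(TimePeriod.DayOfWeek.extractIntFromMillis(millis))

  // The full bounded value, e.g. TimePeriodVal(4, 1, 7).
  println(TimePeriod.DayOfWeek.extractTimePeriodVal(millis))
}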
Example 136
Source File: ResourceAccessControlList.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.client.types

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.client.config.Contexts._
import ch.epfl.bluebrain.nexus.iam.client.config.IamClientConfig
import ch.epfl.bluebrain.nexus.iam.client.config.Vocabulary._
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe._
import io.circe.syntax._


final case class ResourceAccessControlList(
    id: AbsoluteIri,
    rev: Long,
    types: Set[AbsoluteIri],
    createdAt: Instant,
    createdBy: Subject,
    updatedAt: Instant,
    updatedBy: Subject,
    value: AccessControlList
)
object ResourceAccessControlList {

  implicit def resourceAccessControlListEncoder(implicit config: IamClientConfig): Encoder[ResourceAccessControlList] =
    Encoder.encodeJson.contramap {
      case ResourceAccessControlList(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, acl) =>
        val jsonTypes = types.toList match {
          case Nil      => Json.Null
          case t :: Nil => Json.fromString(t.lastSegment.getOrElse(t.asString))
          case _        => Json.arr(types.map(t => Json.fromString(t.lastSegment.getOrElse(t.asString))).toSeq: _*)
        }
        Json
          .obj(
            "@id"                -> id.asJson,
            "@type"              -> jsonTypes,
            nxv.rev.prefix       -> Json.fromLong(rev),
            nxv.createdBy.prefix -> createdBy.id.asJson,
            nxv.updatedBy.prefix -> updatedBy.id.asJson,
            nxv.createdAt.prefix -> Json.fromString(createdAt.toString),
            nxv.updatedAt.prefix -> Json.fromString(updatedAt.toString)
          )
          .addContext(resourceCtxUri)
          .addContext(iamCtxUri) deepMerge acl.asJson
    }

  implicit def resourceAccessControlListDecoder: Decoder[ResourceAccessControlList] =
    Decoder.instance { hc =>
      def toSubject(id: AbsoluteIri): Decoder.Result[Subject] =
        Identity(id)
          .collect { case s: Subject => s }
          .toRight(DecodingFailure(s"wrong subject with id '${id.asString}'", hc.history))
      def decodeTypes(cursor: HCursor): Decoder.Result[Set[AbsoluteIri]] =
        cursor
          .get[Set[String]]("@type")
          .orElse(cursor.get[String]("@type").map(str => Set(str)))
          .orElse(Right(Set.empty))
          .map(types => types.map(tpe => Iri.absolute(tpe).getOrElse(nxv.base + tpe)))
      for {
        id        <- hc.get[AbsoluteIri]("@id")
        types     <- decodeTypes(hc)
        rev       <- hc.get[Long](nxv.rev.prefix)
        createdBy <- hc.get[AbsoluteIri](nxv.createdBy.prefix).flatMap(toSubject)
        updatedBy <- hc.get[AbsoluteIri](nxv.updatedBy.prefix).flatMap(toSubject)
        createdAt <- hc.get[Instant](nxv.createdAt.prefix)
        updatedAt <- hc.get[Instant](nxv.updatedAt.prefix)
        acl       <- hc.value.as[AccessControlList]
      } yield ResourceAccessControlList(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, acl)
    }

  private[ResourceAccessControlList] implicit class AbsoluteIriSyntax(private val iri: AbsoluteIri) extends AnyVal {
    def lastSegment: Option[String] =
      iri.path.head match {
        case segment: String => Some(segment)
        case _               => None
      }
  }
} 
Example 137
Source File: PermissionsEvent.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.permissions

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Contexts._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.iam.types.{Identity, Permission}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import com.github.ghik.silencer.silent
import io.circe.Encoder
import io.circe.generic.extras.Configuration


  final case class PermissionsDeleted(
      rev: Long,
      instant: Instant,
      subject: Subject
  ) extends PermissionsEvent

  object JsonLd {
    import io.circe.generic.extras.semiauto._

    @silent // defined implicits are not recognized as being used
    implicit def permissionsEventEncoder(implicit http: HttpConfig): Encoder[Event] = {
      implicit val config: Configuration = Configuration.default
        .withDiscriminator("@type")
        .copy(transformMemberNames = {
          case "rev"     => "_rev"
          case "instant" => "_instant"
          case "subject" => "_subject"
          case other     => other
        })
      implicit val subjectEncoder: Encoder[Subject] = Identity.subjectIdEncoder
      deriveConfiguredEncoder[Event]
        .mapJson { json =>
          json
            .addContext(iamCtxUri)
            .addContext(resourceCtxUri)
        }
    }
  }
} 
Example 138
Source File: PermissionsState.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.permissions

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.AppConfig.{HttpConfig, PermissionsConfig}
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsState.{Current, Initial}
import ch.epfl.bluebrain.nexus.iam.types.Identity.{Anonymous, Subject}
import ch.epfl.bluebrain.nexus.iam.types.{Permission, ResourceMetadata}
import com.github.ghik.silencer.silent


  final case class Current(
      rev: Long,
      permissions: Set[Permission],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ) extends PermissionsState {

    override def resource(implicit http: HttpConfig, @silent pc: PermissionsConfig): Resource =
      resourceMetadata.map(_ => permissions)

    override def resourceMetadata(implicit http: HttpConfig): ResourceMetadata =
      ResourceMetadata(id, rev, types, createdAt, createdBy, updatedAt, updatedBy)
  }
} 
Example 139
Source File: AclEvent.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.acls

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Contexts._
import ch.epfl.bluebrain.nexus.iam.types.Identity
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.rdf.implicits._
import com.github.ghik.silencer.silent
import io.circe.Encoder
import io.circe.generic.extras.Configuration


  final case class AclDeleted(
      path: Path,
      rev: Long,
      instant: Instant,
      subject: Subject
  ) extends AclEvent

  object JsonLd {
    import io.circe.generic.extras.semiauto._

    @silent // defined implicits are not recognized as being used
    implicit def aclEventEncoder(implicit httpConfig: HttpConfig): Encoder[AclEvent] = {
      implicit val config: Configuration = Configuration.default
        .withDiscriminator("@type")
        .copy(transformMemberNames = {
          case "rev"     => "_rev"
          case "instant" => "_instant"
          case "subject" => "_subject"
          case "path"    => "_path"
          case other     => other
        })
      implicit val arrayEncoder: Encoder[AccessControlList] = AccessControlList.aclArrayEncoder
      implicit val subjectEncoder: Encoder[Subject]         = Identity.subjectIdEncoder
      deriveConfiguredEncoder[AclEvent]
        .mapJson { json =>
          json
            .addContext(iamCtxUri)
            .addContext(resourceCtxUri)
        }
    }
  }
} 
Example 140
Source File: package.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.AppConfig.{HttpConfig, PermissionsConfig}
import ch.epfl.bluebrain.nexus.iam.config.Vocabulary._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.{Permission, ResourceF, ResourceMetadata}
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.sourcing.Aggregate

package object acls {

  
  def defaultResourceOnSlash(implicit http: HttpConfig, pc: PermissionsConfig): Resource =
    ResourceF(
      http.aclsIri + "/",
      0L,
      types,
      Instant.EPOCH,
      Anonymous,
      Instant.EPOCH,
      Anonymous,
      AccessControlList(Anonymous -> pc.minimum)
    )
} 
Example 141
Source File: ResourceMetadata.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri

object ResourceMetadata {

  
  def apply(
      id: AbsoluteIri,
      rev: Long,
      types: Set[AbsoluteIri],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ): ResourceMetadata =
    ResourceF.unit(id, rev, types, createdAt, createdBy, updatedAt, updatedBy)
} 
Example 142
Source File: ResourceF.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.Instant

import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Contexts._
import ch.epfl.bluebrain.nexus.iam.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.iam.syntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.syntax._
import io.circe.{Encoder, Json}


  def unit(
      id: AbsoluteIri,
      rev: Long,
      types: Set[AbsoluteIri],
      createdAt: Instant,
      createdBy: Subject,
      updatedAt: Instant,
      updatedBy: Subject
  ): ResourceF[Unit] =
    ResourceF(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, ())

  implicit val permsEncoder: Encoder[Set[Permission]] =
    Encoder.instance(perms => Json.obj("permissions" -> Json.fromValues(perms.toList.sortBy(_.value).map(_.asJson))))

  implicit def resourceFEncoder[A: Encoder](implicit http: HttpConfig): Encoder[ResourceF[A]] =
    Encoder.encodeJson.contramap { r =>
      resourceMetaEncoder.apply(r.discard) deepMerge r.value.asJson
    }

  implicit def resourceMetaEncoder(implicit http: HttpConfig): Encoder[ResourceMetadata] =
    Encoder.encodeJson.contramap {
      case ResourceF(id, rev, types, createdAt, createdBy, updatedAt, updatedBy, _: Unit) =>
        val jsonTypes = types.toList match {
          case Nil      => Json.Null
          case t :: Nil => Json.fromString(t.lastSegment.getOrElse(t.asString))
          case _        => Json.arr(types.map(t => Json.fromString(t.lastSegment.getOrElse(t.asString))).toSeq: _*)
        }
        Json
          .obj(
            "@id"                -> id.asJson,
            "@type"              -> jsonTypes,
            nxv.rev.prefix       -> Json.fromLong(rev),
            nxv.createdBy.prefix -> createdBy.id.asJson,
            nxv.updatedBy.prefix -> updatedBy.id.asJson,
            nxv.createdAt.prefix -> Json.fromString(createdAt.toString),
            nxv.updatedAt.prefix -> Json.fromString(updatedAt.toString)
          )
          .addContext(iamCtxUri)
          .addContext(resourceCtxUri)
    }
} 
Example 143
Source File: ResourceFSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.types

import java.time.{Clock, Instant, ZoneId}

import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Resources}
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.iam.config.Vocabulary._
import ch.epfl.bluebrain.nexus.iam.testsyntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.User
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Printer
import io.circe.syntax._
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

//noinspection TypeAnnotation
class ResourceFSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues with Resources {

  "A ResourceMetadata" should {
    val user          = User("mysubject", "myrealm")
    val user2         = User("mysubject2", "myrealm")
    implicit val http = HttpConfig("some", 8080, "v1", "http://nexus.example.com")
    val clock: Clock  = Clock.fixed(Instant.ofEpochSecond(3600), ZoneId.systemDefault())
    val instant       = clock.instant()
    val id            = url"http://example.com/id"
    val printer       = Printer.spaces2.copy(dropNullValues = true)

    "be converted to Json correctly" when {
      "using multiple types" in {
        val json  = jsonContentOf("/resources/write-response.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList, nxv.Realm), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using a single type" in {
        val json  = jsonContentOf("/resources/write-response-singletype.json")
        val model = ResourceMetadata(id, 1L, Set(nxv.AccessControlList), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
      "using no types" in {
        val json  = jsonContentOf("/resources/write-response-notypes.json")
        val model = ResourceMetadata(id, 1L, Set(), instant, user, instant, user2)
        model.asJson.sort.printWith(printer) shouldEqual json.printWith(printer)
      }
    }
  }
} 
Example 144
Source File: TaggingAdapterSpec.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.io

import java.time.Instant

import akka.persistence.journal.Tagged
import ch.epfl.bluebrain.nexus.commons.test.EitherValues
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent.AclDeleted
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent.PermissionsDeleted
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent.RealmDeprecated
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.Label
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class TaggingAdapterSpec extends AnyWordSpecLike with Matchers with Inspectors with EitherValues {

  private val pd = PermissionsDeleted(2L, Instant.EPOCH, Anonymous)
  private val ad = AclDeleted(Path("/a/b/c").rightValue, 2L, Instant.EPOCH, Anonymous)
  private val rd = RealmDeprecated(Label.unsafe("blah"), 2L, Instant.EPOCH, Anonymous)

  private val data = Map[AnyRef, (String, AnyRef)](
    pd  -> ("permissions-event" -> Tagged(pd, Set("permissions", "event"))),
    ad  -> ("acl-event"         -> Tagged(ad, Set("acl", "event"))),
    rd  -> ("realm-event"       -> Tagged(rd, Set("realm", "event"))),
    "a" -> (""                  -> "a")
  )

  "A TaggingAdapter" should {
    val adapter = new TaggingAdapter
    "return the correct manifests" in {
      forAll(data.toList) {
        case (event, (manifest, _)) => adapter.manifest(event) shouldEqual manifest
      }
    }
    "return the correct transformed event" in {
      forAll(data.toList) {
        case (event, (_, transformed)) => adapter.toJournal(event) shouldEqual transformed
      }
    }
  }

} 
Example 145
Source File: ServiceMetadataDocumentGenerator.scala    From haystack-traces   with Apache License 2.0 5 votes vote down vote up
package com.expedia.www.haystack.trace.indexer.writers.es

import java.time.Instant

import com.expedia.open.tracing.Span
import com.expedia.www.haystack.commons.metrics.MetricsSupport
import com.expedia.www.haystack.trace.commons.clients.es.document.ServiceMetadataDoc
import com.expedia.www.haystack.trace.commons.utils.SpanUtils
import com.expedia.www.haystack.trace.indexer.config.entities.ServiceMetadataWriteConfiguration
import org.apache.commons.lang3.StringUtils

import scala.collection.mutable

class ServiceMetadataDocumentGenerator(config: ServiceMetadataWriteConfiguration) extends MetricsSupport {

  private var serviceMetadataMap = new mutable.HashMap[String, mutable.Set[String]]()
  private var allOperationCount: Int = 0
  private var lastFlushInstant = Instant.MIN

  private def shouldFlush: Boolean = {
    config.flushIntervalInSec == 0 || Instant.now().minusSeconds(config.flushIntervalInSec).isAfter(lastFlushInstant)
  }

  private def areStatementsReadyToBeExecuted(): Seq[ServiceMetadataDoc] = {
    if (serviceMetadataMap.nonEmpty && (shouldFlush || allOperationCount > config.flushOnMaxOperationCount)) {
      val statements = serviceMetadataMap.flatMap {
        case (serviceName, operationList) =>
          createServiceMetadataDoc(serviceName, operationList)
      }

      lastFlushInstant = Instant.now()
      serviceMetadataMap = new mutable.HashMap[String, mutable.Set[String]]()
      allOperationCount = 0
      statements.toSeq
    } else {
      Nil
    }
  }

  
  // Builds one metadata document per operation recorded for the given service.
  def createServiceMetadataDoc(serviceName: String, operationList: mutable.Set[String]): List[ServiceMetadataDoc] = {
    operationList.map(operationName => ServiceMetadataDoc(serviceName, operationName)).toList
  }
} 
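Note: the flush decision above compares Instant.now() shifted back by the configured interval against the last flush time. A standalone sketch of that comparison (the interval value is hypothetical):

import java.time.Instant

object FlushIntervalSketch extends App {
  val flushIntervalInSec = 60L          // hypothetical interval
  var lastFlushInstant   = Instant.MIN  // "never flushed" sentinel, as in the generator above

  def shouldFlush: Boolean =
    flushIntervalInSec == 0 || Instant.now().minusSeconds(flushIntervalInSec).isAfter(lastFlushInstant)

  println(shouldFlush)          // true: Instant.MIN is always in the past
  lastFlushInstant = Instant.now()
  println(shouldFlush)          // false until the interval has elapsed
}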
Example 146
Source File: StateClock.scala    From aecor   with MIT License 5 votes vote down vote up
package aecor.testkit

import java.time.temporal.TemporalAmount
import java.time.{ Instant, ZoneId }

import aecor.util.Clock
import cats.mtl.MonadState
import monocle.Lens

class StateClock[F[_]: MonadState[*[_], S], S](zoneId: ZoneId, S: Lens[S, Instant])
    extends Clock[F] {
  private val F = S.transformMonadState(MonadState[F, S])
  override def zone: F[ZoneId] = F.monad.pure(zoneId)
  override def instant: F[Instant] = F.get
  def tick(temporalAmount: TemporalAmount): F[Unit] =
    F.modify(_.plus(temporalAmount))
}

object StateClock {
  def apply[F[_], S](zoneId: ZoneId,
                     S: Lens[S, Instant])(implicit F0: MonadState[F, S]): StateClock[F, S] =
    new StateClock[F, S](zoneId, S)
} 
Example 147
Source File: ScheduleEventCodecSpec.scala    From aecor   with MIT License 5 votes vote down vote up
package aecor.tests

import java.time.temporal.{ ChronoField, Temporal }
import java.time.{ Instant, LocalDateTime }

import aecor.runtime.akkapersistence.serialization.{ PersistentDecoder, PersistentEncoder }
import aecor.schedule.ScheduleEvent
import org.scalacheck.{ Arbitrary, Gen, Properties, ScalacheckShapeless }
import org.scalacheck.Prop.forAll

class ScheduleEventCodecSpec extends Properties("ScheduleEventCodec") with ScalacheckShapeless {
  val encoder = PersistentEncoder[ScheduleEvent]
  val decoder = PersistentDecoder[ScheduleEvent]

  // OpenJDK 9+ offers more precise system clock than millisecond.
  // https://bugs.openjdk.java.net/browse/JDK-8068730
  def dropBelowMillis[A <: Temporal](t: A): A =
    t.`with`(ChronoField.MICRO_OF_SECOND, t.getLong(ChronoField.MILLI_OF_SECOND) * 1000L)
      .asInstanceOf[A]

  implicit val arbitraryLocalDateTime = Arbitrary(
    Gen.lzy(Gen.const(dropBelowMillis(LocalDateTime.now())))
  )
  implicit val arbitraryInstant = Arbitrary(Gen.lzy(Gen.const(dropBelowMillis(Instant.now()))))

  property("encode/decode") = forAll { e: ScheduleEvent =>
    val repr = encoder.encode(e)
    val decoded = decoder.decode(repr)
    decoded == Right(e)
  }

} 
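Note: since OpenJDK 9+ clocks can return sub-millisecond precision, values that pass through a millisecond-based encoding lose the extra digits. Besides the ChronoField rewrite above, java.time also offers truncatedTo; a small sketch:

import java.time.Instant
import java.time.temporal.ChronoUnit

object TruncateSketch extends App {
  val precise = Instant.now()                          // may carry micro/nano precision on JDK 9+
  val millis  = precise.truncatedTo(ChronoUnit.MILLIS) // drop everything below milliseconds

  // After truncation the value survives an epoch-millis round trip unchanged.
  assert(Instant.ofEpochMilli(millis.toEpochMilli) == millis)
}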
Example 148
Source File: JavaTimeClock.scala    From aecor   with MIT License 5 votes vote down vote up
package aecor.util

import java.time.{ Instant, ZoneId }

import cats.effect.Sync

class JavaTimeClock[F[_]](underlying: java.time.Clock)(implicit F: Sync[F]) extends Clock[F] {
  override def zone: F[ZoneId] = F.delay(underlying.getZone)
  override def instant: F[Instant] = F.delay(underlying.instant())
}

object JavaTimeClock {
  def apply[F[_]: Sync](underlying: java.time.Clock): Clock[F] =
    new JavaTimeClock[F](underlying)
  def systemDefault[F[_]: Sync]: Clock[F] = apply(java.time.Clock.systemDefaultZone())
  def systemUTC[F[_]: Sync]: Clock[F] = apply(java.time.Clock.systemUTC())
} 
Example 149
Source File: DateEncoderTest.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package com.sksamuel.avro4s.record.encoder

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.{AvroSchema, DefaultFieldMapper, Encoder, ImmutableRecord}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

//noinspection ScalaDeprecation
class DateEncoderTest extends AnyFunSuite with Matchers {

  test("encode LocalTime as TIME-MILLIS") {
    case class Foo(s: LocalTime)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalTime.of(12, 50, 45))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(46245000000L)))
  }

  test("encode LocalDate as DATE") {
    case class Foo(s: LocalDate)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalDate.of(2018, 9, 10))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784)))
  }

  test("encode java.sql.Date as DATE") {
    case class Foo(s: Date)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Date.valueOf(LocalDate.of(2018, 9, 10)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784)))
  }

  test("encode LocalDateTime as timestamp-nanos") {
    case class Foo(s: LocalDateTime)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000000123L)))
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123009))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000123009L)))
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 328187943))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739328187943L)))
  }

  test("encode Timestamp as TIMESTAMP-MILLIS") {
    case class Foo(s: Timestamp)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Timestamp.from(Instant.ofEpochMilli(1538312231000L)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L)))
  }

  test("encode Instant as TIMESTAMP-MILLIS") {
    case class Foo(s: Instant)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Instant.ofEpochMilli(1538312231000L))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L)))
  }
} 
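Note: the numeric expectations in the test above follow directly from java.time conversions; a quick sketch (not avro4s code):

import java.time.{Instant, LocalDate, LocalTime}

object DateNumbersSketch extends App {
  // DATE is the number of days since the epoch.
  assert(LocalDate.of(2018, 9, 10).toEpochDay == 17784L)

  // 12:50:45 is 46245 seconds into the day.
  assert(LocalTime.of(12, 50, 45).toSecondOfDay == 46245)

  // TIMESTAMP-MILLIS is simply epoch milliseconds.
  assert(Instant.ofEpochMilli(1538312231000L).toEpochMilli == 1538312231000L)
}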
Example 150
Source File: DateDecoderTest.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package com.sksamuel.avro4s.record.decoder

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.SchemaFor.TimestampNanosLogicalType
import com.sksamuel.avro4s.{AvroSchema, Decoder, SchemaFor}
import org.apache.avro.generic.GenericData
import org.apache.avro.{LogicalTypes, SchemaBuilder}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

//noinspection ScalaDeprecation
class DateDecoderTest extends AnyFunSuite with Matchers {

  case class WithLocalTime(z: LocalTime)
  case class WithLocalDate(z: LocalDate)
  case class WithDate(z: Date)
  case class WithLocalDateTime(z: LocalDateTime)
  case class WithTimestamp(z: Timestamp)
  case class WithInstant(z: Instant)

  test("decode int to LocalTime") {
    val schema = AvroSchema[WithLocalTime]
    val record = new GenericData.Record(schema)
    record.put("z", 46245000000L)
    Decoder[WithLocalTime].decode(record) shouldBe WithLocalTime(LocalTime.of(12, 50, 45))
  }

  test("decode int to LocalDate") {
    val schema = AvroSchema[WithLocalDate]
    val record = new GenericData.Record(schema)
    record.put("z", 17784)
    Decoder[WithLocalDate].decode(record) shouldBe WithLocalDate(LocalDate.of(2018, 9, 10))
  }

  test("decode int to java.sql.Date") {
    val schema = AvroSchema[WithDate]
    val record = new GenericData.Record(schema)
    record.put("z", 17784)
    Decoder[WithDate].decode(record) shouldBe WithDate(Date.valueOf(LocalDate.of(2018, 9, 10)))
  }

  test("decode timestamp-millis to LocalDateTime") {
    val dateSchema = LogicalTypes.timestampMillis().addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376L)
    Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000000))
  }

  test("decode timestamp-micros to LocalDateTime") {
    val dateSchema = LogicalTypes.timestampMicros().addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376001L)
    Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376001000))
  }

  test("decode timestamp-nanos to LocalDateTime") {
    val dateSchema = TimestampNanosLogicalType.addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376000002L)
    Decoder[WithLocalDateTime].decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000002))
  }

  test("decode long to Timestamp") {
    val schema = AvroSchema[WithTimestamp]
    val record = new GenericData.Record(schema)
    record.put("z", 1538312231000L)
    Decoder[WithTimestamp].decode(record) shouldBe WithTimestamp(new Timestamp(1538312231000L))
  }

  test("decode long to Instant") {
    val schema = AvroSchema[WithInstant]
    val record = new GenericData.Record(schema)
    record.put("z", 1538312231000L)
    Decoder[WithInstant].decode(record) shouldBe WithInstant(Instant.ofEpochMilli(1538312231000L))
  }
} 
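Note: decoding goes the other way, turning an epoch-based long back into a java.time value. A minimal sketch using only the standard library (names are illustrative):

import java.time.{Instant, LocalDateTime, ZoneOffset}

object EpochToLocalDateTimeSketch extends App {
  val epochMillis = 1538312231000L
  val instant     = Instant.ofEpochMilli(epochMillis)

  // Interpreting the instant at UTC yields a LocalDateTime; other offsets shift the wall-clock time.
  val utc = LocalDateTime.ofInstant(instant, ZoneOffset.UTC)
  println(utc)
}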
Example 151
Source File: DateSchemaTest.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package com.sksamuel.avro4s.schema

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.AvroSchema
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class DateSchemaTest extends AnyFunSuite with Matchers {

  test("generate date logical type for LocalDate") {
    case class LocalDateTest(date: LocalDate)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdate.json"))
    val schema = AvroSchema[LocalDateTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate date logical type for Date") {
    case class DateTest(date: Date)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/date.json"))
    val schema = AvroSchema[DateTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate time logical type for LocalTime") {
    case class LocalTimeTest(time: LocalTime)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localtime.json"))
    val schema = AvroSchema[LocalTimeTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-nanos for LocalDateTime") {
    case class LocalDateTimeTest(time: LocalDateTime)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdatetime.json"))
    val schema = AvroSchema[LocalDateTimeTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-millis logical type for Instant") {
    case class InstantTest(instant: Instant)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/instant.json"))
    val schema = AvroSchema[InstantTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-millis logical type for Timestamp") {
    case class TimestampTest(ts: Timestamp)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/timestamp.json"))
    val schema = AvroSchema[TimestampTest]
    schema.toString(true) shouldBe expected.toString(true)
  }
} 
Example 152
Source File: MatchersProperties.scala    From cornichon   with Apache License 2.0 5 votes vote down vote up
package com.github.agourlay.cornichon.matchers

import java.time.Instant
import java.time.format.DateTimeFormatter

import com.github.agourlay.cornichon.matchers.Matchers._
import io.circe.Json
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalacheck._
import org.scalacheck.Prop._
import org.typelevel.claimant.Claim

object MatchersProperties extends Properties("Matchers") {

  val reasonablyRandomInstantGen: Gen[Instant] = for {
    randomOffset <- Arbitrary.arbLong.arbitrary
  } yield Instant.now().plusMillis(randomOffset % 1000000000000L)

  val instantGen: Gen[Instant] = for {
    randomOffset <- Arbitrary.arbLong.arbitrary
  } yield Instant.now().plusMillis(randomOffset)

  property("any-integer correct for any int") =
    forAll(Gen.size) { int =>
      Claim {
        anyInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-positive-integer correct for any positive int") =
    forAll(Gen.choose(1, Int.MaxValue)) { int =>
      Claim {
        anyPositiveInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-positive-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyPositiveInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-negative-integer correct for any negative int") =
    forAll(Gen.negNum[Int]) { int =>
      Claim {
        anyNegativeInteger.predicate(Json.fromInt(int))
      }
    }

  property("any-negative-integer incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyNegativeInteger.predicate(Json.fromString(alphanum))
      }
    }

  property("any-uuid correct for any valid UUID") =
    forAll(Gen.uuid) { uuid =>
      Claim {
        anyUUID.predicate(Json.fromString(uuid.toString))
      }
    }

  property("any-uuid incorrect for any alphanum string") =
    forAll(Gen.alphaNumStr) { alphanum =>
      Claim {
        !anyUUID.predicate(Json.fromString(alphanum))
      }
    }

  property("any-date-time correct for all ISO-compliant values, including Y10K+ dates") =
    forAll(instantGen) { instant =>
      Claim {
        anyDateTime.predicate(Json.fromString(DateTimeFormatter.ISO_INSTANT.format(instant)))
      }
    }

  property("any-date-time correct in parallel") = {
    forAll(reasonablyRandomInstantGen) { instant =>
      val booleans = 1.to(64).map { _ =>
        Task.delay {
          anyDateTime.predicate(Json.fromString(DateTimeFormatter.ISO_INSTANT.format(instant)))
        }
      }

      val res = Task.parSequenceUnordered(booleans).runSyncUnsafe().foldLeft(List.empty[Boolean]) { case (acc, e) => e :: acc }

      Claim(res.forall(_ == true))
    }
  }
} 
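Note: the matcher above accepts strings produced by DateTimeFormatter.ISO_INSTANT; formatting and parsing are symmetric, as this small sketch shows:

import java.time.Instant
import java.time.format.DateTimeFormatter

object IsoInstantSketch extends App {
  val now  = Instant.now()
  val text = DateTimeFormatter.ISO_INSTANT.format(now)

  // Instant.parse uses the same ISO-8601 instant format, so the round trip is lossless.
  assert(Instant.parse(text) == now)
  println(text) // e.g. 2019-11-02T15:05:06.376Z
}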
Example 153
Source File: CustomScalars.scala    From graphql-gateway   with Apache License 2.0 5 votes vote down vote up
package sangria.gateway.schema

import java.time.format.DateTimeFormatter
import java.time.{Instant, OffsetDateTime, ZoneOffset, ZonedDateTime}

import sangria.schema._
import sangria.ast
import sangria.validation.ValueCoercionViolation

import scala.util.{Failure, Success, Try}

object CustomScalars {
  implicit val DateTimeType = ScalarType[ZonedDateTime]("DateTime",
    description = Some("DateTime is a scalar value that represents an ISO8601 formatted date and time."),
    coerceOutput = (date, _) ⇒ DateTimeFormatter.ISO_INSTANT.format(date),
    coerceUserInput = {
      case s: String ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    },
    coerceInput = {
      case ast.StringValue(s, _, _, _, _) ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    })

  def parseDateTime(s: String) = Try(ZonedDateTime.parse(s, DateTimeFormatter.ISO_ZONED_DATE_TIME))

  case object DateCoercionViolation extends ValueCoercionViolation("Date value expected")
} 
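Note: once a ZonedDateTime is parsed, converting it to an Instant (for storage or comparison) is a one-liner; a small sketch with an illustrative timestamp:

import java.time.{Instant, ZonedDateTime}
import java.time.format.DateTimeFormatter

object ZonedToInstantSketch extends App {
  val zdt: ZonedDateTime = ZonedDateTime.parse(
    "2018-09-10T11:58:59+02:00[Europe/Paris]", DateTimeFormatter.ISO_ZONED_DATE_TIME)

  // toInstant drops the zone and keeps the point on the timeline.
  val instant: Instant = zdt.toInstant
  println(DateTimeFormatter.ISO_INSTANT.format(instant)) // 2018-09-10T09:58:59Z
}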
Example 154
Source File: AtomLogger.scala    From tofu   with Apache License 2.0 5 votes vote down vote up
package tofu.logging.atom
import java.time.Instant
import java.util.concurrent.TimeUnit

import cats.effect.Clock
import cats.{Applicative, FlatMap}
import tofu.concurrent.Atom
import tofu.higherKind.Embed
import tofu.logging.{LoggedValue, Logging, Logs}
import tofu.syntax.monadic._

import scala.reflect.{ClassTag, classTag}

final case class LogLine(
    loggerName: String,
    level: Logging.Level,
    message: String,
    timestamp: Instant,
    values: Vector[LoggedValue],
)

class AtomLogging[F[_]: FlatMap: Clock](log: Atom[F, Vector[LogLine]], name: String) extends Logging[F] {
  override def write(level: Logging.Level, message: String, values: LoggedValue*): F[Unit] =
    Clock[F].realTime(TimeUnit.MILLISECONDS).flatMap { time =>
      log.update(
        _ :+ LogLine(
          loggerName = name,
          level = level,
          message = message,
          timestamp = Instant.ofEpochMilli(time),
          values = values.toVector
        )
      )
    }

}

final case class AtomLogs[I[_]: Applicative, F[_]: FlatMap: Clock](flog: F[Atom[F, Vector[LogLine]]])
    extends Logs[I, F] {
  def forService[Svc: ClassTag]: I[Logging[F]] = byName(classTag[Svc].runtimeClass.getName)
  def byName(name: String): I[Logging[F]]      =
    Embed.of(flog.map[Logging[F]](new AtomLogging[F](_, name))).pure[I]
} 
Example 155
Source File: Merge.scala    From tofu   with Apache License 2.0 5 votes vote down vote up
package tofu.data
package derived

import java.time.{Instant, LocalDate, LocalDateTime, ZonedDateTime}

import cats.kernel.Semigroup
import magnolia.{CaseClass, Magnolia, SealedTrait}
import simulacrum.typeclass
import derevo.Derivation

@typeclass trait Merge[A] {
  def merge(a: A, b: A): A
}

trait MergeInstances1 {
  type Typeclass[A] = Merge[A]

  def combine[T](caseClass: CaseClass[Typeclass, T]): Typeclass[T] =
    (a, b) => caseClass.construct(p => p.typeclass.merge(p.dereference(a), p.dereference(b)))

  def dispatch[T](sealedTrait: SealedTrait[Typeclass, T]): Typeclass[T] =
    (a, b) => sealedTrait.dispatch(a) { h => if (h.cast.isDefinedAt(b)) h.typeclass.merge(h.cast(a), h.cast(b)) else a }

  implicit def instance[A]: Merge[A] = macro Magnolia.gen[A]
}

object Merge extends Derivation[Merge] with MergeInstances1 {
  implicit def optionInstance[A](implicit m: Merge[A]): Merge[Option[A]] =
    (ao, bo) => ao.fold(bo)(a => bo.fold(ao)(b => Some(m.merge(a, b))))

  implicit def primitiveInstance[A: Primitive]: Merge[A] = (a: A, _: A) => a

  sealed class Primitive[A]
  final implicit object primitiveByte          extends Primitive[Byte]
  final implicit object primitiveShort         extends Primitive[Short]
  final implicit object primitiveInt           extends Primitive[Int]
  final implicit object primitiveLong          extends Primitive[Long]
  final implicit object primitiveChar          extends Primitive[Char]
  final implicit object primitiveFloat         extends Primitive[Float]
  final implicit object primitiveDouble        extends Primitive[Double]
  final implicit object primitiveUnit          extends Primitive[Unit]
  final implicit object primitiveBigDecimal    extends Primitive[BigDecimal]
  final implicit object primitiveBigInt        extends Primitive[BigInt]
  final implicit object primitiveLocalDateTime extends Primitive[LocalDateTime]
  final implicit object primitiveZonedDateTime extends Primitive[ZonedDateTime]
  final implicit object primitiveLocalDate     extends Primitive[LocalDate]
  final implicit object primitiveInstant       extends Primitive[Instant]
  final implicit object primitiveString        extends Primitive[String]
}

object Merged {
  trait OpaqueTag extends Any
  type Base = Any { type MergedOpaque }

  type Mer[A] <: Base with OpaqueTag

  def apply[A](value: A): Mer[A] = value.asInstanceOf[Mer[A]]

  implicit final class MergedOps[A](private val mer: Mer[A]) extends AnyVal {
    def value: A = mer.asInstanceOf[A]
  }

  implicit def mergedSemigroup[A: Merge]: Semigroup[Merged[A]] =
    (x, y) => apply(Merge[A].merge(x.value, y.value))
} 
Example 156
Source File: PublisherImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate_server

import java.time.Instant

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.LifeTypes.Alive
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor.{Context, GetByPK, LEvent, MortalFactory, SleepUntilKey, LTxAdd, TxTransform, WithPK}
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Assemble, c4assemble}
import ee.cone.c4di.{c4, c4multi, provide}
import ee.cone.c4gate.ByPathHttpPublication
import ee.cone.c4gate.HttpProtocol.{S_HttpPublicationV1, S_HttpPublicationV2, S_Manifest}

@c4multi("AbstractHttpGatewayApp") final case class PublicationPurgerTx(srcId: SrcId = "PublicationPurgerTx")(
  getS_Manifest: GetByPK[S_Manifest],
  txAdd: LTxAdd,
) extends TxTransform with LazyLogging {
  def transform(local: Context): Context = {
    val now = System.currentTimeMillis
    val events = getS_Manifest.ofA(local).values
      .filter(_.until<now).toSeq.sortBy(_.srcId).flatMap(LEvent.delete)
    txAdd.add(events).andThen(SleepUntilKey.set(Instant.ofEpochMilli(now+15*1000)))(local)
  }
}
@c4assemble("AbstractHttpGatewayApp") class PublicationPurgerAssembleBase(
  factory: PublicationPurgerTxFactory
) {
  def joinTx(
    key: SrcId,
    firstborn: Each[S_Firstborn]
  ): Values[(SrcId, TxTransform)] =
    List(WithPK(factory.create()))

  def noLife(
    key: SrcId,
    firstborn: Each[S_Firstborn]
  ): Values[(Alive, S_HttpPublicationV1)] = Nil
}

@c4("AbstractHttpGatewayApp") final class PublisherAssembles(mortal: MortalFactory) {
  @provide def subAssembles: Seq[Assemble] =
    List(mortal(classOf[S_HttpPublicationV1]), mortal(classOf[S_HttpPublicationV2]))
} 
Example 157
Source File: PurgerImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate_server

import java.nio.file.{Files, Path, Paths}
import java.time.Instant

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.{Context, SleepUntilKey, TxTransform, WithPK}
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Assemble, assemble, c4assemble}
import ee.cone.c4di.c4

object PurgerDefaultPolicy {
  def apply(): List[KeepPolicy] = {
    val millis = 1L
    val hour = 60L * 60L * 1000L * millis
    val day = 24L * hour
    val week = 7L * day
    List(KeepPolicy(millis, 8), KeepPolicy(hour, 23), KeepPolicy(day, 14), KeepPolicy(week, 14))
  }
}

case class KeepPolicy(period: Long, count: Int)

case class TimedPath(path: Path, mTime: Long)

trait Purger {
  def process(keepPolicyList: List[KeepPolicy]): Unit
}

@c4("SnapshotMakingApp") final class PurgerImpl(
  lister: SnapshotLister, baseDir: DataDir
) extends Purger with LazyLogging {
  def process(keepPolicyList: List[KeepPolicy]): Unit = {
    val files: List[TimedPath] = lister.list.map { snapshot =>
      val path = Paths.get(baseDir.value).resolve(snapshot.raw.relativePath)
      TimedPath(path, Files.getLastModifiedTime(path).toMillis)
    }
    val keepPaths = (for {
      keepPolicy <- keepPolicyList
      keepFile <- files.groupBy(file => file.mTime / keepPolicy.period).values
        .map(_.maxBy(_.mTime)).toList.sortBy(_.mTime).takeRight(keepPolicy.count)
    } yield keepFile.path).toSet

    for {
      path <- files.map(_.path).filterNot(keepPaths)
    } {
      if(Files.deleteIfExists(path)) logger.info(s"removed $path")
    }
    logger.debug("snapshots checked")
  }
}

case class PurgerTx(
  srcId: SrcId, keepPolicyList: List[KeepPolicy]
)(purger: Purger) extends TxTransform {
  def transform(local: Context): Context = {
    purger.process(keepPolicyList)
    SleepUntilKey.set(Instant.now.plusSeconds(60L))(local)
  }
}

@c4assemble("SnapshotMakingApp") class PurgerAssembleBase(purger: Purger)   {
  def joinPurger(
    key: SrcId,
    first: Each[S_Firstborn]
  ): Values[(SrcId,TxTransform)] =
    List(WithPK(PurgerTx("purger",PurgerDefaultPolicy())(purger)))
} 
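Note: the retention logic above buckets snapshot files by mTime / period and keeps the newest entry per bucket. The same grouping can be seen in isolation with plain epoch millis (values are illustrative):

object RetentionBucketSketch extends App {
  final case class TimedPath(name: String, mTime: Long)

  val hour  = 60L * 60L * 1000L
  val files = List(TimedPath("a", 1 * hour + 5), TimedPath("b", 1 * hour + 30), TimedPath("c", 3 * hour))

  // One representative (the newest) per hour-sized bucket, keeping the last two buckets.
  val keep = files.groupBy(_.mTime / hour).values
    .map(_.maxBy(_.mTime)).toList.sortBy(_.mTime).takeRight(2)

  println(keep.map(_.name)) // List(b, c)
}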
Example 158
Source File: SSETest.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4ui

import java.time.Instant

import com.typesafe.scalalogging.LazyLogging

import Function.chain
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor._
import ee.cone.c4actor_branch._
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Assemble, CallerAssemble, c4assemble}
import ee.cone.c4gate.AlienProtocol.U_FromAlienStatus
import ee.cone.c4di.{c4, c4multi, provide}

//println(s"visit http://localhost:${config.get("C4HTTP_PORT")}/sse.html")
@c4("TestSSEApp") final class SSEFromAlienTaskAssembleBase {
  @provide def subAssembles: Seq[Assemble] =
    new FromAlienTaskAssemble("/sse.html") :: Nil
}

@c4assemble("TestSSEApp") class TestSSEAssembleBase(
  factory: TestSSEHandlerFactory
) extends LazyLogging {
  def joinView(
    key: SrcId,
    task: Each[BranchTask]
  ): Values[(SrcId,BranchHandler)] = {
    logger.info(s"joinView ${task}")
    List(WithPK(factory.create(task.branchKey, task)))
  }
}

@c4multi("TestSSEApp") final case class TestSSEHandler(branchKey: SrcId, task: BranchTask)(
  getU_FromAlienStatus: GetByPK[U_FromAlienStatus],
) extends BranchHandler with LazyLogging {
  def exchange: BranchMessage => Context => Context = message => local => {
    val now = Instant.now
    val (keepTo,freshTo) = task.sending(local)
    val send = chain(List(keepTo,freshTo).flatten.map(_("show",s"${now.getEpochSecond}")))
    logger.info(s"TestSSEHandler $keepTo $freshTo")
    getU_FromAlienStatus.ofA(local).values.foreach{ status =>
      logger.info(s"${status.isOnline} ... ${status.expirationSecond - now.getEpochSecond}")
    }
    SleepUntilKey.set(now.plusSeconds(1)).andThen(send)(local)
  }
  def seeds: Context => List[BranchProtocol.S_BranchResult] = _ => Nil
} 
Example 159
Source File: MonitoringImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate

import java.time.Instant
import java.util.UUID

import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor._
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble._
import ee.cone.c4di.{c4, c4multi}
import ee.cone.c4gate.ActorAccessProtocol.C_ActorAccessKey
import ee.cone.c4gate.AvailabilitySettingProtocol.C_AvailabilitySetting
import ee.cone.c4gate.HttpProtocol._
import ee.cone.c4proto.{Id, protocol}
import okio.ByteString

@protocol("ActorAccessApp") object ActorAccessProtocol {

  @Id(0x006A) case class C_ActorAccessKey(
    @Id(0x006B) srcId: String,
    @Id(0x006C) value: String
  )

}

@c4assemble("ActorAccessApp") class ActorAccessAssembleBase(
  actorAccessCreateTxFactory: ActorAccessCreateTxFactory,
){
  def join(
    key: SrcId,
    first: Each[S_Firstborn],
    accessKeys: Values[C_ActorAccessKey]
  ): Values[(SrcId, TxTransform)] =
    if (accessKeys.nonEmpty) Nil
    else List(WithPK(actorAccessCreateTxFactory.create(s"ActorAccessCreateTx-${first.srcId}", first)))
}

@c4multi("ActorAccessApp") final case class ActorAccessCreateTx(srcId: SrcId, first: S_Firstborn)(
  txAdd: LTxAdd,
) extends TxTransform {
  def transform(local: Context): Context =
    txAdd.add(LEvent.update(C_ActorAccessKey(first.srcId, s"${UUID.randomUUID}")))(local)
}



@c4("AvailabilityApp") final class Monitoring(
  publisher: Publisher,
  txAdd: LTxAdd,
) {
  def publish(
    time: Long, updatePeriod: Long, timeout: Long,
    path: String, headers: List[N_Header], body: okio.ByteString
  ): Context => Context = {
    val nextTime = time + updatePeriod
    val pubEvents = publisher.publish(ByPathHttpPublication(path, headers, body), _+updatePeriod+timeout)
    txAdd.add(pubEvents).andThen(SleepUntilKey.set(Instant.ofEpochMilli(nextTime)))
  }
}

@c4assemble("AvailabilityApp") class AvailabilityAssembleBase(updateDef: Long = 3000, timeoutDef: Long = 3000)(
  monitoring: Monitoring
) {
  def join(
    key: SrcId,
    first: Each[S_Firstborn],
    settings: Values[C_AvailabilitySetting]
  ): Values[(SrcId, TxTransform)] = {
    val (updatePeriod, timeout) = Single.option(settings.map(s => s.updatePeriod -> s.timeout)).getOrElse((updateDef, timeoutDef))
    List(WithPK(AvailabilityTx(s"AvailabilityTx-${first.srcId}", updatePeriod, timeout)(monitoring)))
  }
}

@protocol("AvailabilityApp") object AvailabilitySettingProtocol {

  @Id(0x00f0) case class C_AvailabilitySetting(
    @Id(0x0001) srcId: String,
    @Id(0x0002) updatePeriod: Long,
    @Id(0x0003) timeout: Long
  )

}

case class AvailabilityTx(srcId: SrcId, updatePeriod: Long, timeout: Long)(
  monitoring: Monitoring
) extends TxTransform {
  def transform(local: Context): Context =
    monitoring.publish(
      System.currentTimeMillis, updatePeriod, timeout,
      "/availability", Nil, ByteString.EMPTY
    )(local)
} 
Example 160
Source File: PrometheusPostImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate

import java.nio.charset.StandardCharsets
import java.time.Instant

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor.{Config, Context, ListConfig, SleepUntilKey, TxTransform, WithPK}
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Single, byEq, c4assemble}
import ee.cone.c4di.{c4, c4multi, provide}
import ee.cone.c4proto.ToByteString

import scala.util.Try

@c4("PrometheusPostApp") final class PrometheusPostSettingsProvider(config: ListConfig) {
  def defaultPrometheusPostRefresh: Long = 30L * 1000L
  @provide def get: Seq[PrometheusPostSettings] =
    config.get("C4PROMETHEUS_POST_URL")
      .map(url => PrometheusPostSettings(url, defaultPrometheusPostRefresh))
}

object PrometheusMetricBuilder {
  def apply(metrics: List[Metric]): String =
    metrics.map(metricToString(_, "")).mkString("\n", "\n", "\n")

  //def withTimeStamp(metrics: List[Metric], time: Long): String =
  //  metrics.map(metricToString(_, time.toString)).mkString("\n", "\n", "\n")

  def metricToString(metric: Metric, extraInfo: String): String =
    s"${metric.name}${metric.labels.map(label => s"""${label.name}="${label.value}"""").mkString("{", ",", "}")} ${metric.value}${extraInfo.trim match { case "" => "" case a => s" $a"}}"
}

case class PrometheusPostSettings(url: String, refreshRate: Long)

object PrometheusPostSettingsObj {
  type PrometheusPushId = SrcId
  lazy val fixedSrcId: PrometheusPushId = "prometheus-post-tx"
}

import PrometheusPostSettingsObj._

@c4assemble("PrometheusPostApp") class PrometheusPostAssembleBase(defaultSettings: List[PrometheusPostSettings], factory: PrometheusPostTxFactory) {
  def joinStub(
    key: SrcId,
    first: Each[S_Firstborn]
  ): Values[(PrometheusPushId, PrometheusPostSettings)] =
    Nil

  def createPrometheusPost(
    key: SrcId,
    first: Each[S_Firstborn],
    @byEq[PrometheusPushId](fixedSrcId) settings: Values[PrometheusPostSettings]
  ): Values[(SrcId, TxTransform)] =
    Single.option(settings).orElse(Single.option(defaultSettings)).toList.map { settings =>
      WithPK(factory.create(fixedSrcId, settings))
    }
}

@c4multi("PrometheusPostApp") final case class PrometheusPostTx(srcId: SrcId, settings: PrometheusPostSettings)(metricsFactories: List[MetricsFactory], util: HttpUtil) extends TxTransform with LazyLogging {
  def transform(local: Context): Context = {
    val time = System.currentTimeMillis
    val metrics = metricsFactories.flatMap(_.measure(local))
    val bodyStr = PrometheusMetricBuilder(metrics)
    val bodyBytes = ToByteString(bodyStr.getBytes(StandardCharsets.UTF_8))
    logger.debug(s"Posted ${metrics.size} metrics to ${settings.url}")
    // mimeTypeOpt.map(mimeType => ("content-type", mimeType)).toList
    util.post(settings.url, Nil, bodyBytes, Option(5000), expectCode = 202)
    SleepUntilKey.set(Instant.ofEpochMilli(time + settings.refreshRate))(local)
  }
} 
Example 161
Source File: ProgressObserverImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor

import java.lang.management.ManagementFactory
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Path, Paths}
import java.time.Instant
import java.util.UUID

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.{NextOffset, SrcId}
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Single, c4assemble}
import ee.cone.c4di.c4

import scala.annotation.tailrec
import scala.concurrent.Future

@c4("ServerCompApp") final class ProgressObserverFactoryImpl(
  inner: TxObserver, config: ListConfig,
  execution: Execution, getToStart: DeferredSeq[Executable]
) extends ProgressObserverFactory {
  def create(endOffset: NextOffset): Observer[RichContext] = {
    val lateExObserver: Observer[RichContext]  = new LateExecutionObserver(execution,getToStart.value,inner.value)
    val readyObserver = Single.option(config.get("C4ROLLING")).fold(lateExObserver)(path=>
      new ReadyObserverImpl(lateExObserver, Paths.get(path), 0L)
    )
    new ProgressObserverImpl(readyObserver,endOffset)
  }
}

// states:
//   loading
//   loading ready
//   master
// trans:
//   loading -> loading
//   loading -> loading ready
//   loading ready -> loading ready
//   loading ready -> master

class ProgressObserverImpl(inner: Observer[RichContext], endOffset: NextOffset, until: Long=0) extends Observer[RichContext] with LazyLogging {
  def activate(rawWorld: RichContext): Observer[RichContext] =
    if (rawWorld.offset < endOffset) {
      val now = System.currentTimeMillis
      if(now < until) this else {
        logger.debug(s"loaded ${rawWorld.offset}/$endOffset")
        new ProgressObserverImpl(inner, endOffset, now+1000)
      }
    } else {
      logger.info(s"Stats OK -- loaded ALL/$endOffset -- uptime ${ManagementFactory.getRuntimeMXBean.getUptime}ms")
      inner.activate(rawWorld)
    }
}

class ReadyObserverImpl(inner: Observer[RichContext], path: Path, until: Long=0) extends Observer[RichContext] with LazyLogging {
  private def ignoreTheSamePath(path: Path): Unit = ()
  def activate(rawWorld: RichContext): Observer[RichContext] = {
    if(until == 0) ignoreTheSamePath(Files.write(path.resolve("c4is-ready"),Array.empty[Byte]))
    val now = System.currentTimeMillis
    if(now < until) this
    else if(Files.exists(path.resolve("c4is-master"))) {
      logger.info(s"becoming master")
      inner.activate(rawWorld)
    } else {
      logger.debug(s"ready/waiting")
      new ReadyObserverImpl(inner, path, now+1000)
    }
  }

}


@c4("ServerCompApp") final class LocalElectorDeath(config: ListConfig, execution: Execution) extends Executable with Early {
  def run(): Unit =
    for(path <- config.get("C4ELECTOR_PROC_PATH")) iteration(Paths.get(path))
  @tailrec private def iteration(path: Path): Unit = {
    if(Files.notExists(path)) execution.complete()
    Thread.sleep(1000)
    iteration(path)
  }
}

////

@c4("ServerCompApp") final class ServerExecutionFilter(inner: ExecutionFilter)
  extends ExecutionFilter(e=>inner.check(e) && e.isInstanceOf[Early])

class LateExecutionObserver(
  execution: Execution, toStart: Seq[Executable], inner: Observer[RichContext]
) extends Observer[RichContext] with LazyLogging {
  def activate(world: RichContext): Observer[RichContext] = {
    logger.info(s"tracking ${toStart.size} late services")
    toStart.filterNot(_.isInstanceOf[Early]).foreach(f => execution.fatal(Future(f.run())(_)))
    inner.activate(world)
  }
} 
Example 162
Source File: ApiInfo.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.api.domain.utils

import java.time.Instant

import gospeak.core.domain.User
import gospeak.core.domain.utils.{BasicCtx, Info}
import gospeak.web.api.domain.ApiUser
import play.api.libs.json.{Json, Writes}

final case class ApiInfo(createdAt: Instant,
                         createdBy: ApiUser.Embed,
                         updatedAt: Instant,
                         updatedBy: ApiUser.Embed)

object ApiInfo {
  def from(i: Info, users: Seq[User])(implicit ctx: BasicCtx): ApiInfo =
    new ApiInfo(
      createdAt = i.createdAt,
      createdBy = ApiUser.embed(i.createdBy, users),
      updatedAt = i.updatedAt,
      updatedBy = ApiUser.embed(i.updatedBy, users))

  implicit val writes: Writes[ApiInfo] = Json.writes[ApiInfo]
} 
Example 163
Source File: ApiResult.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.api.domain.utils

import java.time.Instant

import gospeak.core.domain.utils.BasicCtx
import gospeak.web.utils.BasicReq
import gospeak.libs.scala.domain.Page
import play.api.http.Status
import play.api.libs.json._

sealed trait ApiResult[+A] extends Product with Serializable {
  def data: A

  val execMs: Long
}

object ApiResult {
  def of[A](p: A)(implicit ctx: BasicCtx): ItemResult[A] =
    ItemResult[A](
      data = p,
      execMs = Instant.now().toEpochMilli - ctx.now.toEpochMilli)

  def of[A, B](p: Page[A], f: A => B)(implicit ctx: BasicCtx): PageResult[Seq[B]] =
    PageResult[Seq[B]](
      data = p.items.map(f),
      totalItems = p.total.value,
      pageSize = p.params.pageSize.value,
      pageNo = p.params.page.value,
      execMs = Instant.now().toEpochMilli - ctx.now.toEpochMilli)

  def notFound(message: String)(implicit ctx: BasicCtx): ErrorResult = err(Status.NOT_FOUND, message)

  def badRequest(message: String)(implicit ctx: BasicCtx): ErrorResult = err(Status.BAD_REQUEST, message)

  def badRequest(errors: Seq[(JsPath, Seq[JsonValidationError])])(implicit req: BasicReq[JsValue]): ErrorResult =
    badRequest("Invalid request body:" + errors.map { case (path, errs) => s"\n  - ${path.toJsonString}: ${errs.map(req.format).mkString(", ")}" }.mkString)

  def forbidden(message: String)(implicit ctx: BasicCtx): ErrorResult = err(Status.FORBIDDEN, message)

  def internalServerError(message: String)(implicit ctx: BasicCtx): ErrorResult = err(Status.INTERNAL_SERVER_ERROR, message)

  private def err(status: Int, message: String)(implicit ctx: BasicCtx): ErrorResult =
    ErrorResult(
      status = status,
      message = message,
      execMs = Instant.now().toEpochMilli - ctx.now.toEpochMilli)
}

final case class ItemResult[A](data: A,
                               execMs: Long) extends ApiResult[A]

object ItemResult {
  implicit def writes[A](implicit a: Writes[A]): Writes[ItemResult[A]] = Json.writes[ItemResult[A]]
}

final case class PageResult[A](data: A,
                               totalItems: Long,
                               pageSize: Int,
                               pageNo: Int,
                               execMs: Long) extends ApiResult[A]

object PageResult {
  implicit def writes[A](implicit a: Writes[A]): Writes[PageResult[A]] = Json.writes[PageResult[A]]
}

final case class ErrorResult(status: Int,
                             message: String,
                             execMs: Long) extends ApiResult[Nothing] {
  override def data: Nothing = throw new NoSuchElementException("ErrorResult.data")
}

object ErrorResult {
  implicit val writes: Writes[ErrorResult] = Json.writes[ErrorResult]
} 
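Note: execMs above is the difference of two Instants expressed in epoch milliseconds; Duration.between gives the same result with explicit units. A sketch:

import java.time.{Duration, Instant}

object ExecMsSketch extends App {
  val start = Instant.now()
  Thread.sleep(25)                       // stand-in for the real work
  val end   = Instant.now()

  val viaEpochMillis = end.toEpochMilli - start.toEpochMilli
  val viaDuration    = Duration.between(start, end).toMillis

  println(s"$viaEpochMillis ms / $viaDuration ms") // both report the elapsed time in milliseconds
}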
Example 164
Source File: ApiGroup.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.api.domain

import java.time.Instant

import gospeak.core.domain.Group
import gospeak.core.domain.utils.BasicCtx
import gospeak.web.api.domain.utils.ApiPlace
import play.api.libs.json.{Json, Writes}

object ApiGroup {

  // data to display publicly
  final case class Published(slug: String,
                             name: String,
                             contact: Option[String],
                             description: String,
                             location: Option[ApiPlace],
                             tags: Seq[String],
                             created: Instant)

  object Published {
    implicit val writes: Writes[Published] = Json.writes[Published]
  }

  def published(group: Group.Full)(implicit ctx: BasicCtx): Published =
    new Published(
      slug = group.slug.value,
      name = group.name.value,
      contact = group.contact.map(_.value),
      description = group.description.value,
      location = group.location.map(ApiPlace.from),
      tags = group.tags.map(_.value),
      created = group.info.createdAt)

  // embedded data in other models, should be public
  final case class Embed(slug: String,
                         name: String,
                         contact: Option[String],
                         description: String,
                         location: Option[ApiPlace],
                         tags: Seq[String])

  object Embed {
    implicit val writes: Writes[Embed] = Json.writes[Embed]
  }

  def embed(group: Group)(implicit ctx: BasicCtx): Embed =
    new Embed(
      slug = group.slug.value,
      name = group.name.value,
      contact = group.contact.map(_.value),
      description = group.description.value,
      location = group.location.map(ApiPlace.from),
      tags = group.tags.map(_.value))

} 
Example 165
Source File: ApiComment.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.api.domain

import java.time.Instant

import gospeak.core.domain.Comment
import gospeak.core.domain.utils.BasicCtx
import play.api.libs.json.{Json, Writes}

final case class ApiComment(id: String,
                            answers: Option[String],
                            text: String,
                            createdAt: Instant,
                            createdBy: ApiUser.Embed)

object ApiComment {
  implicit val writes: Writes[ApiComment] = Json.writes[ApiComment]

  def from(c: Comment.Full)(implicit ctx: BasicCtx): ApiComment =
    new ApiComment(
      id = c.id.value,
      answers = c.answers.map(_.value),
      text = c.text,
      createdAt = c.createdAt,
      createdBy = ApiUser.embed(c.createdBy))
} 
Example 166
Source File: ApiAttendee.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.api.domain

import java.time.Instant

import gospeak.core.domain.utils.BasicCtx
import gospeak.core.services.meetup.domain.{MeetupAttendee, MeetupGroup}
import gospeak.libs.scala.domain.Image
import play.api.libs.json.{Json, Writes}

object ApiAttendee {

  // data to display publicly
  final case class Published(name: String,
                             avatar: String,
                             meetupProfile: String,
                             host: Boolean,
                             response: String,
                             updated: Instant)

  object Published {
    implicit val writes: Writes[Published] = Json.writes[Published]
  }

  def published(attendee: MeetupAttendee, group: MeetupGroup.Slug)(implicit ctx: BasicCtx): Published =
    new Published(
      name = attendee.name,
      avatar = attendee.avatar.map(_.value).getOrElse(Image.AdorableUrl(attendee.id.value.toString, None).value),
      meetupProfile = s"https://www.meetup.com/${group.value}/members/${attendee.id.value}",
      host = attendee.host,
      response = attendee.response,
      updated = attendee.updated)
} 
Example 167
Source File: SocialProfile.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.auth.domain

import java.time.Instant

import com.mohiva.play.silhouette.impl.providers.CommonSocialProfile
import gospeak.core.domain.User
import gospeak.core.domain.utils.SocialAccounts
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.StringUtils
import gospeak.libs.scala.domain.{Avatar, CustomException, EmailAddress, Url}

object SocialProfile {
  val setEmailUrls: Map[String, String] = Map(
    "twitter" -> "https://twitter.com/settings/email")

  def toUserData(profile: CommonSocialProfile, defaultAvatar: (EmailAddress, User.Slug) => Avatar, now: Instant): Either[CustomException, User.Data] =
    for {
      email <- profile.email.map(EmailAddress.from)
        .getOrElse(Left(CustomException(s"<b>No email available from your ${profile.loginInfo.providerID} account.</b><br>" +
          s"${setEmailUrls.get(profile.loginInfo.providerID).map(url => "<a href=\"" + url + "\" target=\"_blank\">Add your email</a>").getOrElse("Add your email")} " +
          s"and try again or choose an other login option.")))
      avatarOpt <- getAvatar(profile)
      slug <- User.Slug.from(StringUtils.slugify(profile.firstName.getOrElse(email.nickName)))
      (first, last) = email.guessNames
    } yield User.Data(
      slug = slug,
      status = User.Status.Public,
      firstName = profile.firstName.getOrElse(first),
      lastName = profile.lastName.getOrElse(last),
      email = email,
      avatar = avatarOpt.getOrElse(defaultAvatar(email, slug)),
      title = None,
      bio = None,
      mentoring = None,
      company = None,
      location = None,
      phone = None,
      website = None,
      social = SocialAccounts.fromUrls())

  def getAvatar(profile: CommonSocialProfile): Either[CustomException, Option[Avatar]] =
    profile.avatarURL.map(Url.from).sequence.map(_.map(Avatar))
} 
Example 168
Source File: SchedulerSrv.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.services

import java.time.Instant

import cats.effect.{IO, Timer}
import cron4s.CronExpr
import eu.timepit.fs2cron.awakeEveryCron
import fs2.Stream
import gospeak.core.domain.utils.{AdminCtx, Constants}
import gospeak.core.services.storage.AdminVideoRepo
import gospeak.core.services.twitter.TwitterSrv
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.TimeUtils
import gospeak.web.services.SchedulerSrv.{Conf, Exec, Scheduler}

import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal

class SchedulerSrv(videoRepo: AdminVideoRepo,
                   twitterSrv: TwitterSrv)(implicit ec: ExecutionContext) {
  implicit private val timer: Timer[IO] = IO.timer(ec)
  private val schedulers = mutable.ListBuffer[Scheduler]()
  private val execs: mutable.ListBuffer[Exec] = mutable.ListBuffer[Exec]()

  def getSchedulers: List[Scheduler] = schedulers.toList

  def getExecs: List[Exec] = execs.toList

  def init(conf: Conf): Unit = {
    schedule("tweet random video", conf.tweetRandomVideo, tweetRandomVideo())
  }

  def exec(name: String)(implicit ctx: AdminCtx): IO[Option[Exec]] =
    schedulers.find(_.name == name).map(exec(_, s"manual (${ctx.user.name.value})")).sequence

  private def tweetRandomVideo(): IO[(String, Option[String])] = for {
    video <- videoRepo.findRandom()
    tweet <- (for {
      v <- video.toRight("No video available")
    } yield twitterSrv.tweet(s"#OneDayOneTalk [${v.lang}] ${v.title} on ${v.channel.name} in ${v.publishedAt.getYear(Constants.defaultZoneId)} ${v.url.value}")).sequence
  } yield (tweet.map(t => s"Tweet sent: ${t.text}").getOrElse("Tweet not sent"), tweet.swap.toOption)

  // TODO be able to stop/start a scheduler
  private def schedule(name: String, cron: CronExpr, task: IO[(String, Option[String])]): Unit = {
    schedulers.find(_.name == name).map(_ => ()).getOrElse {
      val scheduler = Scheduler(name, cron, Some(Instant.now()), task)
      schedulers += scheduler
      val stream = awakeEveryCron[IO](cron).flatMap { _ => Stream.eval(exec(scheduler, "auto")) }
      stream.compile.drain.unsafeRunAsyncAndForget
    }
  }

  private def exec(s: Scheduler, source: String): IO[Exec] = IO(Instant.now()).flatMap { start =>
    s.task.map {
      case (res, None) => Exec(s.name, source, start, Instant.now(), res, None)
      case (res, Some(err)) => Exec(s.name, source, start, Instant.now(), res, Some(err))
    }.recover {
      case NonFatal(e) => Exec(s.name, source, start, Instant.now(), s"Finished with ${e.getClass.getSimpleName}", Some(e.getMessage))
    }.map { e =>
      execs += e
      e
    }
  }
}

object SchedulerSrv {

  final case class Conf(tweetRandomVideo: CronExpr)

  final case class Scheduler(name: String,
                             schedule: CronExpr,
                             started: Option[Instant],
                             private[SchedulerSrv] val task: IO[(String, Option[String])])

  final case class Exec(name: String,
                        source: String,
                        started: Instant,
                        finished: Instant,
                        result: String,
                        error: Option[String]) {
    def duration: FiniteDuration = TimeUtils.toFiniteDuration(started, finished)
  }

} 
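Note: Exec.duration above goes through a project helper (TimeUtils.toFiniteDuration); a plain-library equivalent using only java.time and scala.concurrent.duration might look like this:

import java.time.Instant
import scala.concurrent.duration.{FiniteDuration, MILLISECONDS}

object FiniteDurationSketch extends App {
  def between(start: Instant, finished: Instant): FiniteDuration =
    FiniteDuration(java.time.Duration.between(start, finished).toMillis, MILLISECONDS)

  val t0 = Instant.ofEpochMilli(1000L)
  val t1 = Instant.ofEpochMilli(4500L)
  println(between(t0, t1)) // 3500 milliseconds
}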
Example 169
Source File: packageTest.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.web.utils

import java.time.Instant

import gospeak.web.testingutils.{BaseSpec, Values}
import play.api.mvc.AnyContent

import scala.concurrent.duration._

class packageTest extends BaseSpec {
  private val i = Instant.ofEpochMilli(1549115209899L)
  private implicit val req: UserReq[AnyContent] = Values.userReq

  describe("utils") {
    describe("RichInstant") {
      it("should format as date") {
        i.asDate shouldBe "02 Feb 2019"
      }
      it("should format as datetime") {
        i.asDatetime shouldBe "02 Feb 2019 at 14:46 (UTC)"
      }
    }
    describe("RichFiniteDuration") {
      it("should round to highest unit") {
        Duration(3, DAYS).plus(Duration(1, HOURS)).round shouldBe Duration(3, DAYS)
      }
      it("should also round negative numbers") {
        Duration(-3, DAYS).plus(Duration(-1, HOURS)).round shouldBe Duration(-3, DAYS)
        Duration(-3, DAYS).plus(Duration(1, HOURS)).round shouldBe Duration(-2, DAYS)
      }
      it("should display durations") {
        3.millis.asTime shouldBe "0:00"
        5.seconds.asTime shouldBe "0:05"
        46.seconds.asTime shouldBe "0:46"
        5.minutes.plus(16.seconds).asTime shouldBe "5:16"
        12.minutes.plus(2.seconds).asTime shouldBe "12:02"
        2.hours.plus(23.minutes).plus(18.seconds).asTime shouldBe "2:23:18"
        11.hours.plus(3.minutes).plus(7.seconds).asTime shouldBe "11:03:07"
      }
    }
    describe("private functions") {
      describe("timeAgo") {
        it("should display time for any unit") {
          timeAgo(Duration(5, NANOSECONDS)) shouldBe "just now"
          timeAgo(Duration(5, MICROSECONDS)) shouldBe "just now"
          timeAgo(Duration(5, MILLISECONDS)) shouldBe "just now"
          timeAgo(Duration(5, SECONDS)) shouldBe "in 5 seconds"
          timeAgo(Duration(5, MINUTES)) shouldBe "in 5 minutes"
          timeAgo(Duration(5, HOURS)) shouldBe "in 5 hours"
          timeAgo(Duration(5, DAYS)) shouldBe "in 5 days"
          timeAgo(Duration(18, DAYS)) shouldBe "in 2 weeks"
          timeAgo(Duration(63, DAYS)) shouldBe "in 2 months"
          timeAgo(Duration(900, DAYS)) shouldBe "in 2 years"
        }
        it("should manage singular/plural and positive/negative durations") {
          timeAgo(Duration(-2, SECONDS)) shouldBe "2 seconds ago"
          timeAgo(Duration(-1, SECONDS)) shouldBe "1 second ago"
          timeAgo(Duration(0, SECONDS)) shouldBe "just now"
          timeAgo(Duration(1, SECONDS)) shouldBe "in 1 second"
          timeAgo(Duration(2, SECONDS)) shouldBe "in 2 seconds"
        }
      }
    }
  }
} 
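Note: asDate and asDatetime exercised above are project extension methods; the underlying formatting can be reproduced with DateTimeFormatter (pattern and zone are assumptions chosen to match the expected output):

import java.time.{Instant, ZoneOffset}
import java.time.format.DateTimeFormatter
import java.util.Locale

object InstantFormatSketch extends App {
  val i = Instant.ofEpochMilli(1549115209899L)

  val asDate = DateTimeFormatter.ofPattern("dd MMM yyyy", Locale.ENGLISH)
    .withZone(ZoneOffset.UTC)
    .format(i)

  println(asDate) // 02 Feb 2019
}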
Example 170
Source File: SponsorRepoSql.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.infra.services.storage.sql

import java.time.Instant

import cats.effect.IO
import doobie.implicits._
import doobie.util.fragment.Fragment
import gospeak.core.domain._
import gospeak.core.domain.utils.OrgaCtx
import gospeak.core.services.storage.SponsorRepo
import gospeak.infra.services.storage.sql.SponsorRepoSql._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.Mappings._
import gospeak.infra.services.storage.sql.utils.DoobieUtils._
import gospeak.infra.services.storage.sql.utils.GenericRepo
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain.{Done, Page}

class SponsorRepoSql(protected[sql] val xa: doobie.Transactor[IO]) extends GenericRepo with SponsorRepo {
  override def create(data: Sponsor.Data)(implicit ctx: OrgaCtx): IO[Sponsor] =
    insert(Sponsor(ctx.group.id, data, ctx.info)).run(xa)

  override def edit(sponsor: Sponsor.Id, data: Sponsor.Data)(implicit ctx: OrgaCtx): IO[Done] =
    update(ctx.group.id, sponsor)(data, ctx.user.id, ctx.now).run(xa)

  override def remove(sponsor: Sponsor.Id)(implicit ctx: OrgaCtx): IO[Done] = delete(ctx.group.id, sponsor).run(xa)

  override def find(sponsor: Sponsor.Id)(implicit ctx: OrgaCtx): IO[Option[Sponsor]] = selectOne(ctx.group.id, sponsor).runOption(xa)

  override def listFull(params: Page.Params)(implicit ctx: OrgaCtx): IO[Page[Sponsor.Full]] = selectPage(params).run(xa)

  override def listCurrentFull(group: Group.Id, now: Instant): IO[Seq[Sponsor.Full]] = selectCurrent(group, now).runList(xa)

  override def listAll(implicit ctx: OrgaCtx): IO[Seq[Sponsor]] = selectAll(ctx.group.id).runList(xa)

  override def listAll(contact: Contact.Id)(implicit ctx: OrgaCtx): IO[Seq[Sponsor]] = selectAll(ctx.group.id, contact).runList(xa)

  override def listAllFull(partner: Partner.Id)(implicit ctx: OrgaCtx): IO[Seq[Sponsor.Full]] = selectAllFull(ctx.group.id, partner).runList(xa)
}

object SponsorRepoSql {
  private val _ = sponsorIdMeta // referenced so IntelliJ does not remove the DoobieUtils.Mappings import
  private val table = Tables.sponsors
  val tableFull: Table = table
    .join(Tables.sponsorPacks, _.sponsor_pack_id -> _.id).get
    .join(Tables.partners, _.partner_id -> _.id).get
    .joinOpt(Tables.contacts, _.contact_id -> _.id).get
    .copy(filters = Seq(
      Filter.Bool.fromNow("active", "Is active", "s.start", "s.finish")))

  private[sql] def insert(e: Sponsor): Insert[Sponsor] = {
    val values = fr0"${e.id}, ${e.group}, ${e.partner}, ${e.pack}, ${e.contact}, ${e.start}, ${e.finish}, ${e.paid}, ${e.price.amount}, ${e.price.currency}, ${e.info.createdAt}, ${e.info.createdBy}, ${e.info.updatedAt}, ${e.info.updatedBy}"
    table.insert(e, _ => values)
  }

  private[sql] def update(group: Group.Id, sponsor: Sponsor.Id)(data: Sponsor.Data, by: User.Id, now: Instant): Update = {
    val fields = fr0"partner_id=${data.partner}, sponsor_pack_id=${data.pack}, contact_id=${data.contact}, start=${data.start}, finish=${data.finish}, paid=${data.paid}, price=${data.price.amount}, currency=${data.price.currency}, updated_at=$now, updated_by=$by"
    table.update(fields, where(group, sponsor))
  }

  private[sql] def delete(group: Group.Id, sponsor: Sponsor.Id): Delete =
    table.delete(where(group, sponsor))

  private[sql] def selectOne(group: Group.Id, pack: Sponsor.Id): Select[Sponsor] =
    table.select[Sponsor](where(group, pack))

  private[sql] def selectPage(params: Page.Params)(implicit ctx: OrgaCtx): SelectPage[Sponsor.Full, OrgaCtx] =
    tableFull.selectPage[Sponsor.Full, OrgaCtx](params, fr0"WHERE s.group_id=${ctx.group.id}")

  private[sql] def selectCurrent(group: Group.Id, now: Instant): Select[Sponsor.Full] =
    tableFull.select[Sponsor.Full](fr0"WHERE s.group_id=$group AND s.start < $now AND s.finish > $now")

  private[sql] def selectAll(group: Group.Id): Select[Sponsor] =
    table.select[Sponsor](fr0"WHERE s.group_id=$group")

  private[sql] def selectAll(group: Group.Id, contact: Contact.Id): Select[Sponsor] =
    table.select[Sponsor](fr0"WHERE s.group_id=$group AND s.contact_id=$contact")

  private[sql] def selectAllFull(group: Group.Id, partner: Partner.Id): Select[Sponsor.Full] =
    tableFull.select[Sponsor.Full](fr0"WHERE s.group_id=$group AND s.partner_id=$partner")

  private def where(group: Group.Id, sponsor: Sponsor.Id): Fragment =
    fr0"WHERE s.group_id=$group AND s.id=$sponsor"
} 
Example 171
Source File: ContactRepoSql.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.infra.services.storage.sql

import java.time.Instant

import cats.effect.IO
import doobie.implicits._
import doobie.util.fragment.Fragment
import gospeak.core.domain._
import gospeak.core.domain.utils.OrgaCtx
import gospeak.core.services.storage.ContactRepo
import gospeak.infra.services.storage.sql.ContactRepoSql._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.Mappings._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.{Delete, Insert, Select, Update}
import gospeak.infra.services.storage.sql.utils.GenericRepo
import gospeak.libs.scala.domain.{Done, EmailAddress}

class ContactRepoSql(protected[sql] val xa: doobie.Transactor[IO]) extends GenericRepo with ContactRepo {
  override def create(data: Contact.Data)(implicit ctx: OrgaCtx): IO[Contact] = insert(Contact(data, ctx.info)).run(xa)

  override def edit(contact: Contact.Id, data: Contact.Data)(implicit ctx: OrgaCtx): IO[Done] = update(contact, data)(ctx.user.id, ctx.now).run(xa)

  override def remove(partner: Partner.Id, contact: Contact.Id)(implicit ctx: OrgaCtx): IO[Done] = delete(ctx.group.id, partner, contact)(ctx.user.id, ctx.now).run(xa)

  override def find(id: Contact.Id): IO[Option[Contact]] = selectOne(id).runOption(xa)

  override def list(partner: Partner.Id): IO[Seq[Contact]] = selectAll(partner).runList(xa)

  override def exists(partner: Partner.Id, email: EmailAddress): IO[Boolean] = selectOne(partner, email).runExists(xa)
}

object ContactRepoSql {
  private val _ = contactIdMeta // referenced so IntelliJ does not remove the DoobieUtils.Mappings import
  private val table = Tables.contacts

  private[sql] def insert(e: Contact): Insert[Contact] = {
    val values = fr0"${e.id}, ${e.partner}, ${e.firstName}, ${e.lastName}, ${e.email}, ${e.notes}, ${e.info.createdAt}, ${e.info.createdBy}, ${e.info.updatedAt}, ${e.info.updatedBy}"
    table.insert[Contact](e, _ => values)
  }

  private[sql] def update(contact: Contact.Id, data: Contact.Data)(by: User.Id, now: Instant): Update = {
    val fields = fr0"first_name=${data.firstName}, last_name=${data.lastName}, email=${data.email}, notes=${data.notes}, updated_at=$now, updated_by=$by"
    table.update(fields, where(contact))
  }

  private[sql] def delete(group: Group.Id, partner: Partner.Id, contact: Contact.Id)(by: User.Id, now: Instant): Delete =
    table.delete(where(contact))

  private[sql] def selectAll(partner: Partner.Id): Select[Contact] =
    table.select[Contact](where(partner))

  private[sql] def selectOne(id: Contact.Id): Select[Contact] =
    table.select[Contact](where(id))

  private[sql] def selectOne(partner: Partner.Id, email: EmailAddress): Select[Contact] =
    table.select[Contact](where(partner, email))

  private def where(partner: Partner.Id): Fragment = fr0"WHERE ct.partner_id=$partner"

  private def where(partner: Partner.Id, email: EmailAddress): Fragment = fr0"WHERE ct.partner_id=$partner AND ct.email=$email"

  private def where(id: Contact.Id): Fragment = fr0"WHERE ct.id=$id"
} 
Example 172
Source File: ExternalCfpRepoSql.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.infra.services.storage.sql

import java.time.Instant

import cats.data.NonEmptyList
import cats.effect.IO
import doobie.implicits._
import gospeak.core.domain._
import gospeak.core.domain.utils.{Info, UserAwareCtx, UserCtx}
import gospeak.core.services.storage.ExternalCfpRepo
import gospeak.infra.services.storage.sql.ExternalCfpRepoSql._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.Mappings._
import gospeak.infra.services.storage.sql.utils.DoobieUtils._
import gospeak.infra.services.storage.sql.utils.GenericRepo
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain.{Done, Page}

class ExternalCfpRepoSql(protected[sql] val xa: doobie.Transactor[IO]) extends GenericRepo with ExternalCfpRepo {
  override def create(event: ExternalEvent.Id, data: ExternalCfp.Data)(implicit ctx: UserCtx): IO[ExternalCfp] =
    insert(ExternalCfp(data, event, Info(ctx.user.id, ctx.now))).run(xa)

  override def edit(cfp: ExternalCfp.Id)(data: ExternalCfp.Data)(implicit ctx: UserCtx): IO[Done] =
    update(cfp)(data, ctx.user.id, ctx.now).run(xa)

  override def listAllIds(): IO[Seq[ExternalCfp.Id]] = selectAllIds().runList(xa)

  override def listAll(event: ExternalEvent.Id): IO[Seq[ExternalCfp]] = selectAll(event).runList(xa)

  override def listIncoming(params: Page.Params)(implicit ctx: UserAwareCtx): IO[Page[CommonCfp]] = selectCommonPageIncoming(params).run(xa)

  override def listDuplicatesFull(p: ExternalCfp.DuplicateParams): IO[Seq[ExternalCfp.Full]] = selectDuplicatesFull(p).runList(xa)

  override def findFull(cfp: ExternalCfp.Id): IO[Option[ExternalCfp.Full]] = selectOneFull(cfp).runOption(xa)

  override def findCommon(cfp: Cfp.Slug): IO[Option[CommonCfp]] = selectOneCommon(cfp).runOption(xa)

  override def findCommon(cfp: ExternalCfp.Id): IO[Option[CommonCfp]] = selectOneCommon(cfp).runOption(xa)
}

object ExternalCfpRepoSql {
  private val _ = externalCfpIdMeta // referenced so IntelliJ does not remove the DoobieUtils.Mappings import
  private val table = Tables.externalCfps
  private val tableFull = table
    .join(Tables.externalEvents.dropFields(_.name.startsWith("location_")), _.event_id -> _.id).get
  private val commonTable = Table(
    name = "((SELECT c.name, g.logo, c.begin, c.close, g.location, c.description, c.tags, null as ext_id, null  as ext_url, null    as ext_event_start, null     as ext_event_finish, null  as ext_event_url, null          as ext_tickets_url, null         as ext_videos_url, null as twitter_account, null as twitter_hashtag, c.slug as int_slug, g.id as group_id, g.slug as group_slug FROM cfps c INNER JOIN groups g ON c.group_id=g.id) " +
      "UNION (SELECT e.name, e.logo, c.begin, c.close, e.location, c.description, e.tags, c.id as ext_id, c.url as ext_url, e.start as ext_event_start, e.finish as ext_event_finish, e.url as ext_event_url, e.tickets_url as ext_tickets_url, e.videos_url as ext_videos_url,       e.twitter_account,       e.twitter_hashtag, null   as int_slug, null as group_id,   null as group_slug FROM external_cfps c INNER JOIN external_events e ON c.event_id=e.id))",
    prefix = "c",
    joins = Seq(),
    fields = Seq(
      "name", "logo", "begin", "close", "location", "description", "tags",
      "ext_id", "ext_url", "ext_event_start", "ext_event_finish", "ext_event_url", "ext_tickets_url", "ext_videos_url", "twitter_account", "twitter_hashtag",
      "int_slug", "group_id", "group_slug").map(Field(_, "c")),
    aggFields = Seq(),
    customFields = Seq(),
    sorts = Sorts("close", "close date", Field("close", "c"), Field("name", "c")),
    search = Seq("name", "description", "tags").map(Field(_, "c")),
    filters = Seq())

  private[sql] def insert(e: ExternalCfp): Insert[ExternalCfp] = {
    val values = fr0"${e.id}, ${e.event}, ${e.description}, ${e.begin}, ${e.close}, ${e.url}, ${e.info.createdAt}, ${e.info.createdBy}, ${e.info.updatedAt}, ${e.info.updatedBy}"
    table.insert[ExternalCfp](e, _ => values)
  }

  private[sql] def update(id: ExternalCfp.Id)(e: ExternalCfp.Data, by: User.Id, now: Instant): Update = {
    val fields = fr0"description=${e.description}, begin=${e.begin}, close=${e.close}, url=${e.url}, updated_at=$now, updated_by=$by"
    table.update(fields, fr0"WHERE id=$id")
  }

  private[sql] def selectAllIds(): Select[ExternalCfp.Id] =
    table.select[ExternalCfp.Id](Seq(Field("id", "ec")))

  private[sql] def selectAll(id: ExternalEvent.Id): Select[ExternalCfp] =
    table.select[ExternalCfp](fr0"WHERE ec.event_id=$id")

  private[sql] def selectOneFull(id: ExternalCfp.Id): Select[ExternalCfp.Full] =
    tableFull.selectOne[ExternalCfp.Full](fr0"WHERE ec.id=$id")

  private[sql] def selectOneCommon(slug: Cfp.Slug): Select[CommonCfp] =
    commonTable.selectOne[CommonCfp](fr0"WHERE c.slug=$slug")

  private[sql] def selectOneCommon(id: ExternalCfp.Id): Select[CommonCfp] =
    commonTable.selectOne[CommonCfp](fr0"WHERE c.id=$id")

  private[sql] def selectCommonPageIncoming(params: Page.Params)(implicit ctx: UserAwareCtx): SelectPage[CommonCfp, UserAwareCtx] =
    commonTable.selectPage[CommonCfp, UserAwareCtx](params, fr0"WHERE (c.close IS NULL OR c.close >= ${ctx.now})")

  private[sql] def selectDuplicatesFull(p: ExternalCfp.DuplicateParams): Select[ExternalCfp.Full] = {
    val filters = Seq(
      p.cfpUrl.map(v => fr0"ec.url LIKE ${"%" + v + "%"}"),
      p.cfpEndDate.map(v => fr0"ec.close=$v")
    ).flatten
    if (filters.isEmpty) {
      tableFull.select[ExternalCfp.Full](fr0"WHERE ec.id='no-match'")
    } else {
      tableFull.select[ExternalCfp.Full](fr0"WHERE " ++ filters.reduce(_ ++ fr0" OR " ++ _))
    }
  }
} 
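
The incoming-CFP query above keeps rows whose close date is NULL or not yet past ctx.now. A minimal in-memory sketch of the same predicate, assuming the close dates are UTC (the real code goes through doobie and the database instead):

import java.time.{Instant, LocalDateTime, ZoneOffset}

object IncomingCfpDemo extends App {
  // close IS NULL OR close >= now, expressed on Option[LocalDateTime]
  def isIncoming(close: Option[LocalDateTime], now: Instant): Boolean =
    close.forall(c => !c.toInstant(ZoneOffset.UTC).isBefore(now))

  println(isIncoming(None, Instant.now()))                                     // true: no close date means still open
  println(isIncoming(Some(LocalDateTime.of(2000, 1, 1, 0, 0)), Instant.now())) // false: closed long ago
}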
Example 173
Source File: ExternalEventRepoSql.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.infra.services.storage.sql

import java.time.Instant

import cats.effect.IO
import doobie.implicits._
import gospeak.core.domain.utils.{Info, UserAwareCtx, UserCtx}
import gospeak.core.domain.{CommonEvent, Event, ExternalEvent, User}
import gospeak.core.services.storage.ExternalEventRepo
import gospeak.infra.services.storage.sql.ExternalEventRepoSql._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.Mappings._
import gospeak.infra.services.storage.sql.utils.DoobieUtils._
import gospeak.infra.services.storage.sql.utils.{GenericQuery, GenericRepo}
import gospeak.libs.scala.domain.{Done, Logo, Page, Tag}

class ExternalEventRepoSql(protected[sql] val xa: doobie.Transactor[IO]) extends GenericRepo with ExternalEventRepo {
  override def create(data: ExternalEvent.Data)(implicit ctx: UserCtx): IO[ExternalEvent] =
    insert(ExternalEvent(data, Info(ctx.user.id, ctx.now))).run(xa)

  override def edit(id: ExternalEvent.Id)(data: ExternalEvent.Data)(implicit ctx: UserCtx): IO[Done] =
    update(id)(data, ctx.user.id, ctx.now).run(xa)

  override def listAllIds()(implicit ctx: UserAwareCtx): IO[Seq[ExternalEvent.Id]] = selectAllIds().runList(xa)

  override def list(params: Page.Params)(implicit ctx: UserCtx): IO[Page[ExternalEvent]] = selectPage(params).run(xa)

  override def listCommon(params: Page.Params)(implicit ctx: UserAwareCtx): IO[Page[CommonEvent]] = selectPageCommon(params).run(xa)

  override def find(id: ExternalEvent.Id): IO[Option[ExternalEvent]] = selectOne(id).runOption(xa)

  override def listTags(): IO[Seq[Tag]] = selectTags().runList(xa).map(_.flatten.distinct)

  override def listLogos(): IO[Seq[Logo]] = selectLogos().runList(xa).map(_.flatten.distinct)
}

object ExternalEventRepoSql {

  import GenericQuery._

  private val _ = externalEventIdMeta // referenced so IntelliJ does not remove the DoobieUtils.Mappings import
  val table: Table = Tables.externalEvents.copy(filters = Seq(
    Filter.Enum.fromEnum("type", "Type", "ee.kind", Seq(
      "conference" -> Event.Kind.Conference.value,
      "meetup" -> Event.Kind.Meetup.value,
      "training" -> Event.Kind.Training.value,
      "private event" -> Event.Kind.PrivateEvent.value)),
    Filter.Bool.fromNullable("video", "With video", "ee.videos_url")))
  private val tableSelect = table.dropFields(_.name.startsWith("location_"))
  val commonTable: Table = Table(
    name = "((SELECT e.name, e.kind, e.start, v.address as location, g.social_twitter as twitter_account, null as twitter_hashtag, e.tags, null as ext_id, null   as ext_logo, null          as ext_description, null  as ext_url, null          as ext_tickets, null         as ext_videos, e.id as int_id, e.slug as int_slug, e.description as int_description, g.id as int_group_id, g.slug as int_group_slug, g.name as int_group_name, g.logo as int_group_logo, c.id as int_cfp_id, c.slug as int_cfp_slug, c.name as int_cfp_name, v.id as int_venue_id, p.name as int_venue_name, p.logo as int_venue_logo, e.created_at, e.created_by, e.updated_at, e.updated_by FROM events e INNER JOIN groups g ON e.group_id=g.id LEFT OUTER JOIN cfps c ON e.cfp_id=c.id LEFT OUTER JOIN venues v ON e.venue=v.id LEFT OUTER JOIN partners p ON v.partner_id=p.id WHERE e.published IS NOT NULL) " +
      "UNION (SELECT e.name, e.kind, e.start,            e.location,                   e.twitter_account,       e.twitter_hashtag, e.tags, e.id as ext_id, e.logo as ext_logo, e.description as ext_description, e.url as ext_url, e.tickets_url as ext_tickets, e.videos_url as ext_videos, null as int_id, null   as int_slug, null          as int_description, null as int_group_id, null   as int_group_slug, null   as int_group_name, null   as int_group_logo, null as int_cfp_id, null   as int_cfp_slug, null   as int_cfp_name, null as int_venue_id, null   as int_venue_name, null   as int_venue_logo, e.created_at, e.created_by, e.updated_at, e.updated_by FROM external_events e))",
    prefix = "e",
    joins = Seq(),
    fields = Seq(
      "name", "kind", "start", "location", "twitter_account", "twitter_hashtag", "tags",
      "ext_id", "ext_logo", "ext_description", "ext_url", "ext_tickets", "ext_videos",
      "int_id", "int_slug", "int_description", "int_group_id", "int_group_slug", "int_group_name", "int_group_logo", "int_cfp_id", "int_cfp_slug", "int_cfp_name", "int_venue_id", "int_venue_name", "int_venue_logo",
      "created_at", "created_by", "updated_at", "updated_by").map(Field(_, "e")),
    aggFields = Seq(),
    customFields = Seq(),
    sorts = Sorts("start", Field("-start", "e"), Field("-created_at", "e")),
    search = Seq("name", "kind", "location", "twitter_account", "tags", "int_group_name", "int_cfp_name", "int_description", "ext_description").map(Field(_, "e")),
    filters = Seq(
      Filter.Enum.fromEnum("type", "Type", "e.kind", Seq(
        "conference" -> Event.Kind.Conference.value,
        "meetup" -> Event.Kind.Meetup.value,
        "training" -> Event.Kind.Training.value,
        "private event" -> Event.Kind.PrivateEvent.value)),
      Filter.Bool.fromNullable("video", "With video", "e.ext_videos"),
      Filter.Bool("past", "Is past", aggregation = false, ctx => fr0"e.start < ${ctx.now}", ctx => fr0"e.start > ${ctx.now}")))

  private[sql] def insert(e: ExternalEvent): Insert[ExternalEvent] = {
    val values = fr0"${e.id}, ${e.name}, ${e.kind}, ${e.logo}, ${e.description}, ${e.start}, ${e.finish}, " ++ insertLocation(e.location) ++ fr0", ${e.url}, ${e.tickets}, ${e.videos}, ${e.twitterAccount}, ${e.twitterHashtag}, ${e.tags}, " ++ insertInfo(e.info)
    table.insert[ExternalEvent](e, _ => values)
  }

  private[sql] def update(id: ExternalEvent.Id)(e: ExternalEvent.Data, by: User.Id, now: Instant): Update = {
    val fields = fr0"name=${e.name}, kind=${e.kind}, logo=${e.logo}, description=${e.description}, start=${e.start}, finish=${e.finish}, " ++ updateLocation(e.location) ++ fr0", url=${e.url}, tickets_url=${e.tickets}, videos_url=${e.videos}, twitter_account=${e.twitterAccount}, twitter_hashtag=${e.twitterHashtag}, tags=${e.tags}, updated_at=$now, updated_by=$by"
    table.update(fields, fr0"WHERE id=$id")
  }

  private[sql] def selectOne(id: ExternalEvent.Id): Select[ExternalEvent] =
    tableSelect.selectOne[ExternalEvent](fr0"WHERE ee.id=$id")

  private[sql] def selectAllIds()(implicit ctx: UserAwareCtx): Select[ExternalEvent.Id] =
    table.select[ExternalEvent.Id](Seq(Field("id", "ee")))

  private[sql] def selectPage(params: Page.Params)(implicit ctx: UserCtx): SelectPage[ExternalEvent, UserCtx] =
    tableSelect.selectPage[ExternalEvent, UserCtx](params)

  private[sql] def selectPageCommon(params: Page.Params)(implicit ctx: UserAwareCtx): SelectPage[CommonEvent, UserAwareCtx] =
    commonTable.selectPage[CommonEvent, UserAwareCtx](params)

  private[sql] def selectTags(): Select[Seq[Tag]] =
    table.select[Seq[Tag]](Seq(Field("tags", "ee")))

  private[sql] def selectLogos(): Select[Option[Logo]] =
    table.select[Option[Logo]](Seq(Field("logo", "ee")), fr0"WHERE ee.logo IS NOT NULL")
} 
Example 174
Source File: SponsorPackRepoSql.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.infra.services.storage.sql

import java.time.Instant

import cats.data.NonEmptyList
import cats.effect.IO
import doobie.Fragments
import doobie.implicits._
import doobie.util.fragment.Fragment
import gospeak.core.domain.utils.OrgaCtx
import gospeak.core.domain.{Group, SponsorPack, User}
import gospeak.core.services.storage.SponsorPackRepo
import gospeak.infra.services.storage.sql.SponsorPackRepoSql._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.Mappings._
import gospeak.infra.services.storage.sql.utils.DoobieUtils.{Insert, Select, Update}
import gospeak.infra.services.storage.sql.utils.GenericRepo
import gospeak.libs.scala.domain.{CustomException, Done}

class SponsorPackRepoSql(protected[sql] val xa: doobie.Transactor[IO]) extends GenericRepo with SponsorPackRepo {
  override def create(data: SponsorPack.Data)(implicit ctx: OrgaCtx): IO[SponsorPack] =
    insert(SponsorPack(ctx.group.id, data, ctx.info)).run(xa)

  override def edit(pack: SponsorPack.Slug, data: SponsorPack.Data)(implicit ctx: OrgaCtx): IO[Done] = {
    if (data.slug != pack) {
      find(data.slug).flatMap {
        case None => update(ctx.group.id, pack)(data, ctx.user.id, ctx.now).run(xa)
        case _ => IO.raiseError(CustomException(s"You already have a sponsor pack with slug ${data.slug}"))
      }
    } else {
      update(ctx.group.id, pack)(data, ctx.user.id, ctx.now).run(xa)
    }
  }

  override def disable(pack: SponsorPack.Slug)(implicit ctx: OrgaCtx): IO[Done] =
    setActive(ctx.group.id, pack)(active = false, ctx.user.id, ctx.now).run(xa)

  override def enable(pack: SponsorPack.Slug)(implicit ctx: OrgaCtx): IO[Done] =
    setActive(ctx.group.id, pack)(active = true, ctx.user.id, ctx.now).run(xa)

  override def find(pack: SponsorPack.Slug)(implicit ctx: OrgaCtx): IO[Option[SponsorPack]] = selectOne(ctx.group.id, pack).runOption(xa)

  override def listAll(group: Group.Id): IO[Seq[SponsorPack]] = selectAll(group).runList(xa)

  override def listAll(implicit ctx: OrgaCtx): IO[Seq[SponsorPack]] = selectAll(ctx.group.id).runList(xa)

  override def listActives(group: Group.Id): IO[Seq[SponsorPack]] = selectActives(group).runList(xa)

  override def listActives(implicit ctx: OrgaCtx): IO[Seq[SponsorPack]] = selectActives(ctx.group.id).runList(xa)
}

object SponsorPackRepoSql {
  private val _ = sponsorPackIdMeta // referenced so IntelliJ does not remove the DoobieUtils.Mappings import
  private val table = Tables.sponsorPacks

  private[sql] def insert(e: SponsorPack): Insert[SponsorPack] = {
    val values = fr0"${e.id}, ${e.group}, ${e.slug}, ${e.name}, ${e.description}, ${e.price.amount}, ${e.price.currency}, ${e.duration}, ${e.active}, ${e.info.createdAt}, ${e.info.createdBy}, ${e.info.updatedAt}, ${e.info.updatedBy}"
    table.insert(e, _ => values)
  }

  private[sql] def update(group: Group.Id, pack: SponsorPack.Slug)(data: SponsorPack.Data, by: User.Id, now: Instant): Update = {
    val fields = fr0"slug=${data.slug}, name=${data.name}, description=${data.description}, price=${data.price.amount}, currency=${data.price.currency}, duration=${data.duration}, updated_at=$now, updated_by=$by"
    table.update(fields, where(group, pack))
  }

  private[sql] def setActive(group: Group.Id, pack: SponsorPack.Slug)(active: Boolean, by: User.Id, now: Instant): Update =
    table.update(fr0"active=$active, updated_at=$now, updated_by=$by", where(group, pack))

  private[sql] def selectOne(group: Group.Id, pack: SponsorPack.Slug): Select[SponsorPack] =
    table.select[SponsorPack](where(group, pack))

  private[sql] def selectAll(ids: NonEmptyList[SponsorPack.Id]): Select[SponsorPack] =
    table.select[SponsorPack](fr0"WHERE " ++ Fragments.in(fr"sp.id", ids))

  private[sql] def selectAll(group: Group.Id): Select[SponsorPack] =
    table.select[SponsorPack](where(group))

  private[sql] def selectActives(group: Group.Id): Select[SponsorPack] = {
    val active = true
    table.select[SponsorPack](where(group) ++ fr0" AND sp.active=$active")
  }

  private def where(group: Group.Id, slug: SponsorPack.Slug): Fragment =
    fr0"WHERE sp.group_id=$group AND sp.slug=$slug"

  private def where(group: Group.Id): Fragment =
    fr0"WHERE sp.group_id=$group"
} 
Example 175
Source File: CloudinaryClient.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.cloudinary

import java.time.Instant

import cats.effect.IO
import gospeak.libs.cloudinary.CloudinaryJson._
import gospeak.libs.cloudinary.domain.{CloudinaryUploadRequest, CloudinaryUploadResponse}
import gospeak.libs.http.HttpClient
import gospeak.libs.scala.Crypto
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain.Creds
import io.circe.parser.decode

class CloudinaryClient(conf: CloudinaryClient.Conf) {
  private val baseUrl = "https://api.cloudinary.com/v1_1"
  private val ignoreOnSign = Set("api_key", "file")

  // see https://cloudinary.com/documentation/upload_images#generating_authentication_signatures
  def sign(params: Map[String, String]): Either[String, String] =
    withCreds((_, creds) => sign(creds, params))

  // see https://cloudinary.com/documentation/upload_images#uploading_with_a_direct_call_to_the_api
  def upload(req: CloudinaryUploadRequest): IO[Either[String, CloudinaryUploadResponse]] = {
    withCreds { (cloudName, creds) =>
      val uploadUrl = s"$baseUrl/$cloudName/image/upload"
      val allParams = req.toMap ++ Map(
        "api_key" -> creds.key,
        "timestamp" -> Instant.now().getEpochSecond.toString)
      val signature = sign(creds, allParams)
      HttpClient.postForm(uploadUrl, allParams ++ Map("signature" -> signature))
        .map(r => decode[CloudinaryUploadResponse](r.body).leftMap(_.getMessage))
    }.sequence.map(_.flatMap(identity))
  }

  private def sign(creds: Creds, queryParams: Map[String, String]): String = {
    val params = queryParams
      .filterKeys(!ignoreOnSign.contains(_))
      .toList.sortBy(_._1)
      .map { case (key, value) => s"$key=$value" }.mkString("&")
    Crypto.sha1(params + creds.secret.decode)
  }

  private def withCreds[A](block: (String, Creds) => A): Either[String, A] =
    conf match {
      case CloudinaryClient.Conf(cloudName, _, Some(creds)) => Right(block(cloudName, creds))
      case _: CloudinaryClient.Conf => Left("No credentials defined for cloudinary")
    }
}

object CloudinaryClient {

  final case class Conf(cloudName: String,
                        uploadPreset: Option[String],
                        creds: Option[Creds])

} 
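
The private sign method above implements Cloudinary's signature scheme: drop the api_key and file parameters, sort the rest by key, join them as key=value pairs with '&', append the API secret and SHA-1 the result. A minimal sketch of that computation using java.security directly instead of the project's Crypto helper; the secret and parameter values are placeholders:

import java.security.MessageDigest
import java.time.Instant

object CloudinarySignSketch extends App {
  def sha1Hex(s: String): String =
    MessageDigest.getInstance("SHA-1").digest(s.getBytes("UTF-8")).map("%02x".format(_)).mkString

  val ignoreOnSign = Set("api_key", "file")
  val params = Map(
    "api_key"   -> "123456",                              // excluded from the signature
    "timestamp" -> Instant.now().getEpochSecond.toString, // epoch seconds, as in the upload call above
    "public_id" -> "sample")

  val toSign = params.filterKeys(!ignoreOnSign.contains(_)).toList.sortBy(_._1)
    .map { case (k, v) => s"$k=$v" }.mkString("&")
  println(sha1Hex(toSign + "not-a-real-secret"))
}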
Example 176
Source File: SlackJson.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.slack

import java.time.Instant

import cats.implicits._
import gospeak.libs.slack.domain._
import gospeak.libs.scala.CirceUtils.decodeSingleValueClass
import io.circe.Decoder
import io.circe.generic.semiauto.deriveDecoder

object SlackJson {
  private val _: Decoder[SlackChannel.Id] = decodeSingleValueClass // referenced so the CirceUtils.decodeSingleValueClass import is not removed
  private implicit val instantDecoder: Decoder[Instant] = Decoder.decodeLong.emap { timestampSecs =>
    Either.catchNonFatal(Instant.ofEpochSecond(timestampSecs)).leftMap(e => s"Bad Instant: ${e.getMessage}")
  }

  implicit val slackErrorDecoder: Decoder[SlackError] = deriveDecoder[SlackError]
  implicit val slackInfoDecoder: Decoder[SlackTokenInfo] = deriveDecoder[SlackTokenInfo]

  private implicit val slackChannelPurposeDecoder: Decoder[SlackChannel.Purpose] = deriveDecoder[SlackChannel.Purpose]
  private implicit val slackChannelTopicDecoder: Decoder[SlackChannel.Topic] = deriveDecoder[SlackChannel.Topic]
  private implicit val slackChannelDecoder: Decoder[SlackChannel] = deriveDecoder[SlackChannel]
  implicit val slackChannelSingleDecoder: Decoder[SlackChannel.Single] = deriveDecoder[SlackChannel.Single]
  implicit val slackChannelListDecoder: Decoder[SlackChannel.List] = deriveDecoder[SlackChannel.List]

  private implicit val slackUserProfileDecoder: Decoder[SlackUser.Profile] = deriveDecoder[SlackUser.Profile]
  private implicit val slackUserDecoder: Decoder[SlackUser] = deriveDecoder[SlackUser]
  implicit val slackUserListDecoder: Decoder[SlackUser.List] = deriveDecoder[SlackUser.List]

  private implicit val slackMessageDecoder: Decoder[SlackMessage] = deriveDecoder[SlackMessage]
  implicit val slackMessagePostedDecoder: Decoder[SlackMessage.Posted] = deriveDecoder[SlackMessage.Posted]
} 
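
Slack returns most timestamps as epoch seconds, which is why the listing installs a Decoder[Instant] built on Decoder.decodeLong. A minimal sketch of that decoder in isolation, assuming circe and cats are on the classpath:

import java.time.Instant

import cats.implicits._
import io.circe.Decoder
import io.circe.parser.decode

object EpochSecondDecoderDemo extends App {
  implicit val instantDecoder: Decoder[Instant] = Decoder.decodeLong.emap { secs =>
    Either.catchNonFatal(Instant.ofEpochSecond(secs)).leftMap(e => s"Bad Instant: ${e.getMessage}")
  }

  println(decode[Instant]("1371459105")) // Right(2013-06-17T08:51:45Z)
}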
Example 177
Source File: SlackChannel.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.slack.domain

import java.time.Instant

// cf https://api.slack.com/types/channel
final case class SlackChannel(id: SlackChannel.Id,
                              name: SlackChannel.Name,
                              is_channel: Boolean,
                              created: Instant,
                              creator: SlackUser.Id,
                              is_archived: Boolean,
                              is_general: Boolean,
                              name_normalized: String,
                              is_shared: Boolean,
                              is_org_shared: Boolean,
                              is_member: Boolean,
                              is_private: Boolean,
                              is_mpim: Boolean,
                              members: Seq[SlackUser.Id],
                              topic: SlackChannel.Topic,
                              purpose: SlackChannel.Purpose,
                              num_members: Option[Int],
                              previous_names: Seq[String])

object SlackChannel {

  sealed trait Ref {
    val value: String
  }

  final case class Id(value: String) extends Ref

  final case class Name(value: String) extends Ref

  final case class Topic(value: String,
                         creator: SlackUser.Id,
                         last_set: Instant)

  final case class Purpose(value: String,
                           creator: SlackUser.Id,
                           last_set: Instant)

  final case class List(channels: Seq[SlackChannel],
                        ok: Boolean)

  final case class Single(channel: SlackChannel,
                          ok: Boolean)

} 
Example 178
Source File: SlackUser.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.slack.domain

import java.time.Instant

// cf https://api.slack.com/types/user
final case class SlackUser(id: SlackUser.Id,
                           team_id: SlackTeam.Id,
                           name: String,
                           deleted: Boolean,
                           color: Option[String],
                           real_name: Option[String],
                           profile: SlackUser.Profile,
                           status: Option[String],
                           is_bot: Boolean,
                           is_app_user: Boolean,
                           is_admin: Option[Boolean],
                           is_owner: Option[Boolean],
                           is_stranger: Option[Boolean],
                           has_2fa: Option[Boolean],
                           tz: Option[String],
                           tz_label: Option[String],
                           tz_offset: Option[Int],
                           locale: Option[String],
                           updated: Instant)

object SlackUser {

  final case class Id(value: String) extends AnyVal

  final case class Profile(team: SlackTeam.Id,
                           real_name: String,
                           real_name_normalized: String,
                           first_name: Option[String],
                           last_name: Option[String],
                           email: Option[String],
                           phone: Option[String],
                           skype: Option[String],
                           status_text: String,
                           status_emoji: String,
                           avatar_hash: String,
                           image_24: String,
                           image_32: String,
                           image_48: String,
                           image_72: String,
                           image_192: String)

  final case class List(members: Seq[SlackUser],
                        ok: Boolean)

} 
Example 179
Source File: YoutubePlaylist.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.youtube.domain

import java.time.Instant

import com.google.api.services.youtube.{model => google}
import gospeak.libs.scala.domain.Url
import gospeak.libs.youtube.utils.YoutubeParser

import scala.collection.JavaConverters._

final case class YoutubePlaylist(id: Url.Videos.Playlist.Id,
                                 channelId: Url.Videos.Channel.Id,
                                 channelTitle: String,
                                 title: String,
                                 description: Option[String],
                                 publishedAt: Instant,
                                 lang: Option[String],
                                 tags: List[String],
                                 items: Long)

object YoutubePlaylist {
  // see https://developers.google.com/youtube/v3/docs/playlists#resource-representation
  def from(p: google.Playlist): Either[YoutubeErrors, YoutubePlaylist] = for {
    snippet <- Option(p.getSnippet).toRight(YoutubeErrors(s"Missing snippet for playlist ${p.getId}"))
    contentDetails <- Option(p.getContentDetails).toRight(YoutubeErrors(s"Missing content details for playlist ${p.getId}"))
    channelId <- Option(snippet.getChannelId).toRight(YoutubeErrors(s"Missing channelId for playlist ${p.getId}"))
    channelTitle <- Option(snippet.getChannelTitle).toRight(YoutubeErrors(s"Missing channelTitle for playlist ${p.getId}"))
    title <- Option(snippet.getTitle).toRight(YoutubeErrors(s"Missing title for playlist ${p.getId}"))
    publishedAt <- Option(snippet.getPublishedAt).map(YoutubeParser.toInstant).toRight(YoutubeErrors(s"Missing publishedAt for playlist ${p.getId}"))
    items <- Option(contentDetails.getItemCount).toRight(YoutubeErrors(s"Missing itemCount for playlist ${p.getId}"))
  } yield new YoutubePlaylist(
    id = Url.Videos.Playlist.Id(p.getId),
    channelId = Url.Videos.Channel.Id(channelId),
    channelTitle = channelTitle,
    title = title,
    description = Option(snippet.getDescription).filter(_.nonEmpty),
    publishedAt = publishedAt,
    lang = Option(snippet.getDefaultLanguage),
    tags = Option(snippet.getTags).map(_.asScala.toList).getOrElse(List()),
    items = items)
} 
Example 180
Source File: YoutubeChannel.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.youtube.domain

import java.time.Instant

import com.google.api.services.youtube.{model => google}
import gospeak.libs.youtube.utils.YoutubeParser

import scala.collection.JavaConverters._

final case class YoutubeChannel(id: String,
                                title: String,
                                customUrl: Option[String],
                                description: Option[String],
                                country: Option[String],
                                publishedAt: Instant,
                                categories: List[String],
                                videos: Option[Long],
                                subscribers: Option[Long],
                                views: Option[Long],
                                comments: Option[Long])

object YoutubeChannel {
  // see https://developers.google.com/youtube/v3/docs/channels#resource-representation
  def from(c: google.Channel): Either[YoutubeErrors, YoutubeChannel] = for {
    snippet <- Option(c.getSnippet).toRight(YoutubeErrors(s"Missing snippet for channel ${c.getId}"))
    topics <- Option(c.getTopicDetails).toRight(YoutubeErrors(s"Missing topicDetails for channel ${c.getId}"))
    statistics <- Option(c.getStatistics).toRight(YoutubeErrors(s"Missing statistics for channel ${c.getId}"))
    title <- Option(snippet.getTitle).toRight(YoutubeErrors(s"Missing title for channel ${c.getId}"))
    publishedAt <- Option(snippet.getPublishedAt).map(YoutubeParser.toInstant).toRight(YoutubeErrors(s"Missing publishedAt for channel ${c.getId}"))
  } yield new YoutubeChannel(
    id = c.getId,
    title = title,
    customUrl = Option(snippet.getCustomUrl),
    description = Option(snippet.getDescription),
    country = Option(snippet.getCountry),
    publishedAt = publishedAt,
    categories = Option(topics.getTopicCategories).map(_.asScala.toList).getOrElse(List()),
    videos = Option(statistics.getVideoCount).map(_.longValue()),
    subscribers = Option(statistics.getSubscriberCount).map(_.longValue()),
    views = Option(statistics.getViewCount).map(_.longValue()),
    comments = Option(statistics.getCommentCount).map(_.longValue()))
} 
Example 181
Source File: YoutubeParserSpec.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.libs.youtube.utils

import java.time.Instant

import com.google.api.client.util.DateTime
import gospeak.libs.testingutils.BaseSpec

class YoutubeParserSpec extends BaseSpec {
  describe("YoutubeParser") {
    describe("toInstant") {
      it("should parse google DateTime") {
        val date = DateTime.parseRfc3339("2013-06-17T08:51:45.000Z")
        val instant = Instant.parse("2013-06-17T08:51:45.000Z")
        YoutubeParser.toInstant(date) shouldBe instant
      }
    }
  }
} 
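
YoutubeParser.toInstant itself is not shown in this listing; a plausible minimal implementation (an assumption, not the project's actual code) relies on com.google.api.client.util.DateTime.getValue returning epoch milliseconds:

import java.time.Instant

import com.google.api.client.util.DateTime

object GoogleDateTimeToInstant extends App {
  def toInstant(d: DateTime): Instant = Instant.ofEpochMilli(d.getValue)

  println(toInstant(DateTime.parseRfc3339("2013-06-17T08:51:45.000Z"))) // 2013-06-17T08:51:45Z
}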
Example 182
Source File: MeetupAttendee.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.core.services.meetup.domain

import java.time.Instant

import gospeak.libs.scala.domain.Avatar

final case class MeetupAttendee(id: MeetupUser.Id,
                                name: String,
                                bio: Option[String],
                                avatar: Option[Avatar],
                                host: Boolean,
                                response: String,
                                guests: Int,
                                updated: Instant) {
  def meetupUrl(group: MeetupGroup.Slug): String = s"https://www.meetup.com/${group.value}/members/${id.value}/profile"
} 
Example 183
Source File: MeetupEvent.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.core.services.meetup.domain

import java.time.Instant

import gospeak.libs.scala.domain.CustomException

import scala.util.Try

final case class MeetupEvent(id: Long,
                             name: String,
                             status: String,
                             visibility: String,
                             // FIXME add local date
                             description: Option[String],
                             venue: Option[MeetupVenue],
                             rsvp_limit: Option[Int],
                             created: Instant)

object MeetupEvent {

  final case class Id(value: Long) extends AnyVal

  object Id {
    def from(in: String): Either[CustomException, Id] =
      Try(in.toLong).map(new Id(_)).toEither
        .left.map(e => CustomException(s"'$in' is an invalid MeetupEvent.Id: ${e.getMessage}"))
  }

  final case class Ref(group: MeetupGroup.Slug, event: Id) {
    def link: String = s"https://www.meetup.com/${group.value}/events/${event.value}"
  }

} 
Example 184
Source File: Sponsor.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.core.domain

import java.time.{Instant, LocalDate}

import gospeak.core.domain.utils.{Constants, Info}
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain.{DataClass, IId, Price, UuidIdBuilder}

case class Sponsor(id: Sponsor.Id,
                   group: Group.Id,
                   partner: Partner.Id,
                   pack: SponsorPack.Id,
                   contact: Option[Contact.Id],
                   start: LocalDate,
                   finish: LocalDate,
                   paid: Option[LocalDate],
                   price: Price,
                   info: Info) {
  def data: Sponsor.Data = Sponsor.Data(this)

  def isCurrent(now: Instant): Boolean =
    start.atStartOfDay().toInstant(Constants.defaultZoneId).isBefore(now) &&
      finish.atStartOfDay().toInstant(Constants.defaultZoneId).isAfter(now)
}

object Sponsor {
  def apply(group: Group.Id, data: Data, info: Info): Sponsor =
    new Sponsor(Id.generate(), group, data.partner, data.pack, data.contact, data.start, data.finish, data.paid, data.price, info)

  final class Id private(value: String) extends DataClass(value) with IId

  object Id extends UuidIdBuilder[Id]("Sponsor.Id", new Id(_))

  final case class Full(sponsor: Sponsor, pack: SponsorPack, partner: Partner, contact: Option[Contact]) {
    def isCurrent(now: Instant): Boolean = sponsor.isCurrent(now)

    def id: Id = sponsor.id

    def start: LocalDate = sponsor.start

    def finish: LocalDate = sponsor.finish

    def price: Price = sponsor.price

    def paid: Option[LocalDate] = sponsor.paid

    def hasContact(id: Contact.Id): Boolean = contact.exists(_.id == id)
  }

  final case class Data(partner: Partner.Id,
                        pack: SponsorPack.Id,
                        contact: Option[Contact.Id],
                        start: LocalDate,
                        finish: LocalDate,
                        paid: Option[LocalDate],
                        price: Price)

  object Data {
    def apply(s: Sponsor): Data = new Data(s.partner, s.pack, s.contact, s.start, s.finish, s.paid, s.price)
  }

} 
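
isCurrent above bridges two java.time types: the sponsoring period is stored as LocalDate while the comparison point is an Instant. A minimal standalone sketch of that check, substituting ZoneOffset.UTC for the project's Constants.defaultZoneId (an assumption):

import java.time.{Instant, LocalDate, ZoneOffset}

object SponsorIsCurrentDemo extends App {
  def isCurrent(start: LocalDate, finish: LocalDate, now: Instant): Boolean =
    start.atStartOfDay().toInstant(ZoneOffset.UTC).isBefore(now) &&
      finish.atStartOfDay().toInstant(ZoneOffset.UTC).isAfter(now)

  val now = Instant.parse("2020-06-15T00:00:00Z")
  println(isCurrent(LocalDate.of(2020, 1, 1), LocalDate.of(2020, 12, 31), now)) // true
  println(isCurrent(LocalDate.of(2019, 1, 1), LocalDate.of(2019, 12, 31), now)) // false
}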
Example 185
Source File: Info.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.core.domain.utils

import java.time.Instant

import gospeak.core.domain.User

final case class Info(createdAt: Instant,
                      createdBy: User.Id,
                      updatedAt: Instant,
                      updatedBy: User.Id) {
  def users: List[User.Id] = List(createdBy, updatedBy).distinct
}

object Info {
  def apply(by: User.Id, now: Instant): Info =
    new Info(now, by, now, by)
} 
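
The Info companion shows a common audit-trail idiom: on creation both timestamps start from the same Instant, and an edit only refreshes the updated pair. A minimal sketch with User.Id replaced by a plain String for illustration:

import java.time.Instant

object InfoDemo extends App {
  final case class Info(createdAt: Instant, createdBy: String, updatedAt: Instant, updatedBy: String)

  val now = Instant.parse("2020-01-01T00:00:00Z")
  val created = Info(now, "alice", now, "alice")
  val edited = created.copy(updatedAt = Instant.parse("2020-02-01T00:00:00Z"), updatedBy = "bob")

  println(edited.createdAt) // unchanged: 2020-01-01T00:00:00Z
  println(edited.updatedAt) // refreshed: 2020-02-01T00:00:00Z
}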
Example 186
Source File: CommonCfp.scala    From gospeak   with Apache License 2.0 5 votes vote down vote up
package gospeak.core.domain

import java.time.temporal.ChronoUnit
import java.time.{Instant, LocalDateTime}

import gospeak.core.domain.utils.Constants
import gospeak.core.domain.utils.SocialAccounts.SocialAccount.TwitterAccount
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain._

final case class CommonCfp(name: String,
                           logo: Option[Logo],
                           begin: Option[LocalDateTime],
                           close: Option[LocalDateTime],
                           location: Option[GMapPlace],
                           description: Markdown,
                           tags: Seq[Tag],
                           extra: Either[CommonCfp.External, CommonCfp.Internal]) {
  def closesInDays(nb: Int, now: Instant): Boolean = close.exists(_.toInstant(Constants.defaultZoneId).isBefore(now.minus(nb, ChronoUnit.DAYS)))

  def fold[A](f: CommonCfp.External => A)(g: CommonCfp.Internal => A): A = extra.fold(f, g)

  def internal: Option[CommonCfp.Internal] = extra.right.toOption

  def external: Option[CommonCfp.External] = extra.left.toOption
}

object CommonCfp {
  def apply(group: Group, cfp: Cfp): CommonCfp = new CommonCfp(
    name = cfp.name.value,
    logo = group.logo,
    begin = cfp.begin,
    close = cfp.close,
    location = group.location,
    description = cfp.description,
    tags = cfp.tags,
    extra = Right(Internal(
      slug = cfp.slug,
      group = InternalGroup(
        id = group.id,
        slug = group.slug))))

  def apply(cfp: ExternalCfp.Full): CommonCfp = new CommonCfp(
    name = cfp.event.name.value,
    logo = cfp.event.logo,
    begin = cfp.begin,
    close = cfp.close,
    location = cfp.event.location,
    description = cfp.description,
    tags = cfp.event.tags,
    extra = Left(External(
      id = cfp.id,
      url = cfp.url,
      event = ExternalExternalEvent(
        start = cfp.event.start,
        finish = cfp.event.finish,
        url = cfp.event.url,
        tickets = cfp.event.tickets,
        videos = cfp.event.videos,
        twitterAccount = cfp.event.twitterAccount,
        twitterHashtag = cfp.event.twitterHashtag))))

  final case class InternalGroup(id: Group.Id,
                                 slug: Group.Slug)

  final case class Internal(slug: Cfp.Slug,
                            group: InternalGroup)

  final case class ExternalExternalEvent(start: Option[LocalDateTime],
                                         finish: Option[LocalDateTime],
                                         url: Option[Url],
                                         tickets: Option[Url],
                                         videos: Option[Url],
                                         twitterAccount: Option[TwitterAccount],
                                         twitterHashtag: Option[TwitterHashtag])

  final case class External(id: ExternalCfp.Id,
                            url: Url,
                            event: ExternalExternalEvent)

} 
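
closesInDays above combines two java.time conversions: a LocalDateTime close date is turned into an Instant, and that Instant is compared against a now shifted by whole days. A minimal, self-contained sketch of the shifting and comparison, using ZoneOffset.UTC in place of Constants.defaultZoneId (an assumption) and showing both directions of the shift:

import java.time.temporal.ChronoUnit
import java.time.{Instant, LocalDateTime, ZoneOffset}

object CloseDateDemo extends App {
  val close: LocalDateTime = LocalDateTime.of(2020, 6, 20, 23, 59)
  val now: Instant = Instant.parse("2020-06-15T12:00:00Z")

  val closeInstant = close.toInstant(ZoneOffset.UTC)
  println(closeInstant.isBefore(now.plus(7, ChronoUnit.DAYS)))  // true: closes within the next 7 days
  println(closeInstant.isBefore(now.minus(7, ChronoUnit.DAYS))) // false: did not close more than 7 days ago
}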
Example 187
Source File: SagaInfo.scala    From zio-saga   with MIT License 5 votes vote down vote up
package com.vladkopanev.zio.saga.example.model

import java.time.Instant
import java.util.UUID

import io.circe.Json

case class SagaInfo(id: Long,
                    initiator: UUID,
                    createdAt: Instant,
                    finishedAt: Option[Instant],
                    data: Json,
                    `type`: String)

case class OrderSagaData(userId: UUID, orderId: BigInt, money: BigDecimal, bonuses: Double)

object OrderSagaData {
  import io.circe._, io.circe.generic.semiauto._
  implicit val decoder: Decoder[OrderSagaData] = deriveDecoder[OrderSagaData]
  implicit val encoder: Encoder[OrderSagaData] = deriveEncoder[OrderSagaData]
} 
Example 188
Source File: JobHistoryMarshaller.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package repository.impl.kv.marshaller

import java.time.Instant

import dcos.metronome.model.{JobHistory, JobId, JobRunInfo}
import dcos.metronome.repository.impl.kv.EntityMarshaller
import mesosphere.marathon.core.task.Task
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.collection.mutable

object JobHistoryMarshaller extends EntityMarshaller[JobHistory] {
  import JobHistoryConversions._

  lazy val log = LoggerFactory.getLogger(getClass)

  override def toBytes(jobHistory: JobHistory): IndexedSeq[Byte] = {
    val builder = Protos.JobHistory.newBuilder

    builder.setJobSpecId(jobHistory.jobSpecId.toString)
    builder.setSuccessCount(jobHistory.successCount)
    builder.setFailureCount(jobHistory.failureCount)
    builder.addAllSuccessfulRuns(jobHistory.successfulRuns.toProto.asJava)
    builder.addAllFailedRuns(jobHistory.failedRuns.toProto.asJava)

    jobHistory.lastSuccessAt.foreach(lastSuccessAt => builder.setLastSuccessAt(lastSuccessAt.toEpochMilli))
    jobHistory.lastFailureAt.foreach(lastFailureAt => builder.setLastFailureAt(lastFailureAt.toEpochMilli))

    builder.build.toByteArray.to[IndexedSeq]
  }

  override def fromBytes(bytes: IndexedSeq[Byte]): Option[JobHistory] =
    safeConversion { fromProto(Protos.JobHistory.parseFrom(bytes.toArray)) }

  private def fromProto(proto: Protos.JobHistory) = {
    val lastSuccessAt =
      if (proto.hasLastSuccessAt) Some(Instant.ofEpochMilli(proto.getLastSuccessAt)) else None

    val lastFailureAt =
      if (proto.hasLastFailureAt) Some(Instant.ofEpochMilli(proto.getLastFailureAt)) else None

    JobHistory(
      jobSpecId = JobId(proto.getJobSpecId),
      successCount = proto.getSuccessCount,
      failureCount = proto.getFailureCount,
      lastSuccessAt = lastSuccessAt,
      lastFailureAt = lastFailureAt,
      successfulRuns = proto.getSuccessfulRunsList.asScala.toModel,
      failedRuns = proto.getFailedRunsList.asScala.toModel
    )
  }
}

object JobHistoryConversions {

  implicit class JobRunInfoToProto(val jobRunInfos: Seq[JobRunInfo]) extends AnyVal {
    import JobRunConversions.JobRunIdToProto

    def toProto: Seq[Protos.JobHistory.JobRunInfo] =
      jobRunInfos.map { jobRunInfo =>
        val proto = Protos.JobHistory.JobRunInfo
          .newBuilder()
          .setJobRunId(jobRunInfo.id.toProto)
          .setCreatedAt(jobRunInfo.createdAt.toEpochMilli)
          .setFinishedAt(jobRunInfo.finishedAt.toEpochMilli)

        proto.addAllTasks(jobRunInfo.tasks.map(_.idString).asJava)

        proto.build()
      }
  }

  implicit class ProtoToJobRunInfo(val protos: mutable.Buffer[Protos.JobHistory.JobRunInfo]) extends AnyVal {
    import JobRunConversions.ProtoToJobRunId

    def toModel: Seq[JobRunInfo] =
      protos.map { proto =>
        JobRunInfo(
          id = proto.getJobRunId.toModel,
          createdAt = Instant.ofEpochMilli(proto.getCreatedAt),
          finishedAt = Instant.ofEpochMilli(proto.getFinishedAt),
          tasks = proto.getTasksList.asScala.map(Task.Id(_)).to[Seq]
        )
      }.toList
  }
} 
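
The marshaller persists every Instant as epoch milliseconds: toEpochMilli on write, Instant.ofEpochMilli on read. A minimal sketch of that round trip; note that sub-millisecond precision is lost, so equality only holds at millisecond resolution:

import java.time.Instant
import java.time.temporal.ChronoUnit

object EpochMillisRoundTrip extends App {
  val original: Instant = Instant.now()
  val stored: Long = original.toEpochMilli              // what goes into the protobuf field
  val restored: Instant = Instant.ofEpochMilli(stored)  // what comes back out

  println(restored == original.truncatedTo(ChronoUnit.MILLIS)) // true
}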
Example 189
Source File: JobHistory.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package model

import java.time.{Clock, Instant}

import mesosphere.marathon.core.task.Task

case class JobHistorySummary(
    jobSpecId: JobId,
    successCount: Long,
    failureCount: Long,
    lastSuccessAt: Option[Instant],
    lastFailureAt: Option[Instant]
)
object JobHistorySummary {
  def apply(h: JobHistory): JobHistorySummary = {
    JobHistorySummary(h.jobSpecId, h.successCount, h.failureCount, h.lastSuccessAt, h.lastFailureAt)
  }
  def empty(id: JobId): JobHistorySummary = JobHistorySummary(id, 0, 0, None, None)
}

case class JobHistory(
    jobSpecId: JobId,
    successCount: Long,
    failureCount: Long,
    lastSuccessAt: Option[Instant],
    lastFailureAt: Option[Instant],
    successfulRuns: Seq[JobRunInfo],
    failedRuns: Seq[JobRunInfo]
)

object JobHistory {
  def empty(id: JobId): JobHistory = JobHistory(id, 0, 0, None, None, Seq.empty, Seq.empty)
}

case class JobRunInfo(id: JobRunId, createdAt: Instant, finishedAt: Instant, tasks: Seq[Task.Id])
object JobRunInfo {
  def apply(run: JobRun): JobRunInfo = {
    JobRunInfo(run.id, run.createdAt, Clock.systemUTC().instant(), run.tasks.keys.to[Seq])
  }
} 
Example 190
Source File: JobRun.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package model

import java.time.Instant

import dcos.metronome.scheduler.TaskState
import mesosphere.marathon.core.task.Task

import scala.concurrent.duration.Duration

case class JobRun(
    id: JobRunId,
    jobSpec: JobSpec,
    status: JobRunStatus,
    createdAt: Instant,
    completedAt: Option[Instant],
    startingDeadline: Option[Duration],
    tasks: Map[Task.Id, JobRunTask]
)

case class JobRunTask(id: Task.Id, startedAt: Instant, completedAt: Option[Instant], status: TaskState)

object JobRunTask {
  def apply(task: Task): JobRunTask = {
    // Note: Terminal LaunchedEphemeral tasks are expunged from the repo
    // so it is somewhat safe to derive that completedAt for these tasks is always None!
    JobRunTask(
      id = task.taskId,
      startedAt = Instant.ofEpochMilli(task.status.stagedAt.millis),
      completedAt = None,
      status = TaskState(task)
    )
  }
}

sealed trait JobRunStatus
object JobRunStatus {

  case object Initial extends JobRunStatus
  case object Starting extends JobRunStatus
  case object Active extends JobRunStatus
  case object Success extends JobRunStatus
  case object Failed extends JobRunStatus

  val names: Map[String, JobRunStatus] =
    Map("INITIAL" -> Initial, "STARTING" -> Starting, "ACTIVE" -> Active, "SUCCESS" -> Success, "FAILED" -> Failed)
  val statusNames: Map[JobRunStatus, String] = names.map { case (a, b) => (b, a) }

  def name(status: JobRunStatus): String = statusNames(status)
  def unapply(name: String): Option[JobRunStatus] = names.get(name)
  def isDefined(name: String): Boolean = names.contains(name)
} 
Example 191
Source File: ScheduleSpec.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package model

import java.time.{Clock, Instant, ZoneId, ZonedDateTime}

import com.wix.accord.Validator
import com.wix.accord.dsl._

import scala.concurrent.duration._

case class ScheduleSpec(
    id: String,
    cron: CronSpec,
    timeZone: ZoneId = ScheduleSpec.DefaultTimeZone,
    startingDeadline: Duration = ScheduleSpec.DefaultStartingDeadline,
    concurrencyPolicy: ConcurrencyPolicy = ScheduleSpec.DefaultConcurrencyPolicy,
    enabled: Boolean = ScheduleSpec.DefaultEnabled
) {
  def clock: Clock = ScheduleSpec.DefaultClock

  def nextExecution(after: Instant): Instant = {
    val localAfter = ZonedDateTime.ofInstant(after, timeZone)
    val localNext = cron.nextExecution(localAfter)
    localNext.toInstant
  }

  def nextExecution(): Instant = nextExecution(clock.instant())
}

object ScheduleSpec {
  val DefaultTimeZone = ZoneId.of("UTC")
  val DefaultStartingDeadline = 15.minutes
  val DefaultConcurrencyPolicy = ConcurrencyPolicy.Allow
  val DefaultEnabled = true
  val DefaultClock = Clock.systemUTC()

  implicit lazy val validScheduleSpec: Validator[ScheduleSpec] = validator[ScheduleSpec] { spec =>
    spec.startingDeadline >= 1.minute
  }
} 
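
nextExecution interprets an Instant in the schedule's time zone, lets the cron expression advance it there, and converts the result back to an Instant. A minimal sketch of that zone hop with plusDays standing in for cron.nextExecution; the dates cross the 2019 DST switch in Europe/Rome, matching the expectation in the test further below:

import java.time.{Instant, ZoneId, ZonedDateTime}

object ZoneHopDemo extends App {
  val zone = ZoneId.of("Europe/Rome")
  val after: Instant = Instant.parse("2019-03-30T22:55:00Z")

  val localAfter: ZonedDateTime = ZonedDateTime.ofInstant(after, zone) // 2019-03-30T23:55+01:00
  val localNext: ZonedDateTime = localAfter.plusDays(1)                // stand-in for cron.nextExecution
  println(localNext.toInstant) // 2019-03-31T21:55:00Z, because Rome is on UTC+2 after the DST switch
}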
Example 192
Source File: Event.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package model

import java.time.{Clock, Instant}

trait Event {
  val eventType: String
  val timestamp: Instant
}

object Event {

  trait JobSpecEvent extends Event
  case class JobSpecCreated(
      job: JobSpec,
      eventType: String = "job_created",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobSpecEvent

  case class JobSpecUpdated(
      job: JobSpec,
      eventType: String = "job_updated",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobSpecEvent

  case class JobSpecDeleted(
      job: JobSpec,
      eventType: String = "job_deleted",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobSpecEvent

  trait JobRunEvent extends Event
  case class JobRunStarted(
      jobRun: JobRun,
      eventType: String = "job_run_started",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobRunEvent

  case class JobRunUpdate(
      jobRun: JobRun,
      eventType: String = "job_run_updated",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobRunEvent

  case class JobRunFinished(
      jobRun: JobRun,
      eventType: String = "job_run_finished",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobRunEvent

  case class JobRunFailed(
      jobRun: JobRun,
      eventType: String = "job_run_failed",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends JobRunEvent

  trait ReconciliationEvent extends Event
  case class ReconciliationFinished(
      eventType: String = "job_run_failed",
      timestamp: Instant = Clock.systemUTC().instant()
  ) extends ReconciliationEvent

} 
Example 193
Source File: JobSpecSchedulerActor.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome
package jobspec.impl

import java.time.{Clock, Instant}
import java.util.concurrent.TimeUnit

import akka.actor._
import dcos.metronome.jobrun.JobRunService
import dcos.metronome.model.{JobSpec, ScheduleSpec}

import scala.concurrent.duration._


class JobSpecSchedulerActor(initSpec: JobSpec, clock: Clock, runService: JobRunService)
    extends Actor
    with Stash
    with ActorLogging {

  import JobSpecSchedulerActor._
  import context.dispatcher

  private[impl] var spec = initSpec
  private[impl] var nextSchedule: Option[Cancellable] = None
  private[impl] var scheduledAt: Option[Instant] = None

  override def preStart(): Unit = {
    scheduleNextRun()
  }

  override def postStop(): Unit = {
    cancelSchedule()
  }

  override def receive: Receive = {
    case StartJob(schedule) => runJob(schedule)
    case UpdateJobSpec(newSpec) => updateSpec(newSpec)
  }

  def updateSpec(newSpec: JobSpec): Unit = {
    log.info(s"JobSpec ${newSpec.id} has been updated. Reschedule.")
    spec = newSpec
    scheduledAt = None
    scheduleNextRun()
  }

  def runJob(schedule: ScheduleSpec): Unit = {
    log.info(s"Start next run of job ${spec.id}, which was scheduled for $scheduledAt")
    runService.startJobRun(spec, Some(schedule))
    scheduleNextRun()
  }

  def scheduleNextRun(): Unit = {
    val lastScheduledAt = scheduledAt
    cancelSchedule()
    // TODO: only reschedule for one specific schedule!
    spec.schedules.foreach { schedule =>
      val now = clock.instant()
      val from = lastScheduledAt.getOrElse(now)
      val nextTime = schedule.nextExecution(from)
      scheduledAt = Some(nextTime)
      // 60 secs is the smallest unit of reschedule time for cron
      val inSeconds = Math.max(java.time.Duration.between(now, nextTime).getSeconds, 60)
      nextSchedule =
        Some(context.system.scheduler.scheduleOnce(Duration(inSeconds, TimeUnit.SECONDS), self, StartJob(schedule)))
      log.info(s"Spec ${spec.id}: next run is scheduled for: $nextTime (in $inSeconds seconds)")
    }
  }

  def cancelSchedule(): Unit = {
    nextSchedule.foreach { c => if (!c.isCancelled) c.cancel() }
    nextSchedule = None
    scheduledAt = None
  }
}

object JobSpecSchedulerActor {

  case class StartJob(schedule: ScheduleSpec)
  case class UpdateJobSpec(newSpec: JobSpec)

  def props(spec: JobSpec, clock: Clock, runService: JobRunService): Props = {
    Props(new JobSpecSchedulerActor(spec, clock, runService))
  }
} 
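
scheduleNextRun converts the gap between now and the next fire time into a delay for the actor scheduler, flooring it at 60 seconds because cron has minute granularity. The computation in isolation, with the same figures as the daylight-savings test below:

import java.time.{Duration, Instant}

object RescheduleDelayDemo extends App {
  val now = Instant.parse("2019-03-30T23:54:59Z")
  val nextTime = Instant.parse("2019-03-31T21:55:00Z")

  val inSeconds = Math.max(Duration.between(now, nextTime).getSeconds, 60)
  println(inSeconds) // 79201
}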
Example 194
Source File: SettableClock.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome

import java.time.{Clock, Instant, LocalDateTime, ZoneOffset, ZoneId, Duration}

import scala.concurrent.duration.FiniteDuration

object SettableClock {
  private val defaultJavaClock =
    Clock.fixed(LocalDateTime.of(2015, 4, 9, 12, 30, 0).toInstant(ZoneOffset.UTC), ZoneOffset.UTC)

  def ofNow() = new SettableClock(Clock.fixed(Instant.now(), ZoneOffset.UTC))
}

class SettableClock(private[this] var clock: Clock = SettableClock.defaultJavaClock) extends Clock {
  private[this] var subscribers: List[() => Unit] = Nil
  def onChange(fn: () => Unit): Unit =
    synchronized {
      subscribers = fn :: subscribers
    }

  override def getZone: ZoneId = clock.getZone

  override def instant(): Instant = clock.instant()

  override def withZone(zoneId: ZoneId): Clock = new SettableClock(clock.withZone(zoneId))

  def +=(duration: FiniteDuration): Unit = plus(duration)

  def plus(duration: FiniteDuration): this.type =
    plus(Duration.ofMillis(duration.toMillis))

  def plus(duration: Duration): this.type = {
    clock = Clock.offset(clock, duration)
    subscribers.foreach(_())
    this
  }

  def at(instant: Instant): this.type = {
    clock = Clock.fixed(instant, clock.getZone)
    subscribers.foreach(_())
    this
  }
} 
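A REPL-style usage sketch for the test clock above: it starts fixed at the default instant and is advanced explicitly, notifying any onChange subscribers on every move. The timestamp literal is arbitrary.

import java.time.Instant
import scala.concurrent.duration._

val clock = new SettableClock()
clock.onChange(() => println(s"clock moved to ${clock.instant()}"))
clock += 5.minutes                                  // offsets the underlying clock forward
clock.at(Instant.parse("2020-01-01T00:00:00Z"))     // pins the clock to an absolute instant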
Example 195
Source File: ScheduleSpecTest.scala    From metronome   with Apache License 2.0 5 votes vote down vote up
package dcos.metronome.jobspec.impl

import java.time.{Instant, ZoneId}

import dcos.metronome.model.{ConcurrencyPolicy, CronSpec, ScheduleSpec}
import dcos.metronome.utils.test.Mockito
import org.scalatest.{FunSuite, GivenWhenThen, Matchers}

import scala.concurrent.duration._

class ScheduleSpecTest extends FunSuite with Matchers with Mockito with GivenWhenThen {
  test("nextExecution when close to daylight savings") {
    val schedule = ScheduleSpec(
      id = "default",
      cron = CronSpec("55 23 * * *"),
      timeZone = ZoneId.of("Europe/Rome"),
      startingDeadline = 900.seconds,
      concurrencyPolicy = ConcurrencyPolicy.Allow,
      enabled = true
    )

    Given("a schedule that was last run at 22:55")
    val lastScheduledAt = Instant.parse("2019-03-30T22:55:00.000Z")

    When("we are now close to midnight and compute the next scheduled time")
    val now = Instant.parse("2019-03-30T23:54:59.000Z")
    val nextTime = schedule.nextExecution(lastScheduledAt)
    // 60 secs is the smallest unit of reschedule time for cron
    val inSeconds = Math.max(java.time.Duration.between(now, nextTime).getSeconds, 60)
    println(s"now is $now, nextScheduleIn = $inSeconds seconds, next run is scheduled for: $nextTime")

    Then("The next run should be scheduled on the 31st")
    val expected = Instant.parse("2019-03-31T21:55:00Z")
    nextTime shouldEqual expected
  }
} 
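Because nextExecution takes the previously scheduled instant and returns the following one (the same pattern the scheduler actor earlier in this listing uses), successive runs can be obtained by feeding the result back in. A REPL-style sketch using the same schedule as the test above:

val firstRun  = schedule.nextExecution(Instant.parse("2019-03-30T22:55:00Z"))
val secondRun = schedule.nextExecution(firstRun)   // the run after firstRun, per the same cron expression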
Example 196
Source File: Implicits.scala    From scala-cass   with MIT License 5 votes vote down vote up
package com.weather.scalacass.jdk8

import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
import com.weather.scalacass.CassFormatDecoderVersionSpecific.codecCassFormatDecoder
import CassFormatEncoder.sameTypeCassFormatEncoder
import java.time.{ Instant, LocalDate, LocalTime, ZonedDateTime }

import com.datastax.driver.core.{ Cluster, DataType }
import com.google.common.reflect.TypeToken

object Implicits {
  implicit val timeEncoder: CassFormatEncoder[LocalTime] = sameTypeCassFormatEncoder(DataType.time)
  implicit val timeDecoder: CassFormatDecoder[LocalTime] = codecCassFormatDecoder(TypeToken.of(classOf[LocalTime]))

  implicit val dateEncoder: CassFormatEncoder[LocalDate] = sameTypeCassFormatEncoder(DataType.date)
  implicit val dateDecoder: CassFormatDecoder[LocalDate] = codecCassFormatDecoder(TypeToken.of(classOf[LocalDate]))

  implicit val instantEncoder: CassFormatEncoder[Instant] = sameTypeCassFormatEncoder(DataType.timestamp)
  implicit val instantDecoder: CassFormatDecoder[Instant] = codecCassFormatDecoder(TypeToken.of(classOf[Instant]))

  implicit def zonedDateTimeEncoder(implicit cluster: Cluster): CassFormatEncoder[ZonedDateTime] =
    sameTypeCassFormatEncoder(cluster.getMetadata.newTupleType(DataType.timestamp, DataType.varchar))
  implicit val zonedDateTimeDecoder: CassFormatDecoder[ZonedDateTime] = codecCassFormatDecoder(TypeToken.of(classOf[ZonedDateTime]))
} 
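With these implicits imported, scala-cass can resolve encoders and decoders for the java.time types wherever it expects a CassFormatEncoder or CassFormatDecoder; the ZonedDateTime encoder additionally needs an implicit Cluster in scope, as its signature shows. A REPL-style sketch summoning the Instant pair explicitly:

import java.time.Instant
import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
import com.weather.scalacass.jdk8.Implicits._

val instantEnc = implicitly[CassFormatEncoder[Instant]]   // backed by the Cassandra timestamp type
val instantDec = implicitly[CassFormatDecoder[Instant]]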
Example 197
Source File: CallStatsAggregatorSpec.scala    From cloudflow   with Apache License 2.0 5 votes vote down vote up
package carly.aggregator

import java.time.Instant

import carly.data._
import cloudflow.spark.testkit._
import cloudflow.spark.sql.SQLImplicits._
import org.scalatest.OptionValues

class CallStatsAggregatorSpec extends SparkScalaTestSupport with OptionValues {

  val streamlet = new CallStatsAggregator()
  val testKit = SparkStreamletTestkit(session).withConfigParameterValues(ConfigParameterValue(streamlet.GroupByWindow, "1 minute"),
                                                                         ConfigParameterValue(streamlet.Watermark, "1 minute"))

  "CallStatsAggregator" should {
    "produce elements to its outlet" in {

      // setup inlet tap on inlet port
      val in = testKit.inletAsTap[CallRecord](streamlet.in)

      // setup outlet tap on outlet port
      val out = testKit.outletAsTap[AggregatedCallStats](streamlet.out)

      val ts = Instant.now.toEpochMilli / 1000
      val crs = (1 to 10).toList.map { i ⇒
        CallRecord(
          s"user-1",
          s"user-2",
          (if (i % 2 == 0) "incoming" else "outgoing"),
          i * 10,
          ts
        )
      }

      in.addData(crs)

      val run = testKit.run(streamlet, Seq(in), Seq(out))

      // get data from outlet tap
      val results = out.asCollection(session)

      // assert
      val aggregate = results.headOption.value
      aggregate.totalCallDuration must be(550)
      aggregate.avgCallDuration must (be > 54.9 and be < 55.1)
      run.totalRows must be > 0L
    }
  }
} 
Example 198
Source File: CallRecordSplitSpec.scala    From cloudflow   with Apache License 2.0 5 votes vote down vote up
package carly.ingestor

import java.time.Instant
import java.time.temporal.ChronoUnit

import akka.actor._
import akka.stream.scaladsl._
import akka.testkit._
import org.scalatest._
import org.scalatest.concurrent._

import cloudflow.akkastream.testkit.scaladsl._
import carly.data._

class CallRecordSplitSpec extends WordSpec with MustMatchers with ScalaFutures with BeforeAndAfterAll {

  private implicit val system = ActorSystem("CallRecordSplitSpec")

  override def afterAll: Unit =
    TestKit.shutdownActorSystem(system)

  "A CallRecordSplit" should {
    "merge incoming data" in {
      val testkit   = AkkaStreamletTestKit(system)
      val streamlet = new CallRecordSplit

      val instant = Instant.now.toEpochMilli / 1000
      val past    = Instant.now.minus(5000, ChronoUnit.DAYS).toEpochMilli / 1000

      val cr1 = CallRecord("user-1", "user-2", "f", 10L, instant)
      val cr2 = CallRecord("user-1", "user-2", "f", 15L, instant)
      val cr3 = CallRecord("user-1", "user-2", "f", 18L, instant)

      val source = Source(Vector(cr1, cr2, cr3))

      val in   = testkit.inletFromSource(streamlet.in, source)
      val left  = testkit.outletAsTap(streamlet.left)
      val right = testkit.outletAsTap(streamlet.right)

      testkit.run(
        streamlet,
        List(in),
        List(left, right),
        () ⇒ {
          right.probe.expectMsg(("user-1", cr1))
          right.probe.expectMsg(("user-1", cr2))
          right.probe.expectMsg(("user-1", cr3))
        }
      )

      right.probe.expectMsg(Completed)
    }

    "split incoming data into valid call records and those outside the time range" in {
      val testkit   = AkkaStreamletTestKit(system)
      val streamlet = new CallRecordSplit()

      val instant = Instant.now.toEpochMilli / 1000
      val past    = Instant.now.minus(5000, ChronoUnit.DAYS).toEpochMilli / 1000

      val cr1 = CallRecord("user-1", "user-2", "f", 10L, instant)
      val cr2 = CallRecord("user-1", "user-2", "f", 15L, instant)
      val cr3 = CallRecord("user-1", "user-2", "f", 18L, instant)
      val cr4 = CallRecord("user-1", "user-2", "f", 40L, past)
      val cr5 = CallRecord("user-1", "user-2", "f", 70L, past)

      val source = Source(Vector(cr1, cr2, cr3, cr4, cr5))

      val in = testkit.inletFromSource(streamlet.in, source)

      val left  = testkit.outletAsTap(streamlet.left)
      val right = testkit.outletAsTap(streamlet.right)

      testkit.run(
        streamlet,
        List(in),
        List(left, right),
        () ⇒ {
          right.probe.expectMsg(("user-1", cr1))
          right.probe.expectMsg(("user-1", cr2))
          right.probe.expectMsg(("user-1", cr3))
          left.probe.expectMsg((cr4.toString, InvalidRecord(cr4.toString, "Timestamp outside range!")))
          left.probe.expectMsg((cr5.toString, InvalidRecord(cr5.toString, "Timestamp outside range!")))
        }
      )

      left.probe.expectMsg(Completed)
      right.probe.expectMsg(Completed)
    }
  }
} 
Example 199
Source File: JsonFormats.scala    From cloudflow   with Apache License 2.0 5 votes vote down vote up
//tag::code[]
package sensordata

import java.time.Instant
import java.util.UUID

import scala.util.Try

import spray.json._

trait UUIDJsonSupport extends DefaultJsonProtocol {
  implicit object UUIDFormat extends JsonFormat[UUID] {
    def write(uuid: UUID) = JsString(uuid.toString)

    def read(json: JsValue): UUID = json match {
      case JsString(uuid) ⇒ Try(UUID.fromString(uuid)).getOrElse(deserializationError(s"Expected valid UUID but got '$uuid'."))
      case other          ⇒ deserializationError(s"Expected UUID as JsString, but got: $other")
    }
  }
}

trait InstantJsonSupport extends DefaultJsonProtocol {
  implicit object InstantFormat extends JsonFormat[Instant] {
    def write(instant: Instant) = JsNumber(instant.toEpochMilli)

    def read(json: JsValue): Instant = json match {
      case JsNumber(value) ⇒ Instant.ofEpochMilli(value.toLong)
      case other           ⇒ deserializationError(s"Expected Instant as JsNumber, but got: $other")
    }
  }
}

object MeasurementsJsonSupport extends DefaultJsonProtocol {
  implicit val measurementFormat = jsonFormat3(Measurements.apply)
}

object SensorDataJsonSupport extends DefaultJsonProtocol with UUIDJsonSupport with InstantJsonSupport {
  import MeasurementsJsonSupport._
  implicit val sensorDataFormat = jsonFormat3(SensorData.apply)
}
//end::code[] 
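A minimal round-trip sketch for the Instant format defined above: it serializes as epoch milliseconds, so sub-millisecond precision is dropped on the way back. The object name is illustrative.

import java.time.Instant
import spray.json._

object InstantJsonExample extends InstantJsonSupport {
  val now: Instant    = Instant.now()
  val asJson: JsValue = now.toJson                 // JsNumber(now.toEpochMilli)
  val back: Instant   = asJson.convertTo[Instant]  // Instant.ofEpochMilli(...)
}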
Example 200
Source File: QueryGuardEvent.scala    From gimel   with Apache License 2.0 5 votes vote down vote up
package com.paypal.gimel.common.query.guard

import java.time.Instant
import java.util.concurrent.{Delayed, TimeUnit}

import com.google.common.base.Objects
import com.google.common.primitives.Ints
import org.joda.time.DateTime

import com.paypal.gimel.logger.Logger

private[query] sealed trait QueryGuardEvent

private[query] trait QueryGuardDelayedEvent extends QueryGuardEvent with Delayed

private[query] case class JobSubmitted(jobId: Int,
                                       jobType: String,
                                       startTime: Long =
                                         Instant.now().toEpochMilli,
                                       estimatedJobEndTime: Long,
                                       estimatedDelayEndTime: Long)
    extends QueryGuardDelayedEvent {
  private val logger = Logger(this.getClass.getName)

  override def getDelay(unit: TimeUnit): Long = {
    val currentInstant = Instant.now().toEpochMilli
    val diff = estimatedDelayEndTime - currentInstant
    logger.info(
      s"[JobSubmitted] Comparing Job with ID: $jobId diff: $diff with end time:" +
        s" ${new DateTime(estimatedDelayEndTime)}, and current instant:" +
        s" ${new DateTime(currentInstant)}"
    )
    unit.convert(diff, TimeUnit.MILLISECONDS)
  }

  override def compareTo(o: Delayed): Int = {
    Ints.saturatedCast(
      this.estimatedDelayEndTime - o
        .asInstanceOf[JobSubmitted]
        .estimatedDelayEndTime
    )
  }

  override def toString: String =
    Objects
      .toStringHelper(this)
      .add("jobId", jobId)
      .add("jobType", jobType)
      .add("startTime", startTime)
      .add("estimatedJobEndTime", estimatedJobEndTime)
      .add("estimatedDelayEndTime", estimatedDelayEndTime)
      .toString
}

object JobSubmitted {
  def apply(jobId: Int,
            jobType: String,
            startTime: Long,
            jobTtl: Int,
            delayTtl: Int): JobSubmitted =
    new JobSubmitted(
      jobId,
      jobType,
      startTime,
      startTime + jobTtl,
      startTime + delayTtl
    )

  def apply(job: JobSubmitted, jobTtl: Int, delayTime: Long): JobSubmitted =
    new JobSubmitted(
      jobId = job.jobId,
      jobType = job.jobType,
      startTime = job.startTime,
      estimatedJobEndTime = job.startTime + jobTtl,
      estimatedDelayEndTime = delayTime
    )
}

private[query] case class JobKill(jobId: Int, jobType: String, reason: String)
    extends QueryGuardEvent {
  override def toString: String =
    Objects
      .toStringHelper(this)
      .add("jobId", jobId)
      .add("jobType", jobType)
      .add("reason", reason)
      .toString
}
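Because JobSubmitted implements java.util.concurrent.Delayed, it can be driven by a DelayQueue: take() releases a job only once its estimatedDelayEndTime has passed. A REPL-style sketch with illustrative values (the classes are private[query], so real usage lives inside the com.paypal.gimel.common.query.guard package):

import java.time.Instant
import java.util.concurrent.DelayQueue

val queue = new DelayQueue[JobSubmitted]()
queue.put(
  JobSubmitted(jobId = 42, jobType = "spark", startTime = Instant.now().toEpochMilli, jobTtl = 60000, delayTtl = 120000)
)
val expired = queue.take()   // blocks until the delay TTL has elapsed (about 120 seconds here)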