spray.json.JsObject Scala Examples

The following examples show how to use spray.json.JsObject. They are drawn from open source projects; the Source File line above each example identifies the original project and file.
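Before the project examples, here is a minimal, self-contained sketch of the core JsObject API they all rely on (construction from field pairs, field access, parsing, and printing); the field names and values are illustrative only.

import spray.json._

// Build a JsObject directly from (String, JsValue) pairs.
val user: JsObject = JsObject(
  "name" -> JsString("alice"),
  "age"  -> JsNumber(42)
)

// fields is an immutable Map[String, JsValue].
val maybeName: Option[JsValue] = user.fields.get("name")

// Parse a JSON string and narrow the result to a JsObject.
val parsed: JsObject = """{"name":"bob","age":7}""".parseJson.asJsObject

// Render back to a JSON string.
val rendered: String = user.compactPrint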
Example 1
Source File: CosmosDBUtil.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.database.cosmosdb

import com.microsoft.azure.cosmosdb.internal.Constants.Properties.{AGGREGATE, E_TAG, ID, SELF_LINK}
import org.apache.openwhisk.core.database.cosmosdb.CosmosDBConstants._
import org.apache.openwhisk.core.database.StoreUtils.transform
import spray.json.{JsObject, JsString}

private[cosmosdb] trait CosmosDBUtil {

  
  def escapeId(id: String): String = {
    require(!id.contains("|"), s"Id [$id] should not contain '|'")
    id.replace("/", "|")
  }

  def unescapeId(id: String): String = {
    require(!id.contains("/"), s"Escaped Id [$id] should not contain '/'")
    id.replace("|", "/")
  }

  def toWhiskJsonDoc(js: JsObject, id: String, etag: Option[JsString]): JsObject = {
    val fieldsToAdd = Seq((_id, Some(JsString(unescapeId(id)))), (_rev, etag))
    transform(stripInternalFields(js), fieldsToAdd, Seq.empty)
  }

  private def stripInternalFields(js: JsObject) = {
    // Strip out all field names starting with '_', which are considered DB-specific internal fields
    JsObject(js.fields.filter { case (k, _) => !k.startsWith("_") && k != cid })
  }

}

private[cosmosdb] object CosmosDBUtil extends CosmosDBUtil 
Example 2
Source File: ValidatorTask.scala    From ohara    with Apache License 2.0
package oharastream.ohara.connector.validation

import java.util
import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.InspectApi.{RdbInfo, RdbQuery}
import oharastream.ohara.client.configurator.{ErrorApi, InspectApi}
import oharastream.ohara.client.database.DatabaseClient
import oharastream.ohara.common.data.Serializer
import oharastream.ohara.common.util.VersionUtils
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.source.{SourceRecord, SourceTask}
import spray.json.{JsObject, _}

import scala.jdk.CollectionConverters._
class ValidatorTask extends SourceTask {
  private[this] var done                       = false
  private[this] var props: Map[String, String] = _
  private[this] val topic: String              = InspectApi.INTERNAL_TOPIC_KEY.topicNameOnKafka
  private[this] var requestId: String          = _
  override def start(props: util.Map[String, String]): Unit = {
    this.props = props.asScala.toMap
    requestId = require(InspectApi.REQUEST_ID)
  }

  override def poll(): util.List[SourceRecord] =
    if (done) {
      // just wait for the configurator to close this connector
      TimeUnit.SECONDS.sleep(2)
      null
    } else
      try information match {
        case query: RdbQuery => toSourceRecord(validate(query))
      } catch {
        case e: Throwable => toSourceRecord(ErrorApi.of(e))
      } finally done = true

  override def stop(): Unit = {
    // do nothing
  }

  override def version(): String = VersionUtils.VERSION

  private[this] def validate(query: RdbQuery): RdbInfo = {
    val client = DatabaseClient.builder.url(query.url).user(query.user).password(query.password).build
    try RdbInfo(
      name = client.databaseType,
      tables = client.tableQuery
        .catalog(query.catalogPattern.orNull)
        .schema(query.schemaPattern.orNull)
        .tableName(query.tableName.orNull)
        .execute()
    )
    finally client.close()
  }

  private[this] def toJsObject: JsObject = props(InspectApi.SETTINGS_KEY).parseJson.asJsObject
  private[this] def information = require(InspectApi.TARGET_KEY) match {
    case InspectApi.RDB_KIND => InspectApi.RDB_QUERY_FORMAT.read(toJsObject)
    case other: String =>
      throw new IllegalArgumentException(
        s"valid targets are ${InspectApi.RDB_KIND}. current is $other"
      )
  }

  private[this] def toSourceRecord(data: Object): util.List[SourceRecord] =
    util.Arrays.asList(
      new SourceRecord(
        null,
        null,
        topic,
        Schema.BYTES_SCHEMA,
        Serializer.STRING.to(requestId),
        Schema.BYTES_SCHEMA,
        Serializer.OBJECT.to(data)
      )
    )

  private[this] def require(key: String): String =
    props.getOrElse(key, throw new IllegalArgumentException(s"the $key is required"))
} 
Example 3
Source File: ClusterRequest.scala    From ohara    with Apache License 2.0
package oharastream.ohara.client.configurator
import oharastream.ohara.common.annotations.Optional
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import spray.json.DefaultJsonProtocol._
import spray.json.{JsArray, JsNumber, JsObject, JsString, JsValue}

import scala.jdk.CollectionConverters._
import scala.collection.mutable

trait ClusterRequest {
  protected def key: ObjectKey = ObjectKey.of(
    settings.get(GROUP_KEY).map(_.convertTo[String]).getOrElse(GROUP_DEFAULT),
    settings(NAME_KEY).convertTo[String]
  )

  protected val settings: mutable.Map[String, JsValue] = mutable.Map()

  @Optional("default key is a random string. But it is required in updating")
  def key(key: ObjectKey): ClusterRequest.this.type = {
    setting(NAME_KEY, JsString(key.name()))
    setting(GROUP_KEY, JsString(key.group()))
  }

  @Optional("default name is a random string. But it is required in updating")
  def name(name: String): ClusterRequest.this.type =
    setting(NAME_KEY, JsString(CommonUtils.requireNonEmpty(name)))
  @Optional("default is GROUP_DEFAULT")
  def group(group: String): ClusterRequest.this.type =
    setting(GROUP_KEY, JsString(CommonUtils.requireNonEmpty(group)))
  def nodeName(nodeName: String): ClusterRequest.this.type = nodeNames(Set(CommonUtils.requireNonEmpty(nodeName)))
  def nodeNames(nodeNames: Set[String]): ClusterRequest.this.type =
    setting(NODE_NAMES_KEY, JsArray(CommonUtils.requireNonEmpty(nodeNames.asJava).asScala.map(JsString(_)).toVector))

  @Optional("default value is empty array")
  def routes(routes: Map[String, String]): ClusterRequest.this.type =
    setting(ROUTES_KEY, JsObject(routes.map {
      case (k, v) => k -> JsString(v)
    }))

  @Optional("default value is 1024")
  def initHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(INIT_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("default value is 1024")
  def maxHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(MAX_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("extra settings is empty by default")
  def setting(key: String, value: JsValue): ClusterRequest.this.type =
    settings(Map(key -> value))
  @Optional("extra settings is empty by default")
  def settings(settings: Map[String, JsValue]): ClusterRequest.this.type = {
    // We don't have to check whether the settings are empty here, for two reasons:
    // 1) we may want the benefit of default creation without specifying settings
    // 2) the actual checking is done in the JSON parsing phase of creation or update
    this.settings ++= settings
    this
  }
} 
Example 4
Source File: DomainJsonEncoder.scala    From daml    with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.json

import com.daml.http.domain
import com.daml.ledger.api.{v1 => lav1}
import scalaz.\/
import scalaz.syntax.bitraverse._
import scalaz.syntax.show._
import scalaz.syntax.traverse._
import spray.json.{JsObject, JsValue, JsonWriter}

class DomainJsonEncoder(
    val apiRecordToJsObject: lav1.value.Record => JsonError \/ JsObject,
    val apiValueToJsValue: lav1.value.Value => JsonError \/ JsValue
) {

  import com.daml.http.util.ErrorOps._

  def encodeExerciseCommand(
      cmd: domain.ExerciseCommand[lav1.value.Value, domain.ContractLocator[lav1.value.Value]])(
      implicit ev: JsonWriter[domain.ExerciseCommand[JsValue, domain.ContractLocator[JsValue]]])
    : JsonError \/ JsValue =
    for {
      x <- cmd.bitraverse(
        arg => apiValueToJsValue(arg),
        ref => ref.traverse(a => apiValueToJsValue(a))
      ): JsonError \/ domain.ExerciseCommand[JsValue, domain.ContractLocator[JsValue]]

      y <- SprayJson.encode(x).liftErr(JsonError)

    } yield y

  object implicits {
    implicit val ApiValueJsonWriter: JsonWriter[lav1.value.Value] = (obj: lav1.value.Value) =>
      apiValueToJsValue(obj).valueOr(e => spray.json.serializationError(e.shows))

    implicit val ApiRecordJsonWriter: JsonWriter[lav1.value.Record] = (obj: lav1.value.Record) =>
      apiRecordToJsObject(obj).valueOr(e => spray.json.serializationError(e.shows))
  }
} 
Example 5
Source File: ApiValueToJsValueConverter.scala    From daml    with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.json

import JsonProtocol.LfValueCodec
import com.daml.http.util.ApiValueToLfValueConverter
import com.daml.ledger.api.{v1 => lav1}
import scalaz.std.list._
import scalaz.syntax.show._
import scalaz.syntax.traverse._
import scalaz.{\/, \/-}
import spray.json.{JsObject, JsValue}

class ApiValueToJsValueConverter(apiToLf: ApiValueToLfValueConverter.ApiValueToLfValue) {

  def apiValueToJsValue(a: lav1.value.Value): JsonError \/ JsValue =
    apiToLf(a)
      .map(LfValueCodec.apiValueToJsValue)
      .leftMap(x => JsonError(x.shows))

  def apiRecordToJsObject(a: lav1.value.Record): JsonError \/ JsObject = {
    a.fields.toList.traverse(convertField).map(fs => JsObject(fs.toMap))
  }

  private def convertField(field: lav1.value.RecordField): JsonError \/ (String, JsValue) =
    field.value match {
      case None => \/-(field.label -> JsObject.empty)
      case Some(v) => apiValueToJsValue(v).map(field.label -> _)
    }
} 
Example 6
Source File: TlsTest.scala    From daml    with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import HttpServiceTestFixture.UseTls
import akka.http.scaladsl.model.{StatusCodes, Uri}
import org.scalatest.{Assertion, AsyncFreeSpec, Inside, Matchers}
import spray.json.{JsArray, JsObject}

import scala.concurrent.Future

@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
class TlsTest
    extends AsyncFreeSpec
    with Matchers
    with Inside
    with AbstractHttpServiceIntegrationTestFuns {

  override def jdbcConfig = None

  override def staticContentConfig = None

  override def useTls = UseTls.Tls

  "connect normally with tls on" in withHttpService { (uri: Uri, _, _) =>
    getRequest(uri = uri.withPath(Uri.Path("/v1/query")))
      .flatMap {
        case (status, output) =>
          status shouldBe StatusCodes.OK
          assertStatus(output, StatusCodes.OK)
          inside(output) {
            case JsObject(fields) =>
              inside(fields.get("result")) {
                case Some(JsArray(vector)) => vector should have size 0L
              }
          }
      }: Future[Assertion]
  }
} 
Example 7
Source File: JsonVariant.scala    From daml    with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.value.json

import spray.json.{JsObject, JsString, JsValue}

object JsonVariant {
  def apply(tag: String, body: JsValue): JsObject =
    JsObject("tag" -> JsString(tag), "value" -> body)

  def unapply(o: JsObject): Option[(String, JsValue)] =
    (o.fields.size, o.fields.get("tag"), o.fields.get("value")) match {
      case (2, Some(JsString(tag)), Some(nv)) => Some((tag, nv))
      case _ => None
    }
} 
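A brief usage sketch (with a hypothetical tag and payload) showing how the apply/unapply pair above builds and then pattern-matches the tag/value wrapper:

import spray.json.{JsNumber, JsObject, JsString}

// Encodes to JsObject("tag" -> JsString("Circle"), "value" -> JsObject("radius" -> JsNumber(2)))
val encoded: JsObject = JsonVariant("Circle", JsObject("radius" -> JsNumber(2)))

encoded match {
  case JsonVariant(tag, body) => println(s"tag=$tag value=${body.compactPrint}")
  case other                  => println(s"not a variant: ${other.compactPrint}")
}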
Example 8
Source File: SessionJsonProtocolTest.scala    From daml    with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator

import com.daml.navigator.model.PartyState
import org.scalatest.{FlatSpec, Matchers}
import SessionJsonProtocol.userWriter
import com.daml.ledger.api.refinements.ApiTypes
import spray.json.{JsBoolean, JsObject, JsString}

class SessionJsonProtocolTest extends FlatSpec with Matchers {

  val userClassName = User.getClass.getSimpleName
  val party = ApiTypes.Party("party")

  behavior of s"JsonCodec[$userClassName]"

  it should s"encode $userClassName without role" in {
    val user = User(id = "id", party = new PartyState(party, false), canAdvanceTime = true)
    val userJson = JsObject(
      "id" -> JsString("id"),
      "party" -> JsString("party"),
      "canAdvanceTime" -> JsBoolean(true))
    userWriter.write(user) shouldEqual userJson
  }

  it should s"encode $userClassName with role" in {
    val user = User(
      id = "id",
      party = new PartyState(party, false),
      role = Some("role"),
      canAdvanceTime = false)
    val userJson = JsObject(
      "id" -> JsString("id"),
      "role" -> JsString("role"),
      "party" -> JsString("party"),
      "canAdvanceTime" -> JsBoolean(false))
    userWriter.write(user) shouldEqual userJson
  }
} 
Example 9
Source File: HydraIngestJsonSupport.scala    From hydra    with Apache License 2.0
package hydra.ingest.http

import hydra.core.ingest.IngestionReport
import hydra.core.marshallers.HydraJsonSupport
import hydra.core.protocol.IngestorStatus
import hydra.core.transport.ValidationStrategy
import hydra.ingest.IngestorInfo
import hydra.ingest.services.IngestionFlowV2.V2IngestRequest
import hydra.kafka.algebras.KafkaClientAlgebra.PublishResponse
import spray.json.JsObject

private object HydraIngestJsonSupport {
  private final case class IntermediateV2IngestRequest(
                                                        key: JsObject,
                                                        value: Option[JsObject],
                                                        validationStrategy: Option[ValidationStrategy]
                                                      )
}

trait HydraIngestJsonSupport extends HydraJsonSupport {

  import HydraIngestJsonSupport._
  import spray.json._

  private val publishResponseApply: (Int, Option[Long]) => PublishResponse = PublishResponse.apply
  implicit val publishResponseFormat: RootJsonFormat[PublishResponse] = jsonFormat2(publishResponseApply)

  implicit object ValidationStrategyFormat extends RootJsonFormat[ValidationStrategy] {

    def read(json: JsValue): ValidationStrategy = json match {
      case JsString(s) if s.toLowerCase == "strict" => ValidationStrategy.Strict
      case JsString(s) if s.toLowerCase == "relaxed" => ValidationStrategy.Relaxed
      case _ =>
        import scala.reflect.runtime.{universe => ru}
        val tpe = ru.typeOf[ValidationStrategy]
        val clazz = tpe.typeSymbol.asClass
        throw DeserializationException(
          s"expected a ValidationStrategy of ${clazz.knownDirectSubclasses}, but got $json"
        )
    }

    def write(obj: ValidationStrategy): JsValue = {
      JsString(obj.toString)
    }
  }

  private implicit val intermediateV2IngestRequestFormat: JsonFormat[IntermediateV2IngestRequest] =
    jsonFormat3(IntermediateV2IngestRequest)

  implicit object V2IngestRequestFormat extends RootJsonFormat[V2IngestRequest] {
    override def read(json: JsValue): V2IngestRequest = {
      val int = intermediateV2IngestRequestFormat.read(json)
      V2IngestRequest(int.key.compactPrint, int.value.map(_.compactPrint), int.validationStrategy)
    }

    // Intentionally unimplemented: `V2IngestRequest` is only a request, not a response
    override def write(obj: V2IngestRequest): JsValue = ???
  }

  implicit val ingestorInfoFormat = jsonFormat4(IngestorInfo)

  implicit object IngestorStatusFormat extends RootJsonFormat[IngestorStatus] {

    override def write(obj: IngestorStatus): JsValue = {
      JsObject(
        Map(
          "code" -> JsNumber(obj.statusCode.intValue()),
          "message" -> JsString(obj.message)
        )
      )
    }

    override def read(json: JsValue): IngestorStatus = ???
  }

  implicit object IngestionReportFormat
      extends RootJsonFormat[IngestionReport] {

    def writeState[T <: IngestorStatus: JsonWriter](t: T) = t.toJson

    override def write(obj: IngestionReport): JsValue = {

      val ingestors = obj.ingestors.map(h => h._1 -> writeState(h._2))
      val response = Map(
        "correlationId" -> JsString(obj.correlationId),
        "ingestors" -> JsObject(ingestors)
      )

      JsObject(response)
    }

    override def read(json: JsValue): IngestionReport = ???

  }

} 
Example 10
Source File: HydraKafkaJsonSupport.scala    From hydra    with Apache License 2.0
package hydra.kafka.marshallers

import akka.http.scaladsl.marshalling.{Marshaller, Marshalling}
import akka.http.scaladsl.model.ContentTypes
import akka.util.ByteString
import hydra.core.marshallers.HydraJsonSupport
import org.apache.kafka.common.{Node, PartitionInfo}
import spray.json.{JsNumber, JsObject, JsString, JsValue, JsonFormat}

import scala.concurrent.Future


trait HydraKafkaJsonSupport extends HydraJsonSupport {

  implicit object NodeJsonFormat extends JsonFormat[Node] {

    override def write(node: Node): JsValue = {
      JsObject(
        "id" -> JsNumber(node.idString),
        "host" -> JsString(node.host),
        "port" -> JsNumber(node.port)
      )
    }

    override def read(json: JsValue): Node = {
      json.asJsObject.getFields("id", "host", "port") match {
        case Seq(id, host, port) =>
          new Node(
            id.convertTo[Int],
            host.convertTo[String],
            port.convertTo[Int]
          )
        case other =>
          spray.json.deserializationError(
            "Cannot deserialize Node. Invalid input: " + other
          )
      }
    }
  }

  implicit object PartitionInfoJsonFormat extends JsonFormat[PartitionInfo] {

    import spray.json._

    override def write(p: PartitionInfo): JsValue = {
      JsObject(
        "partition" -> JsNumber(p.partition()),
        "leader" -> p.leader().toJson,
        "isr" -> JsArray(p.inSyncReplicas().toJson)
      )
    }

    override def read(json: JsValue): PartitionInfo = ???
  }

  implicit val stringFormat = Marshaller[String, ByteString] { ec ⇒ s =>
    Future.successful {
      List(
        Marshalling.WithFixedContentType(
          ContentTypes.`application/json`,
          () => ByteString(s)
        )
      )
    }
  }
} 
Example 11
Source File: JsonSupport.scala    From darwin    with Apache License 2.0
package it.agilelab.darwin.server.rest

import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import org.apache.avro.Schema
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, JsonParser, PrettyPrinter, RootJsonFormat}

trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol {
  implicit val printer: PrettyPrinter.type = PrettyPrinter

  implicit val schemaFormat: RootJsonFormat[Schema] = new RootJsonFormat[Schema] {

    override def write(obj: Schema): JsValue = JsonParser(obj.toString(true))

    override def read(json: JsValue): Schema = new Schema.Parser().parse(json.prettyPrint)
  }

  implicit val schemaWithIdFormat: RootJsonFormat[(Long, Schema)] = new RootJsonFormat[(Long, Schema)] {

    override def write(obj: (Long, Schema)): JsValue = JsObject(Map(
      "id" -> JsString(obj._1.toString),
      "schema" -> schemaFormat.write(obj._2)
    ))

    override def read(json: JsValue): (Long, Schema) = json match {
      case JsObject(fields) =>
        val id = fields.get("id") match {
          case Some(JsString(number)) => number
          case _ => throw new Exception("Id field should be a long")
        }

        val schema = fields.get("schema") match {
          case Some(x@JsObject(_)) => x
          case _ => throw new Exception("schema should be an object")
        }

        (id.toLong, schemaFormat.read(schema))
      case _ => throw new Exception("should be an object")
    }
  }
} 
Example 12
Source File: Boot.scala    From reactive-consul    with MIT License
package stormlantern.consul.example

import java.net.URL

import akka.actor.ActorSystem
import akka.io.IO
import akka.pattern._
import akka.util.Timeout
import spray.can.Http
import spray.json.{ JsString, JsObject }
import stormlantern.consul.client.discovery.{ ConnectionStrategy, ServiceDefinition, ConnectionProvider }
import stormlantern.consul.client.loadbalancers.RoundRobinLoadBalancer
import stormlantern.consul.client.ServiceBroker
import stormlantern.consul.client.DNS

import scala.concurrent.Future
import scala.concurrent.duration._

object Boot extends App {
  implicit val system = ActorSystem("reactive-consul")
  implicit val executionContext = system.dispatcher

  val service = system.actorOf(ReactiveConsulHttpServiceActor.props(), "webservice")

  implicit val timeout = Timeout(5.seconds)

  IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = 8080)

  def connectionProviderFactory = (host: String, port: Int) ⇒ new ConnectionProvider {
    val client = new SprayExampleServiceClient(new URL(s"http://$host:$port"))
    override def getConnection: Future[Any] = Future.successful(client)
  }
  val connectionStrategy1 = ConnectionStrategy("example-service-1", connectionProviderFactory)
  val connectionStrategy2 = ConnectionStrategy("example-service-2", connectionProviderFactory)

  val services = Set(connectionStrategy1, connectionStrategy2)
  val serviceBroker = ServiceBroker(DNS.lookup("consul-8500.service.consul"), services)

  system.scheduler.schedule(5.seconds, 5.seconds) {
    serviceBroker.withService("example-service-1") { client: SprayExampleServiceClient ⇒
      client.identify
    }.foreach(println)
    serviceBroker.withService("example-service-2") { client: SprayExampleServiceClient ⇒
      client.identify
    }.foreach(println)
  }
} 
Example 13
Source File: ControllerInstanceIdTests.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.entity.test

import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import org.apache.openwhisk.core.entity.{ControllerInstanceId, InstanceId}
import spray.json.{JsObject, JsString}

import scala.util.Success

@RunWith(classOf[JUnitRunner])
class ControllerInstanceIdTests extends FlatSpec with Matchers {

  behavior of "ControllerInstanceId"

  it should "accept usable characters" in {
    Seq("a", "1", "a.1", "a_1").foreach { s =>
      ControllerInstanceId(s).asString shouldBe s

    }
  }

  it should "reject unusable characters" in {
    Seq(" ", "!", "$", "a" * 129).foreach { s =>
      an[IllegalArgumentException] shouldBe thrownBy {
        ControllerInstanceId(s)
      }
    }
  }

  it should "serialize and deserialize ControllerInstanceId" in {
    val i = ControllerInstanceId("controller0")
    i.serialize shouldBe JsObject("asString" -> JsString(i.asString), "instanceType" -> JsString(i.instanceType)).compactPrint
    i.serialize shouldBe i.toJson.compactPrint
    InstanceId.parse(i.serialize) shouldBe Success(i)
  }

} 
Example 14
Source File: InvokerInstanceIdTests.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.entity.test

import org.apache.openwhisk.core.entity.size.SizeInt
import org.apache.openwhisk.core.entity.{ByteSize, InstanceId, InvokerInstanceId}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsNumber, JsObject, JsString}

import scala.util.Success

@RunWith(classOf[JUnitRunner])
class InvokerInstanceIdTests extends FlatSpec with Matchers {

  behavior of "InvokerInstanceIdTests"

  val defaultUserMemory: ByteSize = 1024.MB
  it should "serialize and deserialize InvokerInstanceId" in {
    val i = InvokerInstanceId(0, userMemory = defaultUserMemory)
    i.serialize shouldBe JsObject(
      "instance" -> JsNumber(i.instance),
      "userMemory" -> JsString(i.userMemory.toString),
      "instanceType" -> JsString(i.instanceType)).compactPrint
    i.serialize shouldBe i.toJson.compactPrint
    InstanceId.parse(i.serialize) shouldBe Success(i)
  }

  it should "serialize and deserialize InvokerInstanceId with optional field" in {
    val i1 = InvokerInstanceId(0, uniqueName = Some("uniqueInvoker"), userMemory = defaultUserMemory)
    i1.serialize shouldBe JsObject(
      "instance" -> JsNumber(i1.instance),
      "userMemory" -> JsString(i1.userMemory.toString),
      "instanceType" -> JsString(i1.instanceType),
      "uniqueName" -> JsString(i1.uniqueName.getOrElse(""))).compactPrint
    i1.serialize shouldBe i1.toJson.compactPrint
    InstanceId.parse(i1.serialize) shouldBe Success(i1)

    val i2 = InvokerInstanceId(
      0,
      uniqueName = Some("uniqueInvoker"),
      displayedName = Some("displayedInvoker"),
      userMemory = defaultUserMemory)
    i2.serialize shouldBe JsObject(
      "instance" -> JsNumber(i2.instance),
      "userMemory" -> JsString(i2.userMemory.toString),
      "instanceType" -> JsString(i2.instanceType),
      "uniqueName" -> JsString(i2.uniqueName.getOrElse("")),
      "displayedName" -> JsString(i2.displayedName.getOrElse(""))).compactPrint
    i2.serialize shouldBe i2.toJson.compactPrint
    InstanceId.parse(i2.serialize) shouldBe Success(i2)
  }
} 
Example 15
Source File: ElasticSearchActivationStoreBehaviorBase.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.database.elasticsearch

import org.scalatest.FlatSpec
import org.apache.openwhisk.core.controller.test.WhiskAuthHelpers
import org.apache.openwhisk.core.database.UserContext
import org.apache.openwhisk.core.database.test.behavior.ActivationStoreBehaviorBase
import org.apache.openwhisk.core.entity.{ActivationResponse, Parameters, WhiskActivation}
import org.testcontainers.elasticsearch.ElasticsearchContainer
import pureconfig.loadConfigOrThrow
import spray.json.{JsObject, JsString}

trait ElasticSearchActivationStoreBehaviorBase extends FlatSpec with ActivationStoreBehaviorBase {
  val imageName = loadConfigOrThrow[String]("whisk.elasticsearch.docker-image")
  val container = new ElasticsearchContainer(imageName)
  container.start()

  override def afterAll = {
    container.close()
    super.afterAll()
  }

  override def storeType = "ElasticSearch"

  val creds = WhiskAuthHelpers.newIdentity()
  override val context = UserContext(creds)

  override lazy val activationStore = {
    val storeConfig =
      ElasticSearchActivationStoreConfig("http", container.getHttpHostAddress, "unittest-%s", "fake", "fake")
    new ElasticSearchActivationStore(None, storeConfig, true)
  }

  // add result and annotations
  override def newActivation(ns: String, actionName: String, start: Long): WhiskActivation = {
    super
      .newActivation(ns, actionName, start)
      .copy(
        response = ActivationResponse.success(Some(JsObject("name" -> JsString("whisker")))),
        annotations = Parameters("database", "elasticsearch") ++ Parameters("type", "test"))
  }
} 
Example 16
Source File: JsHelpers.scala    From openwhisk    with Apache License 2.0
// This is a copy of JsHelpers, exposed as a trait, so that
// catalog tests keep working until they are migrated
package common

import spray.json.JsObject
import spray.json.JsValue


@Deprecated
trait JsHelpers {
  implicit class JsObjectHelper(js: JsObject) {
    def getFieldPath(path: String*): Option[JsValue] = {
      org.apache.openwhisk.utils.JsHelpers.getFieldPath(js, path.toList)
    }

    def fieldPathExists(path: String*): Boolean = {
      org.apache.openwhisk.utils.JsHelpers.fieldPathExists(js, path.toList)
    }
  }
} 
Example 17
Source File: JsHelpers.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.utils

import spray.json.JsObject
import spray.json.JsValue

object JsHelpers {
  def getFieldPath(js: JsObject, path: List[String]): Option[JsValue] = {
    path match {
      case Nil      => Option(js)
      case p :: Nil => js.fields.get(p)
      case p :: tail =>
        js.fields.get(p) match {
          case Some(o: JsObject) => getFieldPath(o, tail)
          case Some(_)           => None // head exists but value is not an object so cannot project further
          case None              => None // head doesn't exist, cannot project further
        }
    }
  }

  def getFieldPath(js: JsObject, path: String*): Option[JsValue] = {
    getFieldPath(js, path.toList)
  }

  def fieldPathExists(js: JsObject, path: List[String]): Boolean = getFieldPath(js, path).isDefined
  def fieldPathExists(js: JsObject, path: String*): Boolean = fieldPathExists(js, path.toList)
} 
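A short usage sketch (hypothetical document, assuming the JsHelpers object above is in scope) illustrating how getFieldPath projects into nested objects and how fieldPathExists reports missing segments:

import spray.json.{JsNumber, JsObject, JsString, JsValue}

val doc = JsObject(
  "response" -> JsObject(
    "result" -> JsObject("status" -> JsString("ok"), "code" -> JsNumber(200))))

// Walks the path one field at a time; any missing segment or non-object intermediate value yields None.
val status: Option[JsValue] = JsHelpers.getFieldPath(doc, "response", "result", "status") // Some(JsString("ok"))
val exists: Boolean = JsHelpers.fieldPathExists(doc, "response", "result")                // true
val missing: Boolean = JsHelpers.fieldPathExists(doc, "response", "unknown")              // false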
Example 18
Source File: NoopActivationStore.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.database.memory

import java.time.Instant

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.apache.openwhisk.common.{Logging, TransactionId, WhiskInstants}
import org.apache.openwhisk.core.database.{
  ActivationStore,
  ActivationStoreProvider,
  CacheChangeNotification,
  UserContext
}
import org.apache.openwhisk.core.entity.{ActivationId, DocInfo, EntityName, EntityPath, Subject, WhiskActivation}
import spray.json.{JsNumber, JsObject}

import scala.concurrent.Future

object NoopActivationStore extends ActivationStore with WhiskInstants {
  private val emptyInfo = DocInfo("foo")
  private val emptyCount = JsObject("activations" -> JsNumber(0))
  private val dummyActivation = WhiskActivation(
    EntityPath("testnamespace"),
    EntityName("activation"),
    Subject(),
    ActivationId.generate(),
    start = Instant.now.inMills,
    end = Instant.now.inMills)

  override def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): Future[DocInfo] = Future.successful(emptyInfo)

  override def get(activationId: ActivationId, context: UserContext)(
    implicit transid: TransactionId): Future[WhiskActivation] = {
    val activation = dummyActivation.copy(activationId = activationId)
    Future.successful(activation)
  }

  override def delete(activationId: ActivationId, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): Future[Boolean] = Future.successful(true)

  override def countActivationsInNamespace(namespace: EntityPath,
                                           name: Option[EntityPath],
                                           skip: Int,
                                           since: Option[Instant],
                                           upto: Option[Instant],
                                           context: UserContext)(implicit transid: TransactionId): Future[JsObject] =
    Future.successful(emptyCount)

  override def listActivationsMatchingName(
    namespace: EntityPath,
    name: EntityPath,
    skip: Int,
    limit: Int,
    includeDocs: Boolean,
    since: Option[Instant],
    upto: Option[Instant],
    context: UserContext)(implicit transid: TransactionId): Future[Either[List[JsObject], List[WhiskActivation]]] =
    Future.successful(Right(List.empty))

  override def listActivationsInNamespace(
    namespace: EntityPath,
    skip: Int,
    limit: Int,
    includeDocs: Boolean,
    since: Option[Instant],
    upto: Option[Instant],
    context: UserContext)(implicit transid: TransactionId): Future[Either[List[JsObject], List[WhiskActivation]]] =
    Future.successful(Right(List.empty))
}

object NoopActivationStoreProvider extends ActivationStoreProvider {
  override def instance(actorSystem: ActorSystem, actorMaterializer: ActorMaterializer, logging: Logging) =
    NoopActivationStore
} 
Example 19
Source File: YARNComponentActor.scala    From openwhisk    with Apache License 2.0
package org.apache.openwhisk.core.yarn

import akka.actor.{Actor, ActorSystem}
import akka.http.scaladsl.model.{HttpMethods, StatusCodes}
import akka.stream.ActorMaterializer
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.yarn.YARNComponentActor.{CreateContainerAsync, RemoveContainer}
import spray.json.{JsArray, JsNumber, JsObject, JsString}

import scala.concurrent.ExecutionContext


object YARNComponentActor {
  case object CreateContainerAsync
  case class RemoveContainer(component_instance_name: String)
}

class YARNComponentActor(actorSystem: ActorSystem,
                         logging: Logging,
                         yarnConfig: YARNConfig,
                         serviceName: String,
                         imageName: ImageName)
    extends Actor {

  implicit val as: ActorSystem = actorSystem
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  implicit val ec: ExecutionContext = actorSystem.dispatcher

  // Adding a container via the YARN REST API is actually done by flexing the component's container pool to a certain size.
  // This actor must track the current containerCount in order to make the correct scale-up request.
  var containerCount: Int = 0

  def receive: PartialFunction[Any, Unit] = {
    case CreateContainerAsync =>
      sender ! createContainerAsync

    case RemoveContainer(component_instance_name) =>
      sender ! removeContainer(component_instance_name)

    case input =>
      throw new IllegalArgumentException("Unknown input: " + input)
      sender ! false
  }

  def createContainerAsync(): Unit = {
    logging.info(this, s"Using YARN to create a container with image ${imageName.name}...")

    val body = JsObject("number_of_containers" -> JsNumber(containerCount + 1)).compactPrint
    val response = YARNRESTUtil.submitRequestWithAuth(
      yarnConfig.authType,
      HttpMethods.PUT,
      s"${yarnConfig.masterUrl}/app/v1/services/$serviceName/components/${imageName.name}",
      body)
    response match {
      case httpresponse(StatusCodes.OK, content) =>
        logging.info(this, s"Added container: ${imageName.name}. Response: $content")
        containerCount += 1

      case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
    }
  }

  def removeContainer(component_instance_name: String): Unit = {
    logging.info(this, s"Removing ${imageName.name} container: $component_instance_name ")
    if (containerCount <= 0) {
      logging.warn(this, "Already at 0 containers")
    } else {
      val body = JsObject(
        "components" -> JsArray(
          JsObject(
            "name" -> JsString(imageName.name),
            "decommissioned_instances" -> JsArray(JsString(component_instance_name))))).compactPrint
      val response = YARNRESTUtil.submitRequestWithAuth(
        yarnConfig.authType,
        HttpMethods.PUT,
        s"${yarnConfig.masterUrl}/app/v1/services/$serviceName",
        body)
      response match {
        case httpresponse(StatusCodes.OK, content) =>
          logging.info(
            this,
            s"Successfully removed ${imageName.name} container: $component_instance_name. Response: $content")
          containerCount -= 1

        case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
      }
    }
  }
} 
Example 20
Source File: JsonSupport.scala    From mleap    with Apache License 2.0
package ml.combust.mleap.tensor

import spray.json.DefaultJsonProtocol._
import spray.json.{JsObject, _}

import scala.reflect.ClassTag


trait JsonSupport {
  implicit def mleapArrayFormat[T: JsonFormat: ClassTag]: RootJsonFormat[Array[T]] = new RootJsonFormat[Array[T]] {
    val base = implicitly[JsonFormat[T]]

    override def write(obj: Array[T]): JsValue = {
      JsArray(obj.map(base.write): _*)
    }

    override def read(json: JsValue): Array[T] = json match {
      case json: JsArray =>
        val elements = json.elements
        val size = elements.size
        val values = new Array[T](size)
        (0 until size).foreach(i => values(i) = base.read(elements(i)))
        values
      case _ => deserializationError("invalid array")
    }
  }

  implicit def mleapDenseTensorFormat[T: JsonFormat: ClassTag]: RootJsonFormat[DenseTensor[T]] = jsonFormat[Array[T], Seq[Int], DenseTensor[T]](DenseTensor[T], "values", "dimensions")
  implicit def mleapSparseTensorFormat[T: JsonFormat: ClassTag]: RootJsonFormat[SparseTensor[T]] = jsonFormat[Seq[Seq[Int]], Array[T], Seq[Int], SparseTensor[T]](SparseTensor[T], "indices", "values", "dimensions")
  implicit def mleapTensorFormat[T: JsonFormat: ClassTag]: RootJsonFormat[Tensor[T]] = new RootJsonFormat[Tensor[T]] {
    override def write(obj: Tensor[T]): JsValue = obj match {
      case obj: DenseTensor[_] => obj.asInstanceOf[DenseTensor[T]].toJson
      case obj: SparseTensor[_] => obj.asInstanceOf[SparseTensor[T]].toJson
    }

    override def read(json: JsValue): Tensor[T] = json match {
      case json: JsObject =>
        if(json.fields.contains("indices")) {
          mleapSparseTensorFormat[T].read(json)
        } else {
          mleapDenseTensorFormat[T].read(json)
        }
      case _ => deserializationError("invalid tensor")
    }
  }
}
object JsonSupport extends JsonSupport 
Example 21
Source File: StandardFormatsSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import scala.Right

class StandardFormatsSpec extends Specification with BasicJsonProtocol {
  case class Person(name: Option[String], value: Option[Int])
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Option[String]]("name")
          val value = unbuilder.readField[Option[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "The optionFormat" should {
    "convert None to JsNull" in {
      Converter.toJsonUnsafe(None.asInstanceOf[Option[Int]]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Option[Int]](JsNull) mustEqual None
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Some("Hello").asInstanceOf[Option[String]]) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Option[String]](JsString("Hello")) mustEqual Some("Hello")
    }
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(None, None)) mustEqual JsObject()
    }
  }

  "The eitherFormat" should {
    val a: Either[Int, String] = Left(42)
    val b: Either[Int, String] = Right("Hello")

    "convert the left side of an Either value to Json" in {
      Converter.toJsonUnsafe(a) mustEqual JsNumber(42)
    }
    "convert the right side of an Either value to Json" in {
      Converter.toJsonUnsafe(b) mustEqual JsString("Hello")
    }
    "convert the left side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsNumber(42)) mustEqual Left(42)
    }
    "convert the right side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsString("Hello")) mustEqual Right("Hello")
    }
  }
} 
Example 22
Source File: LListFormatSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }

class LListFormatsSpec extends Specification with BasicJsonProtocol {

  "The llistFormat" should {
    val empty = LNil
    val emptyObject = JsObject()
    val list = ("Z", 2) :*: ("a", 1) :*: LNil
    val obj = JsObject("$fields" -> JsArray(JsString("Z"), JsString("a")), "Z" -> JsNumber(2), "a" -> JsNumber(1))
    val nested = ("b", list) :*: LNil
    val nestedObj = JsObject("$fields" -> JsArray(JsString("b")), "b" -> obj)
    "convert an empty list to JObject" in {
      Converter.toJsonUnsafe(empty) mustEqual emptyObject
    }
    "convert a list to JObject" in {
      Converter.toJsonUnsafe(list) mustEqual obj
    }
    "convert a nested list to JObject" in {
      Converter.toJsonUnsafe(nested) mustEqual nestedObj
    }
    "convert a JObject to list" in {
      Converter.fromJsonUnsafe[Int :*: Int :*: LNil](obj) mustEqual list
    }
    "convert a nested JObject to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: LNil](nestedObj) mustEqual nested
    }

    val obj2 = JsObject("$fields" -> JsArray(JsString("f")), "f" -> JsString("foo"))
    val nested2Obj = JsObject("$fields" -> JsArray(JsString("b"), JsString("c")), "b" -> obj, "c" -> obj2)

    val list2 = ("f", "foo") :*: LNil
    val nested2 = ("b", list) :*: ("c", list2) :*: LNil

    "convert a 2 nested JObjects to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: (String :*: LNil) :*: LNil](nested2Obj) mustEqual nested2
    }
  }
} 
Example 23
Source File: JavaPrimitiveSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.lang.{ Integer => JInteger, Long => JLong, Boolean => JBoolean,
  Float => JFloat, Double => JDouble, Byte => JByte, Short => JShort,
  Character => JCharacter }

class JavaPrimitiveFormatsSpec extends Specification with BasicJsonProtocol {
  "The JIntegerJsonFormat" should {
    "convert an JInteger to a JsNumber" in {
      Converter.toJsonUnsafe[JInteger](42: JInteger) mustEqual JsNumber(42)
    }
    "convert a JsNumber to an Int" in {
      Converter.fromJsonUnsafe[JInteger](JsNumber(42)) mustEqual (42: JInteger)
    }
  }

  "The JLongJsonFormat" should {
    "convert a JLong to a JsNumber" in {
      Converter.toJsonUnsafe[JLong](7563661897011259335L: JLong) mustEqual JsNumber(7563661897011259335L)
    }
    "convert a JsNumber to a JLong" in {
      Converter.fromJsonUnsafe[JLong](JsNumber(7563661897011259335L)) mustEqual (7563661897011259335L: JLong)
    }
  }

  "The JFloatJsonFormat" should {
    "convert a JFloat to a JsNumber" in {
      Converter.toJsonUnsafe[JFloat](4.2f: JFloat) mustEqual JsNumber(4.2f)
    }
    "convert a JsNumber to a JFloat" in {
      Converter.fromJsonUnsafe[JFloat](JsNumber(4.2f)) mustEqual (4.2f: JFloat)
    }
  }

  "The JDoubleJsonFormat" should {
    "convert a JDouble to a JsNumber" in {
      Converter.toJsonUnsafe[JDouble](4.2: JDouble) mustEqual JsNumber(4.2)
    }
    "convert a JsNumber to a JDouble" in {
      Converter.fromJsonUnsafe[JDouble](JsNumber(4.2)) mustEqual (4.2: JDouble)
    }
  }

  "The JByteJsonFormat" should {
    "convert a JByte to a JsNumber" in {
      Converter.toJsonUnsafe[JByte](42.toByte: JByte) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JByte" in {
      Converter.fromJsonUnsafe[JByte](JsNumber(42)) mustEqual (42.toByte: JByte)
    }
  }

  "The JShortJsonFormat" should {
    "convert a JShort to a JsNumber" in {
      Converter.toJsonUnsafe(42.toShort: JShort) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JShort" in {
      Converter.fromJsonUnsafe[JShort](JsNumber(42)) mustEqual (42.toShort: JShort)
    }
  }

  "The JBooleanJsonFormat" should {
    "convert true to a JsTrue" in { Converter.toJsonUnsafe[JBoolean](true: JBoolean) mustEqual JsTrue }
    "convert false to a JsFalse" in { Converter.toJsonUnsafe[JBoolean](false: JBoolean) mustEqual JsFalse }
    "convert a JsTrue to true" in { Converter.fromJsonUnsafe[JBoolean](JsTrue) mustEqual true }
    "convert a JsFalse to false" in { Converter.fromJsonUnsafe[JBoolean](JsFalse) mustEqual false }
  }

  "The JCharacterJsonFormat" should {
    "convert a JCharacter to a JsString" in {
      Converter.toJsonUnsafe[JCharacter]('c': JCharacter) mustEqual JsString("c")
    }
    "convert a JsString to a JCharacter" in {
      Converter.fromJsonUnsafe[JCharacter](JsString("c")) mustEqual ('c': JCharacter)
    }
  }
} 
Example 24
Source File: JavaExtraFormatsSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.util.{ UUID, Optional }
import java.net.{ URI, URL }
import java.io.File

class JavaExtraFormatsSpec extends Specification with BasicJsonProtocol {
  case class Person(name: Optional[String], value: Optional[Int])
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Optional[String]]("name")
          val value = unbuilder.readField[Optional[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "The uuidStringIso" should {
    val uuid = UUID.fromString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    "convert a UUID to JsString" in {
      Converter.toJsonUnsafe(uuid) mustEqual JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    }
    "convert the JsString back to the UUID" in {
      Converter.fromJsonUnsafe[UUID](JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")) mustEqual uuid
    }
  }

  "The uriStringIso" should {
    val uri = new URI("http://localhost")
    "convert a URI to JsString" in {
      Converter.toJsonUnsafe(uri) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URI](JsString("http://localhost")) mustEqual uri
    }
  }

  "The urlStringIso" should {
    val url = new URL("http://localhost")
    "convert a URL to JsString" in {
      Converter.toJsonUnsafe(url) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URL](JsString("http://localhost")) mustEqual url
    }
  }

  "The fileStringIso" should {
    val f = new File("/tmp")
    val f2 = new File(new File("src"), "main")
    "convert a File to JsString" in {
      Converter.toJsonUnsafe(f) mustEqual JsString("file:///tmp/")
    }
    "convert a relative path to JsString" in {
      // https://tools.ietf.org/html/rfc3986#section-4.2
      Converter.toJsonUnsafe(f2) mustEqual JsString("src/main")
    }
    "convert the JsString back to the File" in {
      Converter.fromJsonUnsafe[File](JsString("file:///tmp/")) mustEqual f
    }
    "convert the JsString back to the relative path" in {
      Converter.fromJsonUnsafe[File](JsString("src/main")) mustEqual f2
    }
  }

  "The optionalFormat" should {
    "convert Optional.empty to JsNull" in {
      Converter.toJsonUnsafe(Optional.empty[Int]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Optional[Int]](JsNull) mustEqual Optional.empty[Int]
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Optional.of("Hello")) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Optional[String]](JsString("Hello")) mustEqual Optional.of("Hello")
    }
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(Optional.empty[String], Optional.empty[Int])) mustEqual JsObject()
    }
  }
} 
Example 25
Source File: BuilderSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

class BuilderSpec extends Specification with BasicJsonProtocol {
  case class Person(name: String, value: Int)
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[String]("name")
          val value = unbuilder.readField[Int]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "Custom format using builder" should {
    val p1 = Person("Alice", 1)
    val personJs = JsObject("name" -> JsString("Alice"), "value" -> JsNumber(1))
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
  }
} 
Example 26
Source File: IsoLListFormatSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import org.specs2.mutable.Specification

class IsoLListFormatSpec extends Specification with BasicJsonProtocol {
  sealed trait Contact
  case class Person(name: String, value: Option[Int]) extends Contact
  case class Organization(name: String, value: Option[Int]) extends Contact

  implicit val personIso: IsoLList.Aux[Person, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { p: Person => ("name", p.name) :*: ("value", p.value) :*: LNil })
    { in => Person(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten) }

  implicit val organizationIso: IsoLList.Aux[Organization, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { o: Organization => ("name", o.name) :*: ("value", o.value) :*: LNil })
    { in => Organization(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten) }

  implicit val ContactFormat: JsonFormat[Contact] = flatUnionFormat2[Contact, Person, Organization]("$type")

  val p1 = Person("Alice", Some(1))
  val personJs = JsObject("$fields" -> JsArray(JsString("name"), JsString("value")),
    "name" -> JsString("Alice"), "value" -> JsNumber(1))
  val c1: Contact = Organization("Company", None)
  val contactJs =
    JsObject(
      "$type" -> JsString("Organization"),
      "$fields" -> JsArray(JsString("name"), JsString("value")),
      "name" -> JsString("Company")
    )
  "The isomorphism from a custom type to LList" should {
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
    "convert from a union value to JObject" in {
      Converter.toJsonUnsafe(c1) mustEqual contactJs
    }
  }
} 
Example 27
Source File: UnionFormatSpec.scala    From sjson-new    with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

class UnionFormatsSpec extends Specification with BasicJsonProtocol {
  sealed trait Fruit
  case class Apple() extends Fruit
  sealed trait Citrus extends Fruit
  case class Orange() extends Citrus
  implicit object AppleJsonFormat extends JsonFormat[Apple] {
    def write[J](x: Apple, builder: Builder[J]): Unit =
      {
        builder.beginObject()
        builder.addField("x", 0)
        builder.endObject()
      }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Apple =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 0) Apple()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }
  implicit object OrangeJsonFormat extends JsonFormat[Orange] {
    def write[J](x: Orange, builder: Builder[J]): Unit =
      {
        builder.beginObject()
        builder.addField("x", 1)
        builder.endObject()
      }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Orange =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 1) Orange()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }
  val fruit: Fruit = Apple()
  "The unionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = unionFormat2[Fruit, Apple, Orange]
    val fruitJson = JsObject("value" ->  JsObject("x" -> JsNumber(0)), "type" -> JsString("Apple"))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson
    }
    "convert JObject back to ADT" in {
      Converter.fromJsonUnsafe[Fruit](fruitJson) mustEqual fruit
    }
  }

  "The flatUnionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = flatUnionFormat2[Fruit, Apple, Orange]("type")
    val fruitJson2 = JsObject("type" -> JsString("Apple"), "x" -> JsNumber(0))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson2
    }
    "convert JObject back to ADT" in {
      // println(Converter.fromJsonUnsafe[Fruit](fruitJson2))
      Converter.fromJsonUnsafe[Fruit](fruitJson2) mustEqual fruit
    }
  }
} 
Example 28
Source File: TextureMappedPropertyIO.scala    From parametric-face-image-generator    with Apache License 2.0
package faces.utils

import java.io.{File, FileInputStream, FileOutputStream}

import scalismo.faces.color.{ColorSpaceOperations, RGBA}
import scalismo.faces.image.BufferedImageConverter
import scalismo.faces.io.{MeshIO, PixelImageIO}
import scalismo.faces.mesh.{ColorNormalMesh3D, TextureMappedProperty}
import scalismo.geometry.{Point, _2D}
import scalismo.mesh.{MeshSurfaceProperty, TriangleCell, TriangleList}
import spray.json.JsObject

import scala.reflect.ClassTag
import scala.util.Try
import spray.json._

object TextureMappedPropertyIO extends App {

  import scalismo.faces.io.renderparameters.RenderParameterJSONFormatV2._

  import scalismo.faces.io.RenderParameterIO._
  def read[A: ClassTag](directory: String, stem: String)(implicit converter: BufferedImageConverter[A], ops: ColorSpaceOperations[A]): TextureMappedProperty[A] = read[A](new File(directory+"/"+stem+".json"),new File(directory+"/"+stem+".png"))

  def read[A: ClassTag](mappingFile: File, imageFile: File)(implicit converter: BufferedImageConverter[A],  ops: ColorSpaceOperations[A]) : TextureMappedProperty[A] = {

    import scalismo.faces.io.RenderParameterIO.readASTFromStream

    val fields = readASTFromStream(new FileInputStream(mappingFile)).asJsObject.fields
    val triangles = fields("triangles").convertTo[IndexedSeq[TriangleCell]]
    val triangulation = TriangleList(triangles)

    val textureMapping = fields("textureMapping").convertTo[MeshSurfaceProperty[Point[_2D]]]

    val texture = PixelImageIO.read[A](imageFile).get

    TextureMappedProperty[A](triangulation, textureMapping, texture)
  }

  def write[A:ClassTag](textureMappedProperty: TextureMappedProperty[A], directory: String, stem: String)(implicit converter: BufferedImageConverter[A]): Try[Unit] = Try {
    val writeImage = PixelImageIO.write(
      textureMappedProperty.texture,
      new File(directory+"/"+stem+".png")
    ).get

    val mapping = JsObject(
      "triangles" -> textureMappedProperty.triangulation.triangles.toJson,
      "textureMapping" -> textureMappedProperty.textureMapping.toJson,
      "@type" -> "TextureMappedProperty".toJson
    )

    val os = new FileOutputStream(new File(directory+"/"+stem+".json"))
    writeASTToStream(mapping, os)
  }

} 
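A minimal usage sketch for the helpers above, assuming the implicit BufferedImageConverter and ColorSpaceOperations instances that scalismo-faces ships for RGBA are in scope; the directory and stem names are hypothetical:

import scalismo.faces.color.RGBA
import faces.utils.TextureMappedPropertyIO

// Reads albedo.json + albedo.png from one directory, then writes the pair back out elsewhere.
val albedo = TextureMappedPropertyIO.read[RGBA]("/data/faces", "albedo")
TextureMappedPropertyIO.write(albedo, "/tmp/out", "albedo").get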
Example 29
Source File: EmrSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsArray, JsObject, JsString, JsonWriter}

class EmrSpec extends FunSpec with Matchers {

  describe("ClusterConfiguration") {
    it("should write non recursive") {
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = None
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        )
      )))
    }

    it("should write and read recursive") {
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = Some(Seq(
          ClusterConfiguration(
            Classification = Some("hello1"),
            ConfigurationProperties = Some(Map("hello2" -> "world3")),
            Configurations = None
          )
        ))
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        ),
        "Configurations" -> JsArray(
          JsObject(Map(
            "Classification" -> JsString("hello1"),
            "ConfigurationProperties" -> JsObject(
              "hello2" -> JsString("world3")
            )
          ))
        )
      )))
    }
  }

} 
Example 30
Source File: IAMRole_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.ResourceRef
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsObject, JsString, _}


class IAMRole_UT extends FunSpec with Matchers {
  describe("AWS::IAM::Role") {

    it("should handle both AWS Managed and Customer policies into valid json") {
      val customerPolicy = `AWS::IAM::ManagedPolicy`("customer-policy", PolicyDocument(Seq()))
      val awsPolicy = AWSManagedPolicy("AdministratorAccess")

      val fakePolicyDoc = PolicyDocument(Seq(
        PolicyStatement(
          "Allow",
          Some(DefinedPrincipal(Map("Service" -> Seq("config.amazonaws.com")))),
          Seq("sts:AssumeRole")
        )
      ))

      val expectedJson = JsObject(
        "name" -> JsString("role"),
        "AssumeRolePolicyDocument" -> fakePolicyDoc.toJson,
        "ManagedPolicyArns" -> JsArray(
          JsObject("Ref" -> JsString("customer-policy")),
          JsString("arn:aws:iam::aws:policy/AdministratorAccess")
        )
      )

      val role = `AWS::IAM::Role`(
        "role",
        fakePolicyDoc,
        ManagedPolicyArns = Some(Seq(ResourceRef(customerPolicy), awsPolicy))
      )

      role.toJson should be(expectedJson)
    }
  }
} 
Example 31
Source File: JsonWritingMatcher.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model

import com.monsanto.arch.cloudformation.model.resource.Resource
import org.scalatest.Matchers
import spray.json.{JsonWriter, JsObject, JsValue, JsonFormat}


trait JsonWritingMatcher extends Matchers {

  implicit class JsonMatchResource(val value : Resource[_]) extends JsonMatcher[Resource[_]] {
    val format = Resource.seqFormat.format
  }

  implicit class JsonMatch[A](val value : A)(implicit val format: JsonWriter[A]) extends JsonMatcher[A]

  sealed trait JsonMatcher[A] {
    def value : A
    def format : JsonWriter[A]
    def shouldMatch(policy : String): Unit = {

      import spray.json._

      val jsonPolicy = value.toJson(format)
      val parsedPolicy = policy.parseJson
      jsonEquals(Seq(), jsonPolicy, parsedPolicy)
    }
  }

  def jsonEquals(path : Seq[String], v1 : JsValue, v2 : JsValue): Unit = withClue("Path: [" + path.mkString(" -> ") + "]") {
    (v1, v2) match {
      case (JsObject(o1), JsObject(o2)) =>
        o1.seq.keySet shouldEqual o2.seq.keySet
        for {
          key <- o1.seq.keySet
        } {
          jsonEquals(path ++ Seq(key), o1.seq(key), o2.seq(key))
        }
      case (j1, j2) => {
        j1 shouldEqual j2
      }
    }
  }
} 
Example 32
Source File: Diagram.scala    From pnp   with Apache License 2.0 5 votes vote down vote up
package org.allenai.dqa.labeling

import scala.io.Source

import spray.json.DefaultJsonProtocol._
import spray.json.JsArray
import spray.json.JsNumber
import spray.json.JsObject
import spray.json.deserializationError
import spray.json.pimpString
import scala.util.Random


case class DiagramLabel(diagramType: String, partLabels: Vector[String])

object Diagram {
  
  def fromJsonFile(filename: String, features: Map[String, DiagramFeatures]
    ): Array[(Diagram, DiagramLabel)] = {
    val lines = Source.fromFile(filename).getLines
    lines.map(fromJsonLine(_, features)).toArray
  }

  def fromJsonLine(line: String, features: Map[String, DiagramFeatures]
    ): (Diagram, DiagramLabel) = {
    val js = line.parseJson.asJsObject
    val diagramLabel = js.fields("label").convertTo[String]
    val diagramId = js.fields("id").convertTo[String]
    val imageId = js.fields("imageId").convertTo[String]
    val width = js.fields("width").convertTo[Int]
    val height = js.fields("height").convertTo[Int]
    
    // val pointJsons = Random.shuffle(js.fields("points").asInstanceOf[JsArray].elements)
    val pointJsons = js.fields("points").asInstanceOf[JsArray].elements

    val labeledParts = for {
      (pointJson, i) <- pointJsons.zipWithIndex
      p = pointJson.asJsObject
      id = p.fields("textId").convertTo[String]
      label = p.fields("label").convertTo[String]
      xy = p.fields("xy") match {
        case JsArray(Vector(JsNumber(x), JsNumber(y))) => Point(x.toInt, y.toInt)
        case _ => deserializationError("Array of x/y coordinates expected")
      }
    } yield {
      (Part(id, i, xy),  label)
    }

    val f = features(imageId)

    (Diagram(diagramId, imageId, width, height, labeledParts.map(_._1), f),
        (DiagramLabel(diagramLabel, labeledParts.map(_._2))))
  }
} 
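For reference, a sketch of the JSON line shape that fromJsonLine consumes; the ids, labels and coordinates below are invented, and actually calling fromJsonLine on it would additionally require a DiagramFeatures entry for "img-001" in the features map:

val exampleLine =
  """{"id": "d-001", "imageId": "img-001", "label": "water cycle",
    |  "width": 640, "height": 480,
    |  "points": [
    |    {"textId": "t0", "label": "cloud", "xy": [120, 45]},
    |    {"textId": "t1", "label": "rain",  "xy": [130, 220]}
    |  ]}""".stripMargin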
Example 33
Source File: MutationCallbackSchemaExecutor.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.deprecated.actions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.ClientInjector
import cool.graph.client.database.{DeferredResolverProvider, SimpleManyModelDeferredResolver, SimpleToManyDeferredResolver}
import cool.graph.cuid.Cuid.createCuid
import cool.graph.deprecated.actions.schemas.{ActionUserContext, MutationMetaData}
import cool.graph.shared.models.{Model, Project}
import cool.graph.shared.schema.JsonMarshalling._
import sangria.execution.Executor
import sangria.parser.QueryParser
import sangria.schema.Schema
import spray.json.{JsObject, JsString}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}

case class Event(id: String, url: String, payload: Option[JsObject])

class MutationCallbackSchemaExecutor(project: Project,
                                     model: Model,
                                     schema: Schema[ActionUserContext, Unit],
                                     nodeId: String,
                                     fragment: String,
                                     url: String,
                                     mutationId: String)(implicit injector: ClientInjector)
    extends LazyLogging {
  def execute: Future[Event] = {
    implicit val inj = injector.toScaldi

    val dataFut = QueryParser.parse(fragment) match {
      case Success(queryAst) =>
        Executor.execute(
          schema,
          queryAst,
          deferredResolver = new DeferredResolverProvider(
            new SimpleToManyDeferredResolver,
            new SimpleManyModelDeferredResolver,
            skipPermissionCheck = true
          ),
          userContext = ActionUserContext(
            requestId = "",
            project = project,
            nodeId = nodeId,
            mutation = MutationMetaData(id = mutationId, _type = "Create"),
            log = (x: String) => logger.info(x)
          )
        )
      case Failure(error) =>
        Future.successful(JsObject("error" -> JsString(error.getMessage)))
    }

    dataFut
      .map {
        case JsObject(dataMap) => Event(id = createCuid(), url = url, payload = Some(dataMap("data").asJsObject))
        case json              => sys.error(s"Must only receive JsObjects here. But got instead: ${json.compactPrint}")
      }

  }
} 
Example 34
Source File: PermissionSchemaResolver.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.shared.queryPermissions

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.UserContext
import cool.graph.client.database.DeferredTypes.ManyModelExistsDeferred
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.shared.{ApiMatrixFactory, models}
import cool.graph.shared.models.Project
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.schema.{Context, Field, ObjectType, Schema}
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PermissionSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {

  import sangria.marshalling.sprayJson._

  def resolve(project: Project): Future[String] = {

    implicit val system       = inject[ActorSystem](identified by "actorSystem")
    implicit val materializer = inject[ActorMaterializer](identified by "actorMaterializer")

    val permissionSchema = PermissionSchemaResolver.permissionSchema(project)

    Executor
      .execute(
        schema = permissionSchema,
        queryAst = introspectionQuery,
        userContext = new UserContext(
          project = project,
          authenticatedRequest = None,
          requestId = "PermissionSchemaResolver-request-id",
          requestIp = "PermissionSchemaResolver-request-ip",
          clientId = "PermissionSchemaResolver-client-id",
          log = (_) => (),
          queryAst = Some(introspectionQuery)
        )
      )
      .map { response =>
        val JsObject(fields) = response
        fields("data").compactPrint
      }
  }
}

object PermissionSchemaResolver extends Injectable {
  def permissionSchema(project: Project)(implicit inj: Injector): Schema[UserContext, Unit] = {
    val apiMatrix      = inject[ApiMatrixFactory].create(project)
    val includedModels = project.models.filter(model => apiMatrix.includeModel(model.name))
    val schemaBuilder  = new SimpleSchemaModelObjectTypeBuilder(project, None)

    def getConnectionArguments(model: models.Model) = {
      schemaBuilder.mapToListConnectionArguments(model)
    }

    def resolveGetAllItemsQuery(model: models.Model, ctx: Context[UserContext, Unit]): sangria.schema.Action[UserContext, Boolean] = {
      val arguments = schemaBuilder.extractQueryArgumentsFromContext(model, ctx)

      ManyModelExistsDeferred(model, arguments)
    }

    def getModelField(model: models.Model): Field[UserContext, Unit] = {
      Field(
        s"Some${model.name.capitalize}Exists",
        fieldType = sangria.schema.BooleanType,
        arguments = getConnectionArguments(model),
        resolve = (ctx) => {
          resolveGetAllItemsQuery(model, ctx)
        }
      )
    }

    val query    = ObjectType("Query", includedModels.map(getModelField))
    val mutation = None

    Schema(query, mutation)
  }
} 
Example 35
Source File: DevFunctionEnvironment.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.shared.functions.dev

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import cool.graph.akkautil.http.SimpleHttpClient
import cool.graph.cuid.Cuid
import cool.graph.shared.functions._
import cool.graph.shared.models.Project
import spray.json.{JsArray, JsObject, JsString, _}

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

case class DevFunctionEnvironment()(implicit system: ActorSystem, materializer: ActorMaterializer) extends FunctionEnvironment {
  import Conversions._
  import system.dispatcher

  private val httpClient = SimpleHttpClient()

  override def pickDeploymentAccount(): Option[String] = None

  val functionEndpointInternal: String =
    sys.env.getOrElse("FUNCTION_ENDPOINT_INTERNAL", sys.error("FUNCTION_ENDPOINT_INTERNAL env var required for dev function deployment.")).stripSuffix("/")

  val functionEndpointExternal: String =
    sys.env.getOrElse("FUNCTION_ENDPOINT_EXTERNAL", sys.error("FUNCTION_ENDPOINT_EXTERNAL env var required for dev function deployment.")).stripSuffix("/")

  override def getTemporaryUploadUrl(project: Project): String = {
    val deployId = Cuid.createCuid()
    s"$functionEndpointExternal/functions/files/${project.id}/$deployId"
  }

  override def deploy(project: Project, externalFile: ExternalFile, name: String): Future[DeployResponse] = {
    httpClient
      .postJson(s"$functionEndpointInternal/functions/deploy/${project.id}", DeploymentInput(externalFile.url, externalFile.devHandler, name))
      .map { response =>
        response.bodyAs[StatusResponse] match {
          case Success(status) =>
            if (status.success) {
              DeploySuccess()
            } else {
              DeployFailure(new Exception(status.error.getOrElse("")))
            }

          case Failure(e) => DeployFailure(e)
        }
      }
      .recover {
        case e: Throwable => DeployFailure(e)
      }
  }

  override def invoke(project: Project, name: String, event: String): Future[InvokeResponse] = {
    httpClient
      .postJson(s"$functionEndpointInternal/functions/invoke/${project.id}", FunctionInvocation(name, event))
      .map { response =>
        response.bodyAs[FunctionInvocationResult] match {
          case Success(result) =>
            val returnValue = Try { result.value.map(_.toString).getOrElse("").parseJson } match {
              case Success(parsedJson) => parsedJson
              case Failure(_)          => JsObject("error" -> JsString("Function did not return a valid response. Check your function code / logs."))
            }

            val output = JsObject(
              "logs" -> JsArray(
                JsObject("stdout" -> JsString(result.stdout.getOrElse(""))),
                JsObject("stderr" -> JsString(result.stderr.getOrElse(""))),
                JsObject("error"  -> JsString(result.error.getOrElse("")))
              ),
              "response" -> returnValue
            ).compactPrint

            if (result.success) {
              InvokeSuccess(output)
            } else {
              InvokeFailure(new Exception(output))
            }

          case Failure(e) => InvokeFailure(e)
        }
      }
      .recover {
        case e: Throwable => InvokeFailure(e)
      }
  }
} 
Example 36
Source File: Metrics.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client

import java.util.concurrent.TimeUnit

import akka.actor.Actor
import cool.graph.cuid.Cuid
import cool.graph.shared.externalServices.KinesisPublisher
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString}

import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal

object FeatureMetric extends Enumeration {
  type FeatureMetric = Value
  val Subscriptions           = Value("backend/api/subscriptions")
  val Filter                  = Value("backend/feature/filter")
  val NestedMutations         = Value("backend/feature/nested-mutation")
  val ApiSimple               = Value("backend/api/simple")
  val ApiRelay                = Value("backend/api/relay")
  val ApiFiles                = Value("backend/api/files")
  val ServersideSubscriptions = Value("backend/feature/sss")
  val RequestPipeline         = Value("backend/feature/rp") // add this!
  val PermissionQuery         = Value("backend/feature/permission-queries") // add this!
  val Authentication          = Value("backend/feature/authentication")
  val Algolia                 = Value("backend/feature/algolia") // add this!
  val Auth0                   = Value("backend/feature/integration-auth0")
  val Digits                  = Value("backend/feature/integration-digits")
}

case class ApiFeatureMetric(ip: String,
                            date: DateTime,
                            projectId: String,
                            clientId: String,
                            usedFeatures: List[String],
                            // Should be false when we can't determine. This is the case for subscriptions.
                            // Is always false for File api.
                            isFromConsole: Boolean)

class FeatureMetricActor(
    metricsPublisher: KinesisPublisher,
    interval: Int
) extends Actor {
  import context.dispatcher

  val metrics = mutable.Buffer.empty[ApiFeatureMetric]
  val FLUSH   = "FLUSH"
  val tick = context.system.scheduler.schedule(
    initialDelay = FiniteDuration(interval, TimeUnit.SECONDS),
    interval = FiniteDuration(interval, TimeUnit.SECONDS),
    receiver = self,
    message = FLUSH
  )

  override def postStop() = tick.cancel()

  def receive = {
    case metric: ApiFeatureMetric =>
      metrics += metric

    case FLUSH =>
      flushMetrics()
  }

  def flushMetrics() = {
    val byProject = metrics.groupBy(_.projectId) map {
      case (projectId, metrics) =>
        JsObject(
          "requestCount"        -> JsNumber(metrics.length),
          "projectId"           -> JsString(projectId),
          "usedIps"             -> JsArray(metrics.map(_.ip).distinct.take(10).toVector.map(JsString(_))),
          "features"            -> JsArray(metrics.flatMap(_.usedFeatures).distinct.toVector.map(JsString(_))),
          "date"                -> JsString(metrics.head.date.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z").withZoneUTC())),
          "version"             -> JsString("1"),
          "justConsoleRequests" -> JsBoolean(metrics.forall(_.isFromConsole))
        )
    }

    byProject.foreach { json =>
      try {
        metricsPublisher.putRecord(json.toString, shardId = Cuid.createCuid())
      } catch {
        case NonFatal(e) => println(s"Putting kinesis FeatureMetric failed: ${e.getMessage} ${e.toString}")
      }
    }
    metrics.clear()
  }
} 
Example 37
Source File: LambdaLogsSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.functions.lambda

import cool.graph.shared.functions.lambda.LambdaFunctionEnvironment
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsObject, JsString}

class LambdaLogsSpec extends FlatSpec with Matchers {
  "Logs parsing for lambda" should "return the correct aggregation of lines" in {
    val testString =
      """
        |START RequestId:	fb6c1b70-afef-11e7-b988-db72e0053f77	Version: $LATEST
        |2017-10-13T08:24:50.856Z	fb6c1b70-afef-11e7-b988-db72e0053f77	getting event {}
        |2017-10-13T08:24:50.856Z	fb6c1b70-afef-11e7-b988-db72e0053f77	requiring event => {
        |  return {
        |    data: {
        |      message: "msg"
        |    }
        |  }
        |}
        |2017-10-13T08:24:50.857Z	fb6c1b70-afef-11e7-b988-db72e0053f77	{"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}
        |END RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77
        |REPORT RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77	Duration: 1.10 ms	Billed Duration: 100 ms	Memory Size: 128 MB	Max Memory Used: 26 MB
      """.stripMargin

    val testString2 =
      """
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	20
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	null
        |2017-10-23T10:05:04.839Z	a426c566-b7d9-11e7-a701-7b78cbef51e9	{ big: 'OBJECT' }
      """.stripMargin

    val logs = LambdaFunctionEnvironment.parseLambdaLogs(testString)
    logs should contain(JsObject("2017-10-13T08:24:50.856Z" -> JsString("getting event {}")))
    logs should contain(
      JsObject("2017-10-13T08:24:50.856Z" -> JsString("requiring event => {\n  return {\n    data: {\n      message: \"msg\"\n    }\n  }\n}")))
    logs should contain(JsObject("2017-10-13T08:24:50.857Z" -> JsString(
      """{"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}""")))

    val logs2 = LambdaFunctionEnvironment.parseLambdaLogs(testString2)

    logs.length shouldEqual 3

    logs2.length shouldEqual 3
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("20")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("null")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("{ big: 'OBJECT' }")))
  }
} 
Example 38
Source File: CreateSeat.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.system.mutactions.internal

import cool.graph.shared.errors.UserInputErrors.CollaboratorProjectWithNameAlreadyExists
import cool.graph._
import cool.graph.client.database.DataResolver
import cool.graph.shared.externalServices.SnsPublisher
import cool.graph.system.database.tables.{ProjectTable, RelayIdTable, SeatTable}
import cool.graph.shared.models._
import scaldi.{Injectable, Injector}
import slick.jdbc.MySQLProfile.api._
import slick.jdbc.MySQLProfile.backend.DatabaseDef
import slick.lifted.TableQuery
import spray.json.{JsObject, JsString}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

case class CreateSeat(client: Client, project: Project, seat: Seat, internalDatabase: DatabaseDef, ignoreDuplicateNameVerificationError: Boolean = false)(
    implicit inj: Injector)
    extends SystemSqlMutaction
    with Injectable {

  val seatSnsPublisher: SnsPublisher = inject[SnsPublisher](identified by "seatSnsPublisher")

  if (!seat.clientId.contains(project.ownerId)) {
    seatSnsPublisher.putRecord(
      JsObject(
        "action"      -> JsString("ADD"),
        "projectId"   -> JsString(project.id),
        "projectName" -> JsString(project.name),
        "email"       -> JsString(seat.email),
        "status"      -> JsString(seat.status.toString),
        "byEmail"     -> JsString(client.email),
        "byName"      -> JsString(client.name)
      ).compactPrint)
  }

  override def execute: Future[SystemSqlStatementResult[Any]] = {
    val seats    = TableQuery[SeatTable]
    val relayIds = TableQuery[RelayIdTable]

    Future.successful(
      SystemSqlStatementResult(
        sqlAction = DBIO
          .seq(
            seats += cool.graph.system.database.tables
              .Seat(id = seat.id, status = seat.status, email = seat.email, clientId = seat.clientId, projectId = project.id),
            relayIds +=
              cool.graph.system.database.tables.RelayId(seat.id, "Seat")
          )
      ))
  }

  override def rollback = Some(DeleteSeat(client, project, seat, internalDatabase).execute)

  override def verify(): Future[Try[MutactionVerificationSuccess]] = {

    seat.clientId match {
      case None =>
        // pending collaborators do not have projects yet.
        Future.successful(Success(MutactionVerificationSuccess()))

      case Some(id) =>
        ignoreDuplicateNameVerificationError match {
          case true =>
            Future.successful(Success(MutactionVerificationSuccess()))

          case false =>
            val projects = TableQuery[ProjectTable]
            internalDatabase
              .run(projects.filter(p => p.clientId === id && p.name === project.name).length.result)
              .map {
                case 0 => Success(MutactionVerificationSuccess())
                case _ => Failure(CollaboratorProjectWithNameAlreadyExists(name = project.name))
              }
        }
    }
  }
} 
Example 39
Source File: ActionSchemaResolver.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.system

import com.typesafe.scalalogging.LazyLogging
import cool.graph.DataItem
import cool.graph.Types.Id
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas._
import cool.graph.shared.{ApiMatrixFactory}
import cool.graph.shared.models.{ActionTriggerMutationModelMutationType, ActionTriggerMutationRelationMutationType, ActionTriggerType, Project}
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.marshalling.sprayJson._
import sangria.schema.Schema
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionSchemaPayload(
    triggerType: ActionTriggerType.Value,
    mutationModel: Option[ActionSchemaPayloadMutationModel],
    mutationRelation: Option[ActionSchemaPayloadMutationRelation]
)

case class ActionSchemaPayloadMutationModel(
    modelId: Id,
    mutationType: ActionTriggerMutationModelMutationType.Value
)

case class ActionSchemaPayloadMutationRelation(
    relationId: Id,
    mutationType: ActionTriggerMutationRelationMutationType.Value
)

class ActionSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {

  def resolve(project: Project, payload: ActionSchemaPayload): Future[String] = {
    val apiMatrix = inject[ApiMatrixFactory].create(project)

    payload.triggerType match {
      case ActionTriggerType.MutationModel =>
        val model = apiMatrix.filterModel(project.getModelById_!(payload.mutationModel.get.modelId))

        model match {
          case None =>
            Future.successful(JsObject.empty.prettyPrint)
          case Some(model) =>
            val modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project)

            val schema: Schema[ActionUserContext, Unit] =
              payload.mutationModel.get.mutationType match {
                case ActionTriggerMutationModelMutationType.Create =>
                  new CreateSchema(model = model, modelObjectTypes = modelObjectTypes, project = project).build()
                case ActionTriggerMutationModelMutationType.Update =>
                  new UpdateSchema(model = model,
                                   modelObjectTypes = modelObjectTypes,
                                   project = project,
                                   updatedFields = List(),
                                   previousValues = DataItem("dummy", Map())).build()
                case ActionTriggerMutationModelMutationType.Delete =>
                  new DeleteSchema(model = model, modelObjectTypes = modelObjectTypes, project = project).build()
              }

            Executor
              .execute(
                schema = schema,
                queryAst = introspectionQuery,
                userContext = ActionUserContext(
                  requestId = "",
                  project = project,
                  nodeId = model.id,
                  mutation = MutationMetaData(id = "", _type = ""),
                  log = (x: String) => logger.info(x)
                )
              )
              .map { response =>
                val JsObject(fields) = response
                fields("data").compactPrint
              }
        }
    }
  }
} 
Example 40
Source File: SearchProviderAlgolia.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.system.schema.types

import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.shared.algolia.schemas.AlgoliaSchema
import cool.graph.shared.algolia.AlgoliaContext
import cool.graph.shared.models
import cool.graph.system.SystemUserContext
import cool.graph.system.schema.types.AlgoliaSyncQuery.AlgoliaSyncQueryContext
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.marshalling.sprayJson._
import sangria.relay.{Connection, ConnectionArgs, Node}
import sangria.schema._
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object SearchProviderAlgolia {
  case class SearchProviderAlgoliaContext(project: models.Project, algolia: models.SearchProviderAlgolia) extends Node with models.Integration {
    override val id              = algolia.id
    override val subTableId      = algolia.subTableId
    override val isEnabled       = algolia.isEnabled
    override val name            = algolia.name
    override val integrationType = algolia.integrationType
  }
  lazy val Type: ObjectType[SystemUserContext, SearchProviderAlgoliaContext] =
    ObjectType(
      "SearchProviderAlgolia",
      "This is a SearchProviderAlgolia",
      interfaces[SystemUserContext, SearchProviderAlgoliaContext](nodeInterface, Integration.Type),
      () =>
        idField[SystemUserContext, SearchProviderAlgoliaContext] ::
          fields[SystemUserContext, SearchProviderAlgoliaContext](
          Field("applicationId", StringType, resolve = _.value.algolia.applicationId),
          Field("apiKey", StringType, resolve = _.value.algolia.apiKey),
          Field(
            "algoliaSyncQueries",
            algoliaSyncQueryConnection,
            arguments = Connection.Args.All,
            resolve = ctx =>
              Connection.connectionFromSeq(ctx.value.algolia.algoliaSyncQueries
                                             .sortBy(_.id.toString)
                                             .map(s => AlgoliaSyncQueryContext(ctx.value.project, s)),
                                           ConnectionArgs(ctx))
          ),
          Field(
            "algoliaSchema",
            StringType,
            arguments = List(Argument("modelId", IDType)),
            resolve = ctx => {
              val modelId =
                ctx.args.raw.get("modelId").get.asInstanceOf[String]
              ctx.ctx.getSearchProviderAlgoliaSchema(ctx.value.project, modelId)
            }
          )
      )
    )
}

class SearchProviderAlgoliaSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {
  def resolve(project: models.Project, modelId: String): Future[String] = {
    val model = project.getModelById_!(modelId)
    Executor
      .execute(
        schema = new AlgoliaSchema(
          project = project,
          model = model,
          modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project)
        ).build(),
        queryAst = introspectionQuery,
        userContext = AlgoliaContext(
          project = project,
          requestId = "",
          nodeId = "",
          log = (x: String) => logger.info(x)
        )
      )
      .map { response =>
        val JsObject(fields) = response
        fields("data").compactPrint
      }
  }
} 
Example 41
Source File: SprayUtilities.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.nbtest

import spray.json.{JsArray, JsObject, JsValue, JsonFormat}

import scala.language.{existentials, implicitConversions}

abstract class SprayOp

case class IndexOp(item: Int) extends SprayOp

case class FieldOp(value: String) extends SprayOp

class SprayUtility(val json: JsValue) {

  private def parseQuery(q: String): List[SprayOp] = {
    q.split("." (0)).flatMap { t =>
      if (t.contains("[") && t.contains("]")) {
        t.split("][".toCharArray).filter(_.length > 0).toSeq match {
          case Seq(index) => Seq(IndexOp(index.toInt))
          case Seq(field, index) => Seq(FieldOp(field), IndexOp(index.toInt))
        }
      } else if (!t.contains("[") && !t.contains("]")) {
        Seq(FieldOp(t))
      } else {
        throw new IllegalArgumentException(s"Cannot parse query: $q")
      }
    }.toList
  }

  private def selectInternal[T](json: JsValue, ops: List[SprayOp])(implicit format: JsonFormat[T]): T = {
    ops match {
      case Nil => json.convertTo[T]
      case IndexOp(i) :: tail =>
        selectInternal[T](json.asInstanceOf[JsArray].elements(i), tail)
      case FieldOp(f) :: tail =>
        selectInternal[T](json.asInstanceOf[JsObject].fields(f), tail)
      case _ => throw new MatchError("This code should be unreachable")
    }
  }

  def select[T](query: String)(implicit format: JsonFormat[T]): T = {
    selectInternal[T](json, parseQuery(query))
  }
}

object SprayImplicits {
  implicit def sprayUtilityConverter(s: JsValue): SprayUtility = new SprayUtility(s)

  implicit def sprayUtilityConversion(s: SprayUtility): JsValue = s.json
} 
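A minimal sketch of the query syntax accepted by select (the JSON document and field names are invented): segments are separated by '.', and [i] indexes into a JsArray:

import spray.json._
import spray.json.DefaultJsonProtocol._
import com.microsoft.ml.nbtest.SprayImplicits._

val doc  = """{"runs": [{"name": "first", "metrics": {"auc": 0.91}}]}""".parseJson
val name = doc.select[String]("runs[0].name")        // "first"
val auc  = doc.select[Double]("runs[0].metrics.auc") // 0.91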
Example 42
Source File: Configuration.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.config

import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.LocalDateTime
import spray.json.{JsArray, JsObject, JsValue, _}

case class Configuration(generators: Option[Seq[Generator[Any]]],
                         series: Seq[Series[Any]],
                         from: LocalDateTime,
                         to: LocalDateTime) extends TimeToJson
{
   
   def timeSeries: Map[String, (TimeSeries[Any], Duration)] =
   {
      val memory = firstOrderGenerators

      series.map(s => {
         val duration = s.frequency
         val generator = Model.generator(memory)(s.generator)

         s.name -> (generator.timeseries(memory), duration)
      }).toMap
   }

   def firstOrderGenerators: Map[String, Generator[Any]] =
   {
      generators match {
         case None => Map()
         case Some(gens) => {
            val memory = scala.collection.mutable.Map[String, Generator[Any]]()

            gens.foreach(g => {
               memory.put(g.name.get, g)
            })

            memory.toMap
         }
      }
   }

   def toJson: JsValue = {
      new JsObject(Map(
         "generators" -> generators.map(g => g.map(_.toJson)).toJson,
         "exported" -> series.map(s => s.toJson).toJson,
         "from" -> from.toJson,
         "to" -> to.toJson
      ))
   }
}

object Configuration extends TimeToJson
{
   def apply(value: JsValue): Configuration = {
      val fields = value.asJsObject.fields

      val generators = fields.get("generators").map
      {
         case JsArray(l) => l.map(GeneratorFormat.read)
         case _ => throw new ClassCastException
      }

      val series = fields("exported") match {
         case JsArray(x) => x.map(Series[Any](_)).toSeq
         case _ => throw new ClassCastException
      }

      val from = fields("from").convertTo[LocalDateTime]
      val to = fields("to").convertTo[LocalDateTime]

      Configuration(generators, series, from, to)
   }
} 
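A minimal sketch of the writing direction, using the Series type shown in the next example; "daily-average" stands in for a generator defined elsewhere, and toJson emits the generators/exported/from/to fields that Configuration.apply reads back:

import org.joda.time.{Duration, LocalDateTime}
import be.cetic.tsimulus.config.{Configuration, Series}

val conf = Configuration(
  generators = None,
  series = Seq(Series[Any]("temperature", Left("daily-average"), Duration.standardMinutes(10))),
  from = new LocalDateTime(2016, 1, 1, 0, 0),
  to = new LocalDateTime(2016, 12, 31, 23, 59)
)
println(conf.toJson.prettyPrint)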
Example 43
Source File: Series.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.config

import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}

case class Series[T](name: String, generator: Either[String, Generator[Any]], frequency: Duration) extends TimeToJson
{
   def toJson: JsValue = {
      val _generator = generator match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      new JsObject(Map(
         "name" -> name.toJson,
         "generator" -> _generator,
         "frequency" -> frequency.toJson
      ))
   }
}

object Series extends TimeToJson
{
   def apply[T](value: JsValue): Series[T] = {
      val fields = value.asJsObject.fields

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }
      val frequency = fields("frequency").convertTo[Duration]

      val name = fields("name").convertTo[String]

      Series(name, generator, frequency)
   }
} 
Example 44
Source File: MonthGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.MonthTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class MonthGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "month")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MonthTimeSeries(ts)
   }

   override def toString = "MonthGenerator()"

   override def equals(o: Any) = o match {
      case that: MonthGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object MonthGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MonthGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MonthGenerator(name, Right(base))

   def apply(json: JsValue): MonthGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new MonthGenerator(name, base)
   }
} 
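The remaining datetime generators in this package follow the same pattern; a minimal round-trip sketch using MonthGenerator as the representative ("timestamp-gen" and "billing-month" are hypothetical names):

import be.cetic.tsimulus.generators.dt.MonthGenerator

val gen  = MonthGenerator(Some("billing-month"), "timestamp-gen")
val js   = gen.toJson            // e.g. {"type":"month","base":"timestamp-gen","name":"billing-month"}
val back = MonthGenerator(js)    // reads the same type/base/name fields back
assert(back == gen)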
Example 45
Source File: HourGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfYearTimeSeries, HourTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class HourGenerator(name: Option[String], base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "hour")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new HourTimeSeries(ts)
   }

   override def toString = "HourGenerator()"

   override def equals(o: Any) = o match {
      case that: HourGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object HourGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new HourGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new HourGenerator(name, Right(base))

   def apply(json: JsValue): HourGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new HourGenerator(name, base)
   }
} 
Example 46
Source File: DateTimeDifferenceGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.DateTimeDifferenceTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class DateTimeDifferenceGenerator(name: Option[String], val a: Either[String, Generator[LocalDateTime]], val b: Either[String, Generator[LocalDateTime]]) extends Generator[Duration](name, "dt::diff")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val aTS = Model.generator(generators)(a).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      val bTS = Model.generator(generators)(b).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DateTimeDifferenceTimeSeries(aTS, bTS)
   }

   override def toString = s"DateTimeDifferenceGenerator(${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: DateTimeDifferenceGenerator => that.name == this.name && this.a == that.a && this.b == that.b
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "a" -> either2json(a),
         "b" -> either2json(b)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DateTimeDifferenceGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], a: String, b: String) = new DateTimeDifferenceGenerator(name, Left(a), Left(b))
   def apply(name: Option[String], a: String, b: Generator[LocalDateTime]) = new DateTimeDifferenceGenerator(name, Left(a), Right(b))
   def apply(name: Option[String], a: Generator[LocalDateTime], b: String) = new DateTimeDifferenceGenerator(name, Right(a), Left(b))
   def apply(name: Option[String], a: Generator[LocalDateTime], b: Generator[LocalDateTime]) = new DateTimeDifferenceGenerator(name, Right(a), Right(b))

   def apply(json: JsValue): DateTimeDifferenceGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DateTimeDifferenceGenerator(name, a, b)
   }
} 
Example 47
Source File: SecondTimeGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MinuteTimeSeries, SecondTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class SecondTimeGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "second")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new SecondTimeSeries(ts)
   }

   override def toString = "SecondTimeGenerator()"

   override def equals(o: Any) = o match {
      case that: SecondTimeGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object SecondTimeGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new SecondTimeGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new SecondTimeGenerator(name, Right(base))

   def apply(json: JsValue): SecondTimeGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new SecondTimeGenerator(name, base)
   }
} 
Example 48
Source File: DayOfWeekGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.DayOfWeekTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class DayOfWeekGenerator(name: Option[String], base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "dow")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfWeekTimeSeries(ts)
   }

   override def toString = "DayOfWeekGenerator()"

   override def equals(o: Any) = o match {
      case that: DayOfWeekGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DayOfWeekGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfWeekGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfWeekGenerator(name, Right(base))

   def apply(json: JsValue): DayOfWeekGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DayOfWeekGenerator(name, base)
   }
} 
Example 49
Source File: DayOfMonthGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.TimeShiftTimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfMonthTimeSeries, MonthTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class DayOfMonthGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "dom")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfMonthTimeSeries(ts)
   }

   override def toString = s"DayOfMonthGenerator(${base})"

   override def equals(o: Any) = o match {
      case that: DayOfMonthGenerator => that.name == this.name && this.base == that.base
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DayOfMonthGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfMonthGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfMonthGenerator(name, Right(base))

   def apply(json: JsValue): DayOfMonthGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DayOfMonthGenerator(name, base)
   }
} 
Example 50
Source File: WeekGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MinuteTimeSeries, WeekTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class WeekGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "week")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new WeekTimeSeries(ts)
   }

   override def toString = "WeekGenerator()"

   override def equals(o: Any) = o match {
      case that: WeekGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object WeekGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new WeekGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new WeekGenerator(name, Right(base))

   def apply(json: JsValue): WeekGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new WeekGenerator(name, base)
   }
} 
Example 51
Source File: YearGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.YearTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._


class YearGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "year")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new YearTimeSeries(ts)
   }

   override def toString = "YearGenerator()"

   override def equals(o: Any) = o match {
      case that: YearGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object YearGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new YearGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new YearGenerator(name, Right(base))

   def apply(json: JsValue): YearGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new YearGenerator(name, base)
   }
} 
Example 52
Source File: DayOfYearGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfWeekTimeSeries, DayOfYearTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class DayOfYearGenerator(name: Option[String], base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "doy")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfYearTimeSeries(ts)
   }

   override def toString = "DayOfYearGenerator()"

   override def equals(o: Any) = o match {
      case that: DayOfYearGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DayOfYearGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfYearGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfYearGenerator(name, Right(base))

   def apply(json: JsValue): DayOfYearGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name") .map(f => f match {
         case JsString(x) => x
      })

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DayOfYearGenerator(name, base)
   }
} 
Example 53
Source File: MillisecondTimeGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.MillisecondTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class MillisecondTimeGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "ms")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MillisecondTimeSeries(ts)
   }

   override def toString = "MillisecondTimeGenerator()"

   override def equals(o: Any) = o match {
      case that: MillisecondTimeGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object MillisecondTimeGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MillisecondTimeGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MillisecondTimeGenerator(name, Right(base))

   def apply(json: JsValue): MillisecondTimeGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name").map {
         case JsString(x) => x
      }

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new MillisecondTimeGenerator(name, base)
   }
} 
Example 54
Source File: MinuteGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MillisecondTimeSeries, MinuteTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}


class MinuteGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "minute")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MinuteTimeSeries(ts)
   }

   override def toString = "MinuteGenerator()"

   override def equals(o: Any) = o match {
      case that: MinuteGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object MinuteGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MinuteGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MinuteGenerator(name, Right(base))

   def apply(json: JsValue): MinuteGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name").map {
         case JsString(x) => x
      }

      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new MinuteGenerator(name, base)
   }
} 
Example 55
Source File: GaussianNoiseGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.config.ARMAModel
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.primary.GaussianNoiseTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._


import scala.util.Random


class GaussianNoiseGenerator(name: Option[String],
                    val seed: Int,
                    val std: Double) extends Generator[Double](name, "gaussian")
{
   override def timeseries(generators: String => Generator[Any]) = GaussianNoiseTimeSeries(seed, std)

   override def toString = "GaussianNoise(" + seed + ", " + std + ")"

   override def equals(o: Any) = o match {
      case that: GaussianNoiseGenerator => that.name == this.name &&
         that.seed == this.seed &&
         Math.abs(that.std - this.std) < 0.0001
      case _ => false
   }
   override def toJson: JsValue = {

      val t = Map(
         "type" -> `type`.toJson,
         "seed" -> seed.toJson,
         "std" -> std.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object GaussianNoiseGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(json: JsValue): GaussianNoiseGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name").map {
         case JsString(x) => x
      }

      val seed = fields("seed").convertTo[Int]
      val std = fields("std").convertTo[Double]

      new GaussianNoiseGenerator(name, seed, std)
   }
} 
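
A hedged sketch (not in the original sources) that builds the generator programmatically instead of from JSON; the constructor arguments match the class definition above, and note that equals compares std with a small tolerance.

import be.cetic.tsimulus.generators.primary.GaussianNoiseGenerator

object GaussianNoiseSketch extends App {
   val noise = new GaussianNoiseGenerator(Some("noise"), 42, 0.5)
   println(noise)                      // GaussianNoise(42, 0.5)
   println(noise.toJson.compactPrint)  // a JsObject with "type", "seed", "std" and "name" fields
}
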
Example 56
Source File: WeeklyGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.primary


import java.security.InvalidParameterException

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.primary.WeeklyTimeSeries
import org.joda.time.DateTimeConstants
import spray.json.{JsNumber, JsObject, JsString, JsValue, _}


class WeeklyGenerator(name: Option[String],
                      val points: Map[String, Double]) extends Generator[Double](name, "weekly")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      def day = (s: String) => s match {
         case "monday" => DateTimeConstants.MONDAY
         case "tuesday" => DateTimeConstants.TUESDAY
         case "wednesday" => DateTimeConstants.WEDNESDAY
         case "thursday" => DateTimeConstants.THURSDAY
         case "friday" => DateTimeConstants.FRIDAY
         case "saturday" => DateTimeConstants.SATURDAY
         case "sunday" => DateTimeConstants.SUNDAY
         case _ => throw new InvalidParameterException(s"'${s}' is not a valid day name.")
      }

      WeeklyTimeSeries(points map {case (k,v) => (day(k), v)})
   }

   override def toString = "WeeklyGenerator(" + name + "," + points + ")"

   override def equals(o: Any) = o match {
      case that: WeeklyGenerator => that.name == this.name && that.points == this.points
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "points" -> points.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object WeeklyGenerator
{
   def apply(value: JsValue): WeeklyGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val points = value.asJsObject.fields("points") match {
         case JsObject(x) => x
         case _ => throw new ClassCastException
      }

      val r = points map { case (k,v) => (k, v match { case JsNumber(x) => x.toDouble })}

      val validDayNames = List("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
      val unmatchingDayNames = r.keySet.filterNot(validDayNames contains _)
      if(!unmatchingDayNames.isEmpty) throw new InvalidParameterException("The following day names are not valid: " + unmatchingDayNames)

      new WeeklyGenerator(name, r)
   }
} 
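
A minimal parsing sketch (not part of the original file): the "points" object maps lower-case day names to values, and apply rejects any key outside the list of valid day names.

import spray.json._
import be.cetic.tsimulus.generators.primary.WeeklyGenerator

object WeeklySketch extends App {
   val json = """{"type": "weekly", "points": {"monday": 8.7, "friday": -2.5, "sunday": 0.0}}""".parseJson
   val weekly = WeeklyGenerator(json)   // name = None, points = Map("monday" -> 8.7, "friday" -> -2.5, "sunday" -> 0.0)
   println(weekly)
}
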
Example 57
Source File: LesserThanGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.LesserThanTimeSeries
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFormat, _}



class LesserThanGenerator( name: Option[String],
                            val a: Either[String, Generator[Any]],
                            val b: Either[String, Generator[Any]],
                            val strict: Option[Boolean]) extends Generator[Any](name, "lesser-than")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      new LesserThanTimeSeries(first, second, strict match {
         case None => true
         case Some(x) => x
      })
   }

   override def toString = "LesserThan(" + name + ", " + a + ", " + b + ", " + strict + ")"

   override def equals(o: Any) = o match {
      case that: LesserThanGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b &&
         that.strict == this.strict
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)
      if(strict.isDefined) t = t.updated("strict", strict.toJson)

      new JsObject(t)
   }
}

object LesserThanGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): LesserThanGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])
      val strict = fields.get("strict").map(_.convertTo[Boolean])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new LesserThanGenerator(name, a, b, strict)
   }
} 
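
A small sketch (not in the original file) illustrating the optional "strict" flag; when it is absent, the comparison defaults to strict, as the None case in timeseries shows.

import spray.json._
import be.cetic.tsimulus.generators.binary.LesserThanGenerator

object LesserThanSketch extends App {
   // "a" and "b" are references to generators declared elsewhere in the configuration.
   val json = """{"type": "lesser-than", "a": "temperature", "b": "threshold", "strict": false}""".parseJson
   val lt = LesserThanGenerator(json)   // strict = Some(false); omitting "strict" yields None (treated as strict)
   println(lt.toJson.compactPrint)
}
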
Example 58
Source File: ImpliesGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.{AndTimeSeries, ImpliesTimeSeries}
import spray.json.{JsObject, JsString, JsValue, _}


class ImpliesGenerator(name: Option[String],
                       val a: Either[String, Generator[Any]],
                       val b: Either[String, Generator[Any]]) extends Generator[Any](name, "then")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new ImpliesTimeSeries(first, second)
   }

   override def toString = "Implies(" + name + ", " + a + ", " + b + ")"

   override def equals(o: Any) = o match {
      case that: ImpliesGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object ImpliesGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): ImpliesGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new ImpliesGenerator(name, a, b)
   }
} 
Example 59
Source File: XorGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.XorTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class XorGenerator(name: Option[String],
                   val a: Either[String, Generator[Any]],
                   val b: Either[String, Generator[Any]]) extends Generator[Any](name, "xor")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new XorTimeSeries(first, second)
   }

   override def toString = "Xor(" + name + ", " + a + ", " + b + ")"

   override def equals(o: Any) = o match {
      case that: XorGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object XorGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): XorGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new XorGenerator(name, a, b)
   }
} 
Example 60
Source File: FalseGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.binary.FalseTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class FalseGenerator(name: Option[String]) extends Generator[Boolean](name, "false")
{

   override def timeseries(generators: (String) => Generator[Any]) = new FalseTimeSeries()

   override def toString = "False(" + name + ")"

   override def equals(o: Any) = o match {
      case that: FalseGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object FalseGenerator
{
   def apply(value: JsValue): FalseGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      new FalseGenerator(name)
   }
} 
Example 61
Source File: AndGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.AndTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class AndGenerator(name: Option[String],
                   val a: Either[String, Generator[Any]],
                   val b: Either[String, Generator[Any]]) extends Generator[Any](name, "and")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new AndTimeSeries(first, second)
   }

   override def toString = "And(" + name + ", " + a + ", " + b + ")"

   override def equals(o: Any) = o match {
      case that: AndGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object AndGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): AndGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new AndGenerator(name, a, b)
   }
} 
Example 62
Source File: LogisticGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.LogisticTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

import scala.util.Random


class LogisticGenerator(name: Option[String],
                        val generator: Either[String, Generator[Any]],
                        val location: Double,
                        val scale: Double,
                        val seed: Option[Int]) extends Generator[Boolean](name, "logistic")
{

   override def timeseries(generators: (String) => Generator[Any]) =
   {
      Model.generator(generators)(generator).timeseries(generators) match {
         case dTS: TimeSeries[Double] => LogisticTimeSeries(dTS, location, scale, seed.getOrElse(Random.nextInt()))
         case other => throw new ClassCastException(other.toString)
      }
   }

   override def toString = "Logistic(" + name + ", " + generator + ", " + location + ", " + scale + ", " + seed + ")"

   override def equals(o: Any) = o match {
      case that: LogisticGenerator => that.name == this.name &&
         that.generator == this.generator &&
         that.location == this.location &&
         that.scale == this.scale &&
         that.seed == this.seed
      case _ => false
   }

   override def toJson: JsValue = {
      val _generator = (generator match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }).toJson

      val t = Map(
         "type" -> `type`.toJson,
         "generator" -> _generator,
         "location" -> location.toJson,
         "scale" -> scale.toJson,
         "seed" -> seed.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object LogisticGenerator extends TimeToJson
{
   def apply(value: JsValue): LogisticGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }
      val location = fields("location").convertTo[Double]
      val scale = fields("scale").convertTo[Double]
      val seed = fields.get("seed").map(_.convertTo[Int])

      new LogisticGenerator(name, generator, location, scale, seed)
   }
} 
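
A parsing sketch (not part of the original sources): "location" and "scale" are the parameters of the logistic transformation, and "seed" is optional; when it is omitted, a random seed is drawn at time-series creation.

import spray.json._
import be.cetic.tsimulus.generators.binary.LogisticGenerator

object LogisticSketch extends App {
   val json = """{"type": "logistic", "generator": "temperature", "location": 6.0, "scale": 2.4}""".parseJson
   val logistic = LogisticGenerator(json)   // generator = Left("temperature"), seed = None
   println(logistic)
}
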
Example 63
Source File: NotGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.NotTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class NotGenerator(name: Option[String],
                   val generator: Either[String, Generator[Any]]) extends Generator[Any](name, "not")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val base = Model.generator(generators)(generator).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      NotTimeSeries(base)
   }

   override def toString = "Not(" + name + ", " + generator + ")"

   override def equals(o: Any) = o match {
      case that: NotGenerator => that.name == this.name &&
         that.generator == this.generator
      case _ => false
   }

   override def toJson: JsValue = {
      val _generator = generator match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "generator" -> _generator,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object NotGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): NotGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new NotGenerator(name, generator)
   }
} 
Example 64
Source File: EquivGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.{AndTimeSeries, EquivTimeSeries}
import spray.json.{JsObject, JsString, JsValue, _}


class EquivGenerator(name: Option[String],
                   val a: Either[String, Generator[Any]],
                   val b: Either[String, Generator[Any]]) extends Generator[Any](name, "Equiv")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new EquivTimeSeries(first, second)
   }

   override def toString = "Equiv(" + name + ", " + a + ", " + b + ")"

   override def equals(o: Any) = o match {
      case that: EquivGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object EquivGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): EquivGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new EquivGenerator(name, a, b)
   }
} 
Example 65
Source File: GreaterThanGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.GreaterThanTimeSeries
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}



class GreaterThanGenerator( name: Option[String],
                            val a: Either[String, Generator[Any]],
                            val b: Either[String, Generator[Any]],
                            val strict: Option[Boolean]) extends Generator[Any](name, "greater-than")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      new GreaterThanTimeSeries(first, second, strict match {
         case None => true
         case Some(x) => x
      })
   }

   override def toString = "GreaterThan(" + name + ", " + a + ", " + b + ", " + strict + ")"

   override def equals(o: Any) = o match {
      case that: GreaterThanGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b &&
         that.strict == this.strict
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)
      if(strict.isDefined) t = t.updated("strict", strict.toJson)

      new JsObject(t)
   }
}

object GreaterThanGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): GreaterThanGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])
      val strict = fields.get("strict").map(_.convertTo[Boolean])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new GreaterThanGenerator(name, a, b, strict)
   }
} 
Example 66
Source File: OrGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.OrTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class OrGenerator(name: Option[String],
                  val a: Either[String, Generator[Any]],
                  val b: Either[String, Generator[Any]]) extends Generator[Any](name, "or")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new OrTimeSeries(first, second)
   }

   override def toString = "Or(" + name + ", " + a + ", " + b + ")"

   override def equals(o: Any) = o match {
      case that: OrGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue = {
      val _a = a match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val _b = b match
      {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "a" -> _a,
         "b" -> _b,
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object OrGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): OrGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new OrGenerator(name, a, b)
   }
} 
Example 67
Source File: TrueGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.binary.TrueTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class TrueGenerator(name: Option[String]) extends Generator[Boolean](name, "true")
{

   override def timeseries(generators: (String) => Generator[Any]) =
   {
      new TrueTimeSeries()
   }

   override def toString = "True(" + name + ")"

   override def equals(o: Any) = o match {
      case that: TrueGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object TrueGenerator
{
   def apply(value: JsValue): TrueGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      new TrueGenerator(name)
   }
} 
Example 68
Source File: TimeShiftGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.composite.TimeShiftTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}


class TimeShiftGenerator(name: Option[String],
                         val generator: Either[String, Generator[Any]],
                         val shift: Duration) extends Generator[Any](name, "time-shift")
                                              with TimeToJson
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val ts = Model.generator(generators)(generator).timeseries(generators)
      TimeShiftTimeSeries(ts, shift)
   }

   override def toString = "TimeShift(" + name + ", " + generator + ", " + shift.getMillis + ")"

   override def equals(o: Any) = o match {
      case that: TimeShiftGenerator => that.name == this.name && that.shift == this.shift
      case _ => false
   }

   override def toJson: JsValue =
   {
      var t = Map(
         "generator" -> either2json(generator),
         "shift" -> DurationFormat.write(shift),
         "type" -> `type`.toJson
      )

      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }

}

object TimeShiftGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(value: JsValue): TimeShiftGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val shift = fields("shift").convertTo[Duration]

      new TimeShiftGenerator(name, generator, shift)
   }
} 
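
A programmatic sketch (not in the original file): the constructor takes an org.joda.time.Duration, which toJson serializes through DurationFormat from TimeToJson. The two-hour shift and the "base-signal" reference are illustrative values only.

import org.joda.time.Duration
import be.cetic.tsimulus.generators.composite.TimeShiftGenerator

object TimeShiftSketch extends App {
   val shifted = new TimeShiftGenerator(Some("delayed"), Left("base-signal"), Duration.standardHours(2))
   println(shifted.toJson.compactPrint)
}
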
Example 69
Source File: CorrelatedGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.composite.CorrelatedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

import scala.util.Random


class CorrelatedGenerator(name: Option[String],
                          val generator: Either[String, Generator[Any]],
                          val coef: Double) extends Generator[Double](name, "correlated")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      Model.generator(generators)(generator) match {
         case dDouble : Generator[Double] => CorrelatedTimeSeries(dDouble.timeseries(generators), Random.nextInt(), coef)
         case _ => throw new ClassCastException
      }
   }

   override def toString = "Correlated(" + name + ", " + generator + ", " + coef + ")"

   override def equals(o: Any) = o match {
      case that: CorrelatedGenerator => that.name == this.name && that.generator == this.generator && that.coef == this.coef
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "generator" -> either2json(generator),
         "coef" -> coef.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object CorrelatedGenerator extends DefaultJsonProtocol
{
   def apply(value: JsValue): CorrelatedGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val `type` = fields("type").convertTo[String]
      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }
      val coef = fields("coef").convertTo[Double]

      new CorrelatedGenerator(name, generator, coef)
   }
} 
Example 70
Source File: FunctionGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.composite.FunctionTimeSeries
import spray.json.{DefaultJsonProtocol, JsNumber, JsObject, JsString, JsValue, _}


class FunctionGenerator(name: Option[String],
                        val generator: Either[String, Generator[Any]],
                        val slope: Double,
                        val intercept: Double) extends Generator[Double](name, "function")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      Model.generator(generators)(generator) match {
         // Could also be expressed as Sum(Times(generator, Constant(slope)), Constant(intercept))
         case g: Generator[Double] => FunctionTimeSeries[Double](g.timeseries(generators), (t,v) => Some(slope * v + intercept))
         case _ => throw new ClassCastException
      }
   }

   override def toString = "Function(" + name + ", " + generator + ", " + slope + ", " + intercept + ")"

   override def equals(o: Any) = o match {
      case that: FunctionGenerator => (that.name == this.name &&
         that.generator == this.generator &&
         that.slope == this.slope &&
         that.intercept == this.intercept)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "generator" -> either2json(generator),
         "slope" -> slope.toJson,
         "intercept" -> intercept.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object FunctionGenerator
{
   def apply(json: JsValue): FunctionGenerator = {

      val fields = json.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val slope = fields("slope") match {
         case JsNumber(n) => n.toDouble
      }

      val intercept = fields("intercept") match {
         case JsNumber(n) => n.toDouble
      }

      new FunctionGenerator(name, generator, slope, intercept)
   }
} 
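
A short sketch (not part of the original file) making the affine transformation explicit: each value v of the underlying generator is mapped to slope * v + intercept.

import spray.json._
import be.cetic.tsimulus.generators.composite.FunctionGenerator

object FunctionSketch extends App {
   val json = """{"type": "function", "generator": "base-signal", "slope": 2.0, "intercept": 1.0}""".parseJson
   val f = FunctionGenerator(json)   // every value v becomes 2.0 * v + 1.0
   println(f.toJson.compactPrint)
}
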
Example 71
Source File: BinaryTransitionGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.TransitionTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{JsObject, JsString, JsValue, _}


class BinaryTransitionGenerator(name: Option[String],
                          val first: Either[String, Generator[Any]],
                          val second: Either[String, Generator[Any]],
                          val time: LocalDateTime) extends Generator[Boolean](name, "binary-transition")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val firstBase = Model.generator(generators)(first).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val secondBase = Model.generator(generators)(second).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      TransitionTimeSeries[Boolean](firstBase, secondBase, time, None)
   }

   override def toString = "BinaryTransitionGenerator(" + name + "," + first + "," + second + "," + time + ")"

   override def equals(o: Any) = o match {
      case that: BinaryTransitionGenerator => that.name == this.name &&
         that.first == this.first &&
         that.second == this.second &&
         that.time == this.time
      case _ => false
   }

   override def toJson: JsValue = {
      val _first = (first match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }).toJson
      val _second = (second match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }).toJson

      var t = Map(
         "type" -> `type`.toJson,
         "first" -> _first,
         "second" -> _second,
         "time" -> time.toJson
      )

      if(name.isDefined)
         t = t.updated("name", name.get.toJson)

      new JsObject(t)
   }
}

object BinaryTransitionGenerator extends TimeToJson
{
   def apply(value: JsValue): BinaryTransitionGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val first = fields("first") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val second = fields("second") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val time = fields("time").convertTo[LocalDateTime]

      new BinaryTransitionGenerator(name, first, second, time)
   }
} 
Example 72
Source File: DivideGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.DivideTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class DivideGenerator(name: Option[String],
                      val numerator: Either[String, Generator[Any]],
                      val denominator: Either[String, Generator[Any]]) extends Generator[Double](name, "divide")
{
   override def timeseries(gen: String => Generator[Any]) =
   {
      val num = Model.generator(gen)(numerator).timeseries(gen) match {
         case t: TimeSeries[Double] => t
      }

      val den = Model.generator(gen)(denominator).timeseries(gen) match {
         case t: TimeSeries[Double] => t
      }


      new DivideTimeSeries(num, den)
   }

   override def toString = "Divide(" + name + ", " + numerator + ", " + denominator + ")"

   override def equals(o: Any) = o match {
      case that: DivideGenerator => that.name == this.name && that.numerator == this.numerator && that.denominator == this.denominator
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "numerator" -> either2json(numerator),
         "denominator" -> either2json(denominator)
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DivideGenerator
{
   def apply(value: JsValue): DivideGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val numerator = fields("numerator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val denominator = fields("denominator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new DivideGenerator(name, numerator, denominator)
   }
} 
Example 73
Source File: AggregateGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config._
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.AggregationTimeSeries
import spray.json.{JsArray, JsObject, JsString, JsValue, _}


class AggregateGenerator[U](name: Option[String],
                         val aggregator: String,
                         val generators: Seq[Either[String, Generator[Any]]]) extends Generator[U](name, "aggregate")
{
   override def timeseries(gen: String => Generator[Any]) =
   {
      val agg = aggregationFunction(aggregator)


      val ts = generators.map
      {
         case Left(s) => gen(s).timeseries(gen)
         case Right(g) => g.timeseries(gen)
      }

      val series = ts flatMap {
         case d : TimeSeries[Double] => Some(d)
         case _ => None
      }

      new AggregationTimeSeries[Double, U](agg, series)
   }

   override def toString = "Aggregate(" + name + ", " + aggregator + ", " + generators.mkString("[", ", ", "]") + ")"

   override def equals(o: Any) = o match {
      case that: AggregateGenerator[U] => that.name == this.name && that.aggregator == this.aggregator && that.generators == this.generators
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "aggregator" -> aggregator.toJson,
         "generators" -> generators.map(either2json).toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object AggregateGenerator extends DefaultJsonProtocol
{
   def apply[U](value: JsValue): AggregateGenerator[U] = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val aggregator = fields("aggregator").convertTo[String]
      val generators = fields("generators") match {
         case JsArray(x) => x.map
         {
            case JsString(s) => Left(s)
            case g => Right(GeneratorFormat.read(g))
         }.toList
      }

      new AggregateGenerator(name, aggregator, generators)
   }
} 
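
A parsing sketch (not in the original sources): "generators" is a JSON array whose elements are either references to named generators (strings) or inline generator objects. The "sum" aggregator name is an assumption, since aggregationFunction is not shown in this excerpt.

import spray.json._
import be.cetic.tsimulus.generators.composite.AggregateGenerator

object AggregateSketch extends App {
   val json = """{"type": "aggregate", "aggregator": "sum", "generators": ["daily-pattern", "noise"]}""".parseJson
   val agg = AggregateGenerator[Double](json)   // generators = List(Left("daily-pattern"), Left("noise"))
   println(agg.toJson.compactPrint)
}
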
Example 74
Source File: ConditionalGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.ConditionalTimeSeries
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}



class ConditionalGenerator(name: Option[String],
                           val condition: Either[String, Generator[Any]],
                           val success: Either[String, Generator[Any]],
                           val failure: Option[Either[String, Generator[Any]]]) extends Generator[Any](name, "conditional")
{

   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val cond = Model.generator(generators)(condition).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      val a = Model.generator(generators)(success).timeseries(generators) match {
         case t: TimeSeries[Any] => t
      }

      val b = failure.map(f => Model.generator(generators)(f).timeseries(generators) match {
         case t: TimeSeries[Any] => t
         }).getOrElse(new UndefinedTimeSeries())

      ConditionalTimeSeries(cond, a, b)
   }

   override def toString = "Conditional(" + name + ", " + condition + ", " + success + ", " + failure + ")"

   override def equals(o: Any) = o match {
      case that: ConditionalGenerator =>  that.name == this.name &&
                                          that.condition == this.condition &&
                                          that.success == this.success &&
                                          that.failure == this.failure
      case _ => false
   }

   override def toJson: JsValue =
   {
      var t = Map(
         "type" -> `type`.toJson,
         "condition" -> either2json(condition),
         "success" -> either2json(success)
      )

      if(failure.isDefined)
      {
         val _failure = (failure.get match {
            case Left(s) => s.toJson
            case Right(g) => g.toJson
         }).toJson

         t = t.updated("failure", _failure)
      }

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object ConditionalGenerator
{
   def apply(value: JsValue): ConditionalGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val condition = fields("condition") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val success = fields("success") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val failure =
         if (fields.contains("failure")) fields("failure") match {
            case JsString(s) => Some(Left(s))
            case g => Some(Right(GeneratorFormat.read(g)))
         }
         else None

      new ConditionalGenerator(name, condition, success, failure)
   }
} 
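
A small sketch (not part of the original file) showing the optional "failure" branch: when it is missing, timeseries falls back to an UndefinedTimeSeries for the false case.

import spray.json._
import be.cetic.tsimulus.generators.composite.ConditionalGenerator

object ConditionalSketch extends App {
   val json = """{"type": "conditional", "condition": "is-peak-hour", "success": "high-load"}""".parseJson
   val conditional = ConditionalGenerator(json)   // failure = None
   println(conditional.toJson.compactPrint)
}
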
Example 75
Source File: PartialGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.missing.PartialTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{JsObject, JsString, JsValue, _}


class PartialGenerator(name: Option[String],
                       val generator: Either[String, Generator[Any]],
                       val from: Option[LocalDateTime],
                       val to: Option[LocalDateTime],
                       val missingRate: Option[Double]) extends Generator[Any](name, "partial")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val ts = Model.generator(generators)(generator).timeseries(generators)
      PartialTimeSeries(ts, from, to, missingRate)
   }

   override def toString = "Partial(" + name + ", " + generator + ", " + from + ", " + to + ", " + missingRate + ")"

   override def equals(o: Any) = o match {
      case that: PartialGenerator => that.name == this.name &&
         that.generator == this.generator &&
         that.from == this.from &&
         that.to == this.to &&
         that.missingRate == this.missingRate
      case _ => false
   }

   override def toJson: JsValue =
   {
     var t = Map(
         "type" -> `type`.toJson,
         "generator" -> either2json(generator),
         "from" -> from.toJson,
         "to" -> to.toJson
      )

      if(missingRate.isDefined) t = t.updated("missing-rate" , missingRate.toJson)
      if(name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object PartialGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(value: JsValue): PartialGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }
      val from = fields.get("from").map(_.convertTo[LocalDateTime])
      val to = fields.get("to").map(_.convertTo[LocalDateTime])
      val missingRate = fields.get("missing-rate").map(_.convertTo[Double])

      new PartialGenerator(name, generator, from, to, missingRate)
   }
} 
Example 76
Source File: LimitedGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.missing.LimitedTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{JsObject, JsString, JsValue, _}


class LimitedGenerator(name: Option[String],
                       val generator: Either[String, Generator[Any]],
                       val from: Option[LocalDateTime],
                       val to: Option[LocalDateTime]) extends Generator[Any](name, "limited")
{
   override def timeseries(generators: (String) => Generator[Any]) =
      LimitedTimeSeries(Model.generator(generators)(generator).timeseries(generators), from, to)

   override def toString = "Limited(" + name + ", " + generator + ", " + from + ", " + to + ")"

   override def equals(o: Any) = o match {
      case that: LimitedGenerator => that.name == this.name &&
         that.generator == this.generator &&
         that.from == this.from &&
         that.to == this.to
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "generator" -> either2json(generator),
         "from" -> from.get.toJson,
         "to" -> to.get.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object LimitedGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(value: JsValue): LimitedGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }
      val from = fields.get("from").map(_.convertTo[LocalDateTime])
      val to = fields.get("to").map(_.convertTo[LocalDateTime])

      new LimitedGenerator(name, generator, from, to)
   }
} 
Example 77
Source File: DefaultGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.missing.DefaultTimeSeries
import spray.json.{JsArray, JsObject, JsString, JsValue, _}


class DefaultGenerator(name: Option[String], val gens: Seq[Either[String, Generator[Any]]]) extends Generator[Any](name, "first-of")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      val underlyings = gens.map(g => Model.generator(generators)(g).timeseries(generators) match {
         case t: TimeSeries[Any] => t
      })

      DefaultTimeSeries(underlyings)
   }

   override def toString = "UndefinedGenerator(" + name + "," + gens + ")"

   override def equals(o: Any) = o match {
      case that: DefaultGenerator => that.gens == this.gens
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "generators" -> gens.map(either2json).toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DefaultGenerator
{
   def apply(value: JsValue): DefaultGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val generators = fields("generators") match {
         case JsArray(l) => l.map
         {
            case JsString(s) => Left(s)
            case g => Right(GeneratorFormat.read(g))
         }
      }

      new DefaultGenerator(name, generators)
   }
} 
Example 78
Source File: UndefinedGenerator.scala    From TSimulus   with Apache License 2.0 5 votes vote down vote up
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}


class UndefinedGenerator(name: Option[String]) extends Generator[Any](name, "undefined")
{
   override def timeseries(generators: (String) => Generator[Any]) = new UndefinedTimeSeries()

   override def toString = "UndefinedGenerator(" + name + ")"

   override def equals(o: Any) = o match {
      case that: UndefinedGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object UndefinedGenerator
{
   def apply(value: JsValue): UndefinedGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      new UndefinedGenerator(name)
   }
} 
Example 79
Source File: WorkflowMetadataConverter.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.workflowmanager.versionconverter

import spray.json.{JsObject, JsString, JsValue}

import ai.deepsense.commons.utils.Version


object WorkflowMetadataConverter {

  object Js {
    val apiVersion14 = "1.4.0"
    val apiVersion13 = "1.3.0"
    val apiVersion12 = "1.2.0"

    val evaluate13id = "a88eaf35-9061-4714-b042-ddd2049ce917"
    val fit13id = "0c2ff818-977b-11e5-8994-feff819cdc9f"
    val fitPlusTransform13id = "1cb153f1-3731-4046-a29b-5ad64fde093f"
    val gridSearch13id = "9163f706-eaaf-46f6-a5b0-4114d92032b7"
    val transform13id = "643d8706-24db-4674-b5b4-10b5129251fc"
    val customTransformer13id = "65240399-2987-41bd-ba7e-2944d60a3404"
    val readDataFrame13Id = "c48dd54c-6aef-42df-ad7a-42fc59a09f0e"
    val writeDataFrame13Id = "9e460036-95cc-42c5-ba64-5bc767a40e4e"
    val readDatasource14Id = "1a3b32f0-f56d-4c44-a396-29d2dfd43423"
    val writeDatasource14Id = "bf082da2-a0d9-4335-a62f-9804217a1436"
    val readDatasourceName = "Read Datasource"
    val writeDatasourceName = "Write Datasource"

    val apiVersion = "apiVersion"
    val connections = "connections"
    val id = "id"
    val name = "name"
    val innerWorkflow = "inner workflow"
    val metadata = "metadata"
    val nodeId = "nodeId"
    val nodes = "nodes"
    val operation = "operation"
    val parameters = "parameters"
    val portIndex = "portIndex"
    val to = "to"
    val workflow = "workflow"
    val datasourceId = "data source"
  }

  def setWorkflowVersion(workflow: JsValue, targetVersion: Version): JsValue = {
    val fields = workflow.asJsObject.fields
    val oldMetadata = fields.getOrElse(Js.metadata, JsObject()).asJsObject
    val newMetadata = convertMetadata(oldMetadata, targetVersion.humanReadable)
    JsObject(fields + (Js.metadata -> newMetadata))
  }

  def convertMetadata(metadata: JsValue, targetVersion: String): JsValue =
    JsObject(metadata.asJsObject.fields.updated(Js.apiVersion, new JsString(targetVersion)))
} 
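A short usage sketch of convertMetadata above (the input metadata and target version are made up): only the "apiVersion" field is overwritten, all other metadata fields are preserved.

import spray.json._

// Hypothetical metadata object carrying an older apiVersion.
val oldMetadata = JsObject(
  "apiVersion" -> JsString("1.3.0"),
  "name" -> JsString("my workflow"))

// Yields {"apiVersion":"1.4.0","name":"my workflow"}.
val updated = WorkflowMetadataConverter.convertMetadata(oldMetadata, "1.4.0")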
Example 80
Source File: GraphJsonTestSupport.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.workflowmanager.storage

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{DefaultJsonProtocol, JsObject}

import ai.deepsense.deeplang.DOperation
import ai.deepsense.graph.Endpoint

trait GraphJsonTestSupport
  extends WordSpec
  with MockitoSugar
  with DefaultJsonProtocol
  with Matchers {

  def assertEndpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Unit = {
    assert(edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString)
    assert(edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex)
  }

  def endpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Boolean = {
    edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString &&
    edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex
  }

  def mockOperation(
      inArity: Int,
      outArity: Int,
      id: DOperation.Id,
      name: String): DOperation = {
    val dOperation = mock[DOperation]
    when(dOperation.inArity).thenReturn(inArity)
    when(dOperation.outArity).thenReturn(outArity)
    when(dOperation.id).thenReturn(id)
    when(dOperation.name).thenReturn(name)
    when(dOperation.paramValuesToJson).thenReturn(JsObject())
    dOperation
  }
} 
Example 81
Source File: JsonMQSerializer.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.Charset

import spray.json.JsObject

import ai.deepsense.workflowexecutor.communication.mq.MQSerializer

class JsonMQSerializer(
  jsonSerializers: Seq[JsonMessageSerializer],
  parent: Option[JsonMQSerializer] = None
) extends MQSerializer with JsonMessageSerializer {

  private val combinedJsonSerializers = {
    jsonSerializers.tail.foldLeft(jsonSerializers.head.serialize) {
      case (acc, serializer) =>
        acc.orElse(serializer.serialize)
    }
  }

  override val serialize: PartialFunction[Any, JsObject] = {
    parent match {
      case Some(p) => combinedJsonSerializers.orElse(p.serialize)
      case None => combinedJsonSerializers
    }
  }

  override def serializeMessage(message: Any): Array[Byte] = {
    serialize(message).compactPrint.getBytes(Global.charset)
  }

  def orElse(next: JsonMQSerializer): JsonMQSerializer =
    new JsonMQSerializer(jsonSerializers, Some(next))
} 
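The foldLeft above combines the individual serializers into a single PartialFunction with orElse, so the first serializer whose pattern matches a message wins. A self-contained sketch of that chaining idea, using plain spray-json and hypothetical message types (not the real workflow-executor messages):

import spray.json._

case class LaunchMsg(workflowId: String)
case class HeartbeatMsg(workflowId: String)

val launchSerializer: PartialFunction[Any, JsObject] = {
  case LaunchMsg(id) => JsObject("messageType" -> JsString("launch"), "workflowId" -> JsString(id))
}
val heartbeatSerializer: PartialFunction[Any, JsObject] = {
  case HeartbeatMsg(id) => JsObject("messageType" -> JsString("heartbeat"), "workflowId" -> JsString(id))
}

// Equivalent to the foldLeft over jsonSerializers: try each serializer in order.
val combined = Seq(launchSerializer, heartbeatSerializer).reduceLeft(_ orElse _)

combined(HeartbeatMsg("wf-1")).compactPrint  // {"messageType":"heartbeat","workflowId":"wf-1"}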
Example 82
Source File: GlobalMQDeserializerSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.StandardCharsets

import org.scalatest.mockito.MockitoSugar
import spray.json.{JsArray, JsNull, JsObject, JsString}
import ai.deepsense.commons.StandardSpec
import ai.deepsense.models.workflows.Workflow
import ai.deepsense.workflowexecutor.communication.message.global._
import ai.deepsense.workflowexecutor.communication.mq.json.Global.GlobalMQDeserializer

class GlobalMQDeserializerSpec
  extends StandardSpec
  with MockitoSugar {

  "GlobalMQDeserializer" should {
    "deserialize Launch messages" in {
      val workflowId = Workflow.Id.randomId
      val nodesToExecute = Vector(Workflow.Id.randomId, Workflow.Id.randomId, Workflow.Id.randomId)
      val jsNodesToExecute = JsArray(nodesToExecute.map(id => JsString(id.toString)))

      val rawMessage = JsObject(
        "messageType" -> JsString("launch"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId.toString),
          "nodesToExecute" -> jsNodesToExecute
        )
      )

      val readMessage: Any = serializeAndRead(rawMessage)
      readMessage shouldBe Launch(workflowId, nodesToExecute.toSet)
    }

    "deserialize Heartbeat messages" in {
      val workflowId = "foo-workflow"
      val rawMessage = JsObject(
        "messageType" -> JsString("heartbeat"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId),
          "sparkUiAddress" -> JsNull))
      serializeAndRead(rawMessage) shouldBe Heartbeat(workflowId, None)
    }
    "deserialize PoisonPill messages" in {
      val rawMessage = JsObject(
        "messageType" -> JsString("poisonPill"),
        "messageBody" -> JsObject())
      serializeAndRead(rawMessage) shouldBe PoisonPill()
    }
    "deserialize Ready messages" in {
      val sessionId = "foo-session"
      val rawMessage = JsObject(
        "messageType" -> JsString("ready"),
        "messageBody" -> JsObject(
          "sessionId" -> JsString(sessionId)))
      serializeAndRead(rawMessage) shouldBe Ready(sessionId)
    }
  }

  private def serializeAndRead(
    rawMessage: JsObject): Any = {
    val bytes = rawMessage.compactPrint.getBytes(StandardCharsets.UTF_8)
    GlobalMQDeserializer.deserializeMessage(bytes)
  }
} 
Example 83
Source File: InnerWorkflow.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.params.custom

import java.util.UUID

import spray.json.JsObject

import ai.deepsense.deeplang.DOperation
import ai.deepsense.deeplang.doperations.custom.{Sink, Source}
import ai.deepsense.graph.DeeplangGraph.DeeplangNode
import ai.deepsense.graph.{DeeplangGraph, Node}

case class InnerWorkflow(
   graph: DeeplangGraph,
   thirdPartyData: JsObject,
   publicParams: List[PublicParam] = List.empty) {

  require(findNodeOfType(Source.id).isDefined, "Inner workflow must have source node")
  require(findNodeOfType(Sink.id).isDefined, "Inner workflow must have sink node")

  val source: DeeplangNode = findNodeOfType(Source.id).get
  val sink: DeeplangNode = findNodeOfType(Sink.id).get

  private def findNodeOfType(operationId: DOperation.Id): Option[DeeplangNode] = {
    graph.nodes.find(_.value.id == operationId)
  }
  def getDatasourcesIds: Set[UUID] =
    graph.getDatasourcesIds
}

object InnerWorkflow {
  val empty = InnerWorkflow(
    DeeplangGraph(Set(Node(Node.Id.randomId, Source()), Node(Node.Id.randomId, Sink()))),
    JsObject())
}

case class PublicParam(nodeId: Node.Id, paramName: String, publicName: String) 
Example 84
Source File: CustomTransformerFactory.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.utils

import spray.json.JsObject

import ai.deepsense.deeplang.InnerWorkflowParser
import ai.deepsense.deeplang.doperables.{CustomTransformer, ParamWithValues}
import ai.deepsense.deeplang.params.custom.{InnerWorkflow, PublicParam}

object CustomTransformerFactory {

  def createCustomTransformer(
      innerWorkflow: InnerWorkflow): CustomTransformer = {
    val selectedParams: Seq[ParamWithValues[_]] =
        innerWorkflow.publicParams.flatMap {
      case PublicParam(nodeId, paramName, publicName) =>
        innerWorkflow.graph.nodes.find(_.id == nodeId)
          .flatMap(node => node.value.params.find(_.name == paramName)
          .map(p => {
            ParamWithValues(
              param = p.replicate(publicName),
              defaultValue = node.value.getDefault(p),
              setValue = node.value.get(p))
          }))
    }
    CustomTransformer(innerWorkflow, selectedParams)
  }
} 
Example 85
Source File: ParamsSerialization.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.serialization

import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}

import ai.deepsense.deeplang.catalogs.doperable.exceptions.NoParameterlessConstructorInClassException
import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.{CatalogRecorder, ExecutionContext, TypeUtils}
import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

trait ParamsSerialization {

  self: Params =>

  def saveObjectWithParams(ctx: ExecutionContext, path: String): Unit = {
    saveMetadata(ctx, path)
    saveParams(ctx, path)
  }

  def loadAndSetParams(ctx: ExecutionContext, path: String): this.type = {
    setParams(loadParams(ctx, path), ctx.inferContext.graphReader)
  }

  protected def saveMetadata(ctx: ExecutionContext, path: String) = {
    val metadataFilePath = ParamsSerialization.metadataFilePath(path)
    val metadataJson = JsObject(
      ParamsSerialization.classNameKey -> JsString(this.getClass.getName)
    )
    JsonObjectPersistence.saveJsonToFile(ctx, metadataFilePath, metadataJson)
  }

  protected def saveParams(ctx: ExecutionContext, path: String): Unit = {
    val paramsFilePath = ParamsSerialization.paramsFilePath(path)
    JsonObjectPersistence.saveJsonToFile(ctx, paramsFilePath, paramValuesToJson)
  }

  protected def loadParams(ctx: ExecutionContext, path: String): JsValue = {
    JsonObjectPersistence.loadJsonFromFile(ctx, ParamsSerialization.paramsFilePath(path))
  }

  private def setParams(paramsJson: JsValue, graphReader: GraphReader): this.type = {
    this.set(paramPairsFromJson(paramsJson, graphReader): _*)
  }
}

object ParamsSerialization {
  val classNameKey = "className"
  val paramsFileName = "params"
  val metadataFileName = "metadata"

  def load(ctx: ExecutionContext, path: String): Loadable = {
    import DefaultJsonProtocol._
    val metadataPath = metadataFilePath(path)
    val metadataJson: JsObject =
      JsonObjectPersistence.loadJsonFromFile(ctx, metadataPath).asJsObject
    val className = metadataJson.fields(classNameKey).convertTo[String]
    val clazz: Class[_] = Class.forName(className)
    val loadable = TypeUtils.createInstance(TypeUtils.constructorForClass(clazz)
        .getOrElse(throw new NoParameterlessConstructorInClassException(clazz.getCanonicalName))
    ).asInstanceOf[Loadable]
    loadable.load(ctx, path)
  }

  def metadataFilePath(path: String): String = {
    PathsUtils.combinePaths(path, metadataFileName)
  }

  def paramsFilePath(path: String): String = {
    PathsUtils.combinePaths(path, paramsFileName)
  }
} 
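The metadata file written by saveMetadata above is a single-field JSON object; load reads it back, resolves the class by name and instantiates it through its parameterless constructor. A sketch of the file's shape (the class name is hypothetical):

import spray.json._

// Content of the "metadata" file stored next to the saved params.
val metadataJson = JsObject(
  "className" -> JsString("ai.deepsense.deeplang.doperables.SomeTransformer"))

metadataJson.compactPrint  // {"className":"ai.deepsense.deeplang.doperables.SomeTransformer"}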
Example 86
Source File: AbstractParamSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.params

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{JsObject, JsValue}

import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

abstract class AbstractParamSpec[T, U <: Param[T]]
  extends WordSpec
  with Matchers
  with MockitoSugar {

  def className: String

  def paramFixture: (U, JsValue)  // param + its json description

  def valueFixture: (T, JsValue)  // value + its json description

  val defaultValue: T = valueFixture._1

  def graphReader: GraphReader = mock[GraphReader]

  def serializeDefaultValue(default: T): JsValue = paramFixture._1.valueToJson(default)

  className should {
    "serialize itself to JSON" when {
      "default value is not provided" in {
        val (param, expectedJson) = paramFixture
        param.toJson(maybeDefault = None) shouldBe expectedJson
      }
      "default value is provided" in {
        val (param, expectedJson) = paramFixture
        val expectedJsonWithDefault = JsObject(
          expectedJson.asJsObject.fields + ("default" -> serializeDefaultValue(defaultValue))
        )
        param.toJson(maybeDefault = Some(defaultValue)) shouldBe expectedJsonWithDefault
      }
    }
  }

  it should {
    "serialize value to JSON" in {
      val param = paramFixture._1
      val (value, expectedJson) = valueFixture
      param.valueToJson(value) shouldBe expectedJson
    }
  }

  it should {
    "deserialize value from JSON" in {
      val param = paramFixture._1
      val (expectedValue, valueJson) = valueFixture
      val extractedValue = param.valueFromJson(valueJson, graphReader)
      extractedValue shouldBe expectedValue
    }
  }
} 
Example 87
Source File: AbstractChoiceParamSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.params.choice

import scala.reflect.runtime.universe._

import spray.json.{DeserializationException, JsObject}

import ai.deepsense.deeplang.params.exceptions.NoArgumentConstructorRequiredException
import ai.deepsense.deeplang.params.{AbstractParamSpec, Param}
import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

abstract class AbstractChoiceParamSpec[T, U <: Param[T]] extends AbstractParamSpec[T, U] {

  protected def createChoiceParam[V <: Choice : TypeTag](
    name: String, description: String): Param[V]

  className should {
    "throw an exception when choices don't have no-arg constructor" in {
      a[NoArgumentConstructorRequiredException] should be thrownBy
        createChoiceParam[BaseChoice]("name", "description")
    }
    "throw an exception when unsupported choice is given" in {
      val graphReader = mock[GraphReader]
      a[DeserializationException] should be thrownBy
        createChoiceParam[ChoiceABC]("name", "description").valueFromJson(
          JsObject(
            "unsupportedClass" -> JsObject()
          ),
          graphReader
        )
    }
    "throw an exception when not all choices are declared" in {
      an[IllegalArgumentException] should be thrownBy
        createChoiceParam[ChoiceWithoutDeclaration]("name", "description")
    }
  }
} 
Example 88
Source File: FitSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang._
import ai.deepsense.deeplang.doperables.Transformer
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperations.MockDOperablesFactory._
import ai.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import ai.deepsense.deeplang.params.ParamsMatchers._

class FitSpec extends UnitSpec with DeeplangTestSupport {

  "Fit" should {
    "fit input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testFit(op: Fit, expectedTransformer: Transformer): Unit = {
        val Vector(outputTransformer: Transformer) =
          op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)
        outputTransformer shouldBe expectedTransformer
      }
      val op1 = Fit()
      testFit(op1, transformer1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testFit(op2, transformer2)
    }
    "not modify params in input Estimator instance upon execution" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "infer Transformer from input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testInference(op: Fit, expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
        val inputDF = DataFrame.forInference(createSchema())
        val (knowledge, warnings) =
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        // Currently, InferenceWarnings are always empty.
        warnings shouldBe InferenceWarnings.empty
        val Vector(transformerKnowledge) = knowledge
        transformerKnowledge shouldBe expectedTransformerKnowledge
      }
      val op1 = Fit()
      testInference(op1, transformerKnowledge1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testInference(op2, transformerKnowledge2)
    }
    "not modify params in input Estimator instance upon inference" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      val inputDF = DataFrame.forInference(createSchema())
      op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "throw Exception" when {
      "there are more than one Estimator in input Knowledge" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimators = Set[DOperable](new MockEstimator, new MockEstimator)

        val op = Fit()
        a[TooManyPossibleTypesException] shouldBe thrownBy {
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimators), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
      "Estimator's dynamic parameters are invalid" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimator = new MockEstimator
        val fit = Fit().setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
        a[DeepLangMultiException] shouldBe thrownBy {
          fit.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
    }
  }
} 
Example 89
Source File: GridSearchSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperables.report.Report
import ai.deepsense.deeplang.doperations.MockDOperablesFactory.{MockEstimator, MockEvaluator}
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import ai.deepsense.deeplang.{DKnowledge, DeeplangTestSupport, UnitSpec}

class GridSearchSpec extends UnitSpec with DeeplangTestSupport {
  "GridSearch" should {
    "infer knowledge when dynamic parameters are valid" in {
      val inputDF = DataFrame.forInference(createSchema())
      val estimator = new MockEstimator
      val evaluator = new MockEvaluator

      val gridSearch = GridSearch()
      gridSearch.inferKnowledgeUntyped(
          Vector(DKnowledge(estimator), DKnowledge(inputDF), DKnowledge(evaluator)))(mock[InferContext]) shouldBe
        (Vector(DKnowledge(Report())), InferenceWarnings.empty)
    }
    "throw Exception" when {
      "Estimator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), None)
      }
      "Evaluator's dynamic parameters are invalid" in {
        checkMultiException(None, Some(-2))
      }
      "Both Estimator's and Evaluator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), Some(-2))
      }
    }
  }

  private def checkMultiException(
      estimatorParamValue: Option[Double],
      evaluatorParamValue: Option[Double]): Unit = {

    val inputDF = DataFrame.forInference(createSchema())
    val estimator = new MockEstimator
    val evaluator = new MockEvaluator

    val gridSearch = GridSearch()
      .setEstimatorParams(prepareParamDictionary(estimator.paramA.name, estimatorParamValue))
      .setEvaluatorParams(prepareParamDictionary(evaluator.paramA.name, evaluatorParamValue))

    val multiException = the [DeepLangMultiException] thrownBy {
      gridSearch.inferKnowledgeUntyped(
        Vector(
          DKnowledge(estimator),
          DKnowledge(inputDF),
          DKnowledge(evaluator)))(mock[InferContext])
    }

    val invalidParamCount =
      estimatorParamValue.map(_ => 1).getOrElse(0) +
      evaluatorParamValue.map(_ => 1).getOrElse(0)

    multiException.exceptions should have size invalidParamCount
  }

  private def prepareParamDictionary(paramName: String, maybeValue: Option[Double]): JsObject = {
    val jsonEntries = maybeValue.map(
        value => Seq(paramName -> JsNumber(value)))
      .getOrElse(Seq())
    JsObject(jsonEntries: _*)
  }
} 
Example 90
Source File: FitPlusTransformSpec.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang.doperables.Transformer
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperations.MockDOperablesFactory._
import ai.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.InferContext
import ai.deepsense.deeplang._

class FitPlusTransformSpec extends UnitSpec with DeeplangTestSupport {

  "FitPlusTransform" when {
    "executed" should {
      "pass parameters to the input Estimator produce a Transformer and transformed DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testExecute(
          op: FitPlusTransform,
          expectedDataFrame: DataFrame,
          expectedTransformer: Transformer): Unit = {
          val results = op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)
          val outputDataFrame = results(0).asInstanceOf[DataFrame]
          val outputTransformer = results(1).asInstanceOf[Transformer]

          outputDataFrame shouldBe expectedDataFrame
          outputTransformer shouldBe expectedTransformer
        }

        testExecute(fpt, transformedDataFrame1, transformer1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testExecute(fpt, transformedDataFrame2, transformer2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }

    }
    "inferring knowledge" should {
      "take parameters from the input Estimator, infer Transformer and then a DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testInference(
          op: FitPlusTransform,
          expectedDataFrameKnowledge: DKnowledge[DataFrame],
          expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
          val (Vector(outputDataFrameKnowledge, outputTransformerKnowledge), _) =
            op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]]))(mock[InferContext])

          outputDataFrameKnowledge shouldBe expectedDataFrameKnowledge
          outputTransformerKnowledge shouldBe expectedTransformerKnowledge
        }

        testInference(fpt, transformedDataFrameKnowledge1, transformerKnowledge1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testInference(fpt, transformedDataFrameKnowledge2, transformerKnowledge2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }
      "throw exceptions" when {
        "input Estimator Knowledge consist more than one type" in {
          val estimators = Set[DOperable](new MockEstimator, new MockEstimator)
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimators), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          a[TooManyPossibleTypesException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
        "Estimator's dynamic parameters are invalid" in {
          val estimator = new MockEstimator
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
          a[DeepLangMultiException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
      }
    }
  }
} 
Example 91
Source File: InnerWorkflowTestFactory.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables

import spray.json.JsObject

import ai.deepsense.deeplang.DOperation
import ai.deepsense.deeplang.doperations.ConvertType
import ai.deepsense.deeplang.doperations.custom.{Sink, Source}
import ai.deepsense.deeplang.params.custom.{InnerWorkflow, PublicParam}
import ai.deepsense.deeplang.params.selections.{MultipleColumnSelection, NameColumnSelection}
import ai.deepsense.graph.{DeeplangGraph, Edge, Node}
import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

object InnerWorkflowTestFactory {

  val sourceNodeId = "2603a7b5-aaa9-40ad-9598-23f234ec5c32"
  val sinkNodeId = "d7798d5e-b1c6-4027-873e-a6d653957418"
  val innerNodeId = "b22bd79e-337d-4223-b9ee-84c2526a1b75"

  val sourceNode = Node(sourceNodeId, Source())
  val sinkNode = Node(sinkNodeId, Sink())

  private def createInnerNodeOperation(targetType: TargetTypeChoice, graphReader: GraphReader): ConvertType = {

    val params = TypeConverter()
      .setTargetType(targetType)
      .setSelectedColumns(MultipleColumnSelection(Vector(NameColumnSelection(Set("column1")))))
      .paramValuesToJson
    new ConvertType().setParamsFromJson(params, graphReader)
  }

  private def createInnerNode(targetType: TargetTypeChoice, graphReader: GraphReader): Node[DOperation] =
    Node(innerNodeId, createInnerNodeOperation(targetType, graphReader))

  def simpleGraph(graphReader: GraphReader,
    targetType: TargetTypeChoice = TargetTypeChoices.StringTargetTypeChoice()): DeeplangGraph = {
    val innerNode = createInnerNode(targetType, graphReader)
    DeeplangGraph(
      Set(sourceNode, sinkNode, innerNode),
      Set(Edge(sourceNode, 0, innerNode, 0), Edge(innerNode, 0, sinkNode, 0)))
  }

} 
Example 92
Source File: WorkflowProtocol.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.workflowexecutor.communication.mq.serialization.json

import spray.json.JsObject

import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader
import ai.deepsense.models.workflows.{ExecutionReport, InferredState, WorkflowWithResults}
import ai.deepsense.models.json.workflow.ExecutionReportJsonProtocol._
import ai.deepsense.models.json.workflow.{InferredStateJsonProtocol, WorkflowJsonProtocol, WorkflowWithResultsJsonProtocol}
import ai.deepsense.workflowexecutor.communication.message.workflow.AbortJsonProtocol._
import ai.deepsense.workflowexecutor.communication.message.workflow.SynchronizeJsonProtocol._
import ai.deepsense.workflowexecutor.communication.message.workflow._
import ai.deepsense.workflowexecutor.communication.mq.json.Constants.MessagesTypes._
import ai.deepsense.workflowexecutor.communication.mq.json.{DefaultJsonMessageDeserializer, DefaultJsonMessageSerializer, JsonMessageDeserializer, JsonMessageSerializer}

object WorkflowProtocol {
  val abort = "abort"
  val launch = "launch"
  val updateWorkflow = "updateWorkflow"
  val synchronize = "synchronize"

  object AbortDeserializer extends DefaultJsonMessageDeserializer[Abort](abort)

  object SynchronizeDeserializer extends DefaultJsonMessageDeserializer[Synchronize](synchronize)
  object SynchronizeSerializer extends DefaultJsonMessageSerializer[Synchronize](synchronize)

  case class UpdateWorkflowDeserializer(graphReader: GraphReader)
    extends JsonMessageDeserializer
    with UpdateWorkflowJsonProtocol {

    private val defaultDeserializer =
      new DefaultJsonMessageDeserializer[UpdateWorkflow](updateWorkflow)

    override def deserialize: PartialFunction[(String, JsObject), Any] =
      defaultDeserializer.deserialize
  }
} 
Example 93
Source File: GraphJsonTestSupport.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.models.json.graph

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{DefaultJsonProtocol, JsObject}

import ai.deepsense.deeplang.DOperation
import ai.deepsense.graph.Endpoint

trait GraphJsonTestSupport
  extends WordSpec
  with MockitoSugar
  with DefaultJsonProtocol
  with Matchers {

  def assertEndpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Unit = {
    assert(edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString)
    assert(edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex)
  }

  def endpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Boolean = {
    edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString &&
    edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex
  }

  def mockOperation(
      inArity: Int,
      outArity: Int,
      id: DOperation.Id,
      name: String): DOperation = {

    val dOperation = mock[DOperation]
    when(dOperation.inArity).thenReturn(inArity)
    when(dOperation.outArity).thenReturn(outArity)
    when(dOperation.id).thenReturn(id)
    when(dOperation.name).thenReturn(name)
    dOperation
  }
} 
Example 94
Source File: JobDetailsRecord.scala    From mist   with Apache License 2.0 5 votes vote down vote up
package io.hydrosphere.mist.master

import io.hydrosphere.mist.core.CommonData.{Action, JobParams}
import io.hydrosphere.mist.master.interfaces.JsonCodecs
import JsonCodecs._
import mist.api.data.{JsData, JsMap}
import spray.json.{JsObject, JsString, enrichAny, enrichString}


case class JobDetailsRecord (
  path: String, className: String, namespace: String, parameters: String,
  externalId: Option[String], function: Option[String], action: String,
  source: String, jobId: String, startTime: Option[Long], endTime: Option[Long],
  jobResult: Option[String], status: String, workerId: Option[String],
  createTime: Long) {

  def toJobDetails: JobDetails = {
    JobDetails(function.get, jobId,
      JobParams(path, className, parameters.parseJson.convertTo[JsMap], toAction(action)),
      namespace, externalId, JobDetails.Source(source), startTime, endTime,
      toResult(jobResult), JobDetails.Status(status), workerId, createTime)
  }

  def toResult(stringOption: Option[String]): Option[Either[String, JsData]] = {
    stringOption match {
      case Some(string) => string.parseJson match {
          case obj @ JsObject(fields) =>
            val maybeErr = fields.get("error").flatMap({
              case JsString(err) => Some(err)
              case x => None
            })
            maybeErr match {
              case None => Some(Right(fields.get("result").get.convertTo[JsData]))
              case Some(err) => Some(Left(err))
            }
          case JsString(err) => Some(Left(err))
          case _ => throw new IllegalArgumentException(s"can not deserialize $string to Job response")
        }
      case None => None
    }
  }

  def toAction(action: String) = {
    action match {
      case "serve" => Action.Serve
      case "train" => Action.Execute
      case _ => Action.Execute
    }
  }
}

object JobDetailsRecord {
  def apply(jd: JobDetails): JobDetailsRecord = {
    val jp: JobParams = jd.params
    new JobDetailsRecord(jp.filePath, jp.className, jd.context,
      jp.arguments.toJson.compactPrint, jd.externalId, Some(jd.function),
      jp.action.toString, jd.source.toString, jd.jobId, jd.startTime, jd.endTime,
      jobResponseToString(jd.jobResult), jd.status.toString, jd.workerId,
      jd.createTime
    )
  }

  def jobResponseToString(jobResponseOrError: Option[Either[String, JsData]]): Option[String] = {
    jobResponseOrError match {
      case Some(response) => val jsValue = response match {
          case Left(err) => JsObject("error" -> JsString(err))
          case Right(data) => JsObject("result" -> data.toJson)
        }
        Some(jsValue.compactPrint)
      case None => None
    }
  }
} 
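The wire format produced by jobResponseToString above wraps a successful result in a "result" field and a failure in an "error" field; toResult reverses that mapping (and also accepts a bare JSON string as an error message). A sketch with hypothetical payloads:

import spray.json._

val success = JsObject("result" -> JsNumber(42)).compactPrint    // {"result":42}
val failure = JsObject("error" -> JsString("boom")).compactPrint // {"error":"boom"}

// toResult(Some(success)) corresponds to Some(Right(...)),
// toResult(Some(failure)) to Some(Left("boom")), and toResult(None) to None.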
Example 95
Source File: modelFactoryPlaceholder.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.factory

import java.io.File

import com.eharmony.aloha.factory.ex.AlohaFactoryException
import com.eharmony.aloha.io.StringReadable
import org.apache.commons.{vfs, vfs2}
import spray.json.{JsObject, pimpString}

import scala.util.{Failure, Try}


// A model definition imported from another file; resolving it yields the file's JSON contents.
private[factory] trait ImportedModelPlaceholder {
    def resolveFileContents(): Try[JsObject]
}

private[factory] case class Vfs2ImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            vfs2.VFS.getManager.resolveFile(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve VFS2 file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromVfs2(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for VFS2 file: $file", f) }
        }
    } yield json
}

private[factory] case class Vfs1ImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            vfs.VFS.getManager.resolveFile(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve VFS1 file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromVfs1(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for VFS1 file: $file", f) }
        }
    } yield json
}

private[factory] case class FileImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            new File(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromFile(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for file: $file", f) }
        }
    } yield json
} 
Example 96
Source File: ModelParser.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.factory

import com.eharmony.aloha.audit.Auditor
import com.eharmony.aloha.factory.jsext.JsValueExtensions
import com.eharmony.aloha.id.{ModelId, ModelIdentity}
import com.eharmony.aloha.models.{Model, Submodel}
import com.eharmony.aloha.reflect.RefInfo
import com.eharmony.aloha.semantics.Semantics
import spray.json.{DefaultJsonProtocol, JsObject, JsValue, JsonFormat, JsonReader}
import spray.json.DefaultJsonProtocol.{LongJsonFormat, StringJsonFormat}


sealed trait ModelParser {

  val modelType: String

  private implicit val modelIdFormat = DefaultJsonProtocol.jsonFormat2(ModelId.apply)

  protected final def getModelId(json: JsValue): Option[ModelIdentity] =
    json(ModelParser.modelIdField).collect{case o: JsObject => o.convertTo[ModelId]}
}

private object ModelParser {
  val modelIdField = "modelId"
}

trait ModelParsingPlugin extends ModelParser {
  def modelJsonReader[U, N, A, B <: U](factory: SubmodelFactory[U, A], semantics: Semantics[A], auditor: Auditor[U, N, B])
                                      (implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[Model[A, B]]]
}

trait SubmodelParsingPlugin extends ModelParser {
  def submodelJsonReader[U, N, A, B <: U](factory: SubmodelFactory[U, A], semantics: Semantics[A], auditor: Auditor[U, N, B])
                                         (implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[Submodel[N, A, U]]]
}

trait ModelSubmodelParsingPlugin extends ModelParsingPlugin with SubmodelParsingPlugin {
  def commonJsonReader[U, N, A, B <: U](factory: SubmodelFactory[U, A], semantics: Semantics[A], auditor: Auditor[U, N, B])
                                       (implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[_ <: Model[A, B] with Submodel[_, A, B]]]

  final override def modelJsonReader[U, N, A, B <: U](factory: SubmodelFactory[U, A], semantics: Semantics[A], auditor: Auditor[U, N, B])
                                                     (implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[Model[A, B]]] = {
    val reader = commonJsonReader(factory, semantics, auditor)
    reader.map(jr => jr.asInstanceOf[JsonReader[Model[A, B]]])
  }

  final def submodelJsonReader[U, N, A, B <: U](factory: SubmodelFactory[U, A], semantics: Semantics[A], auditor: Auditor[U, N, B])
                                               (implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[Submodel[N, A, U]]] = {
    val reader = commonJsonReader(factory, semantics, auditor)
    reader.map(jr => jr.asInstanceOf[JsonReader[Submodel[N, A, U]]])
  }
} 
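getModelId above looks for a "modelId" object in the model JSON and converts it with jsonFormat2(ModelId.apply), i.e. it expects the two ModelId fields. A minimal sketch of a document it would accept (field values hypothetical):

import spray.json._

val modelJson = JsObject(
  "modelId" -> JsObject(
    "id" -> JsNumber(1),
    "name" -> JsString("example-model")),
  "modelType" -> JsString("Constant"))

// getModelId(modelJson) would return Some(ModelId(1, "example-model")).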
Example 97
Source File: MultilabelModelJson.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.models.multilabel.json

import com.eharmony.aloha.id.ModelId
import com.eharmony.aloha.models.multilabel.PluginInfo
import com.eharmony.aloha.models.reg.json.{Spec, SpecJson}
import spray.json.DefaultJsonProtocol._
import spray.json.{JsObject, JsonFormat, RootJsonFormat}

import scala.collection.immutable.ListMap
import com.eharmony.aloha.factory.ScalaJsonFormats

trait MultilabelModelJson extends SpecJson with ScalaJsonFormats {

  protected[this] case class Plugin(`type`: String)

  
  protected[this] case class MultilabelData[K](
      modelType: String,
      modelId: ModelId,
      features: ListMap[String, Spec],
      numMissingThreshold: Option[Int],
      labelsInTrainingSet: Vector[K],
      labelsOfInterest: Option[String],
      underlying: JsObject
  ) extends PluginInfo[K]

  protected[this] final implicit def multilabelDataJsonFormat[K: JsonFormat]: RootJsonFormat[MultilabelData[K]] =
    jsonFormat7(MultilabelData[K])

  protected[this] final implicit val pluginJsonFormat: RootJsonFormat[Plugin] =
    jsonFormat1(Plugin)
} 
Example 98
Source File: RelatedResponseSpec.scala    From jsonapi-scala   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.qvantel.jsonapi

import org.specs2.ScalaCheck
import org.specs2.mutable.Specification
import spray.json.{JsArray, JsNull, JsObject}
import _root_.spray.json._
import _root_.spray.json.DefaultJsonProtocol._

class RelatedResponseSpec extends Specification with ScalaCheck {
  implicit val apiRoot: com.qvantel.jsonapi.ApiRoot = ApiRoot(None)

  @jsonApiResource final case class Test(id: String, name: String, age: Int)
  @jsonApiResource final case class Test2(id: String, name: String, age: Int)

  val test: Option[Test]      = Some(Test("teståöä•Ωé®", "someName", 20)) // test UTF-8
  val emptyTest: Option[Test] = None
  val tests: List[Test]       = List(Test("test 1", "someName1", 20), Test("test 2", "someName2", 21))
  val emptyTests: List[Test]  = List.empty

  def transformToTest2(in: Test): Test2 = Test2(in.id + "-2", in.name, in.age)

  "correctly write to one none case" in {
    RelatedResponse(emptyTest).toResponse must be equalTo JsObject(
      "data" -> JsNull
    )

    RelatedResponse(emptyTest).map(transformToTest2).toResponse must be equalTo JsObject(
      "data" -> JsNull
    )
  }

  "correctly write to one some case" in {
    val answer = rawOne(test.get)

    RelatedResponse(test).toResponse must be equalTo answer
    RelatedResponse(test.get).toResponse must be equalTo answer

    val transformedAnswer = rawOne(transformToTest2(test.get))
    RelatedResponse(test).map(transformToTest2).toResponse must be equalTo transformedAnswer
    RelatedResponse(test.get).map(transformToTest2).toResponse must be equalTo transformedAnswer
  }

  "correctly write to many empty case" in {
    RelatedResponse(emptyTests).toResponse must be equalTo JsObject(
      "data" -> JsArray.empty
    )

    RelatedResponse(emptyTests).map(transformToTest2).toResponse must be equalTo JsObject(
      "data" -> JsArray.empty
    )
  }

  "correctly write to many non-empty case" in {
    val answer = rawCollection(tests)

    RelatedResponse(tests).toResponse must be equalTo answer
    RelatedResponse(tests.toSeq).toResponse must be equalTo answer
    RelatedResponse(tests.toIterable).toResponse must be equalTo answer
    RelatedResponse(tests.toSet).toResponse must be equalTo answer

    val transformedAnswer = rawCollection(tests.map(transformToTest2))

    RelatedResponse(tests).map(transformToTest2).toResponse must be equalTo transformedAnswer
    RelatedResponse(tests.toSeq).map(transformToTest2).toResponse must be equalTo transformedAnswer
    RelatedResponse(tests.toIterable).map(transformToTest2).toResponse must be equalTo transformedAnswer
    RelatedResponse(tests.toSet).map(transformToTest2).toResponse must be equalTo transformedAnswer
  }

  "correctly write sparse fieldsets" in {
    implicit val sparseFields: Map[String, List[String]] = Map("tests" -> List("age"), "test2s" -> List("age"))
    val answer                                           = rawOne(test.get)

    RelatedResponse(test).toResponse must be equalTo answer
    RelatedResponse(test.get).toResponse must be equalTo answer

    val transformedAnswer = rawOne(transformToTest2(test.get))

    RelatedResponse(test).map(transformToTest2).toResponse must be equalTo transformedAnswer
    RelatedResponse(test.get).map(transformToTest2).toResponse must be equalTo transformedAnswer
  }
} 
Example 99
Source File: ElasticSearchClient.scala    From pipelines-examples   with Apache License 2.0 5 votes vote down vote up
package warez

import akka.NotUsed
import akka.kafka.ConsumerMessage.CommittableOffset
import akka.stream.alpakka.elasticsearch.{ ReadResult, WriteMessage, WriteResult }
import akka.stream.alpakka.elasticsearch.scaladsl.{ ElasticsearchFlow, ElasticsearchSource }
import akka.stream.scaladsl.Source

import org.apache.http.HttpHost
import org.elasticsearch.client.RestClient
import spray.json.{ JsObject, JsonFormat }

import pipelines.akkastream.scaladsl.FlowWithOffsetContext


class ElasticSearchClient[T: JsonFormat](hostname: String, port: Int, indexName: String, typeName: String) {

  implicit val esClient: RestClient = RestClient.builder(new HttpHost(hostname, port)).build()

  def indexFlow(): FlowWithOffsetContext[WriteMessage[T, NotUsed], WriteResult[T, CommittableOffset]] =
    ElasticsearchFlow.createWithContext[T, CommittableOffset](indexName, typeName)

  def querySource(searchCriteria: String): Source[ReadResult[JsObject], NotUsed] =
    ElasticsearchSource
      .create(indexName, typeName, query = s"""{
          "bool": {
            "must": {
              "query_string": {
                "query": "$searchCriteria"
              }
            }
          }
        }""")
} 
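A hedged alternative for the query body above: building it with JsObject instead of string interpolation keeps the JSON well-formed even when searchCriteria contains quotes or backslashes.

import spray.json._

// Same query body as the interpolated string above.
def queryJson(searchCriteria: String): String =
  JsObject(
    "bool" -> JsObject(
      "must" -> JsObject(
        "query_string" -> JsObject(
          "query" -> JsString(searchCriteria))))).compactPrint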
Example 100
Source File: JsonMQSerializer.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.Charset

import spray.json.JsObject

import io.deepsense.workflowexecutor.communication.mq.MQSerializer

class JsonMQSerializer(
  jsonSerializers: Seq[JsonMessageSerializer],
  parent: Option[JsonMQSerializer] = None
) extends MQSerializer with JsonMessageSerializer {

  private val combinedJsonSerializers = {
    jsonSerializers.tail.foldLeft(jsonSerializers.head.serialize) {
      case (acc, serializer) =>
        acc.orElse(serializer.serialize)
    }
  }

  override val serialize: PartialFunction[Any, JsObject] = {
    parent match {
      case Some(p) => combinedJsonSerializers.orElse(p.serialize)
      case None => combinedJsonSerializers
    }
  }

  override def serializeMessage(message: Any): Array[Byte] = {
    serialize(message).compactPrint.getBytes(Global.charset)
  }

  def orElse(next: JsonMQSerializer): JsonMQSerializer =
    new JsonMQSerializer(jsonSerializers, Some(next))
} 
Example 101
Source File: GlobalMQDeserializerSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.StandardCharsets

import org.scalatest.mockito.MockitoSugar
import spray.json.{JsArray, JsObject, JsString}

import io.deepsense.commons.StandardSpec
import io.deepsense.models.workflows.Workflow
import io.deepsense.workflowexecutor.communication.message.global._
import io.deepsense.workflowexecutor.communication.mq.json.Global.GlobalMQDeserializer

class GlobalMQDeserializerSpec
  extends StandardSpec
  with MockitoSugar {

  "GlobalMQDeserializer" should {
    "deserialize Launch messages" in {
      val workflowId = Workflow.Id.randomId
      val nodesToExecute = Vector(Workflow.Id.randomId, Workflow.Id.randomId, Workflow.Id.randomId)
      val jsNodesToExecute = JsArray(nodesToExecute.map(id => JsString(id.toString)))

      val rawMessage = JsObject(
        "messageType" -> JsString("launch"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId.toString),
          "nodesToExecute" -> jsNodesToExecute
        )
      )

      val readMessage: Any = serializeAndRead(rawMessage)
      readMessage shouldBe Launch(workflowId, nodesToExecute.toSet)
    }

    "deserialize Heartbeat messages" in {
      val workflowId = "foo-workflow"
      val rawMessage = JsObject(
        "messageType" -> JsString("heartbeat"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId)))
      serializeAndRead(rawMessage) shouldBe Heartbeat(workflowId)
    }
    "deserialize PoisonPill messages" in {
      val rawMessage = JsObject(
        "messageType" -> JsString("poisonPill"),
        "messageBody" -> JsObject())
      serializeAndRead(rawMessage) shouldBe PoisonPill()
    }
    "deserialize Ready messages" in {
      val sessionId = "foo-session"
      val rawMessage = JsObject(
        "messageType" -> JsString("ready"),
        "messageBody" -> JsObject(
          "sessionId" -> JsString(sessionId)))
      serializeAndRead(rawMessage) shouldBe Ready(sessionId)
    }
  }

  private def serializeAndRead(
    rawMessage: JsObject): Any = {
    val bytes = rawMessage.compactPrint.getBytes(StandardCharsets.UTF_8)
    GlobalMQDeserializer.deserializeMessage(bytes)
  }
} 
Example 102
Source File: InnerWorkflow.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.params.custom

import spray.json.JsObject

import io.deepsense.deeplang.DOperation
import io.deepsense.deeplang.doperations.custom.{Sink, Source}
import io.deepsense.graph.DeeplangGraph.DeeplangNode
import io.deepsense.graph.{Node, DeeplangGraph}

case class InnerWorkflow(
   graph: DeeplangGraph,
   thirdPartyData: JsObject,
   publicParams: List[PublicParam] = List.empty) {

  require(findNodeOfType(Source.id).isDefined, "Inner workflow must have source node")
  require(findNodeOfType(Sink.id).isDefined, "Inner workflow must have sink node")

  val source: DeeplangNode = findNodeOfType(Source.id).get
  val sink: DeeplangNode = findNodeOfType(Sink.id).get

  private def findNodeOfType(operationId: DOperation.Id): Option[DeeplangNode] = {
    graph.nodes.find(_.value.id == operationId)
  }

}

object InnerWorkflow {
  val empty = InnerWorkflow(
    DeeplangGraph(Set(Node(Node.Id.randomId, Source()), Node(Node.Id.randomId, Sink()))),
    JsObject())
}

case class PublicParam(nodeId: Node.Id, paramName: String, publicName: String) 
Example 103
Source File: CustomTransformerFactory.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.utils

import spray.json.JsObject

import io.deepsense.deeplang.InnerWorkflowParser
import io.deepsense.deeplang.doperables.{CustomTransformer, ParamWithValues}
import io.deepsense.deeplang.params.custom.PublicParam

object CustomTransformerFactory {

  def createCustomTransformer(
      innerWorkflowParser: InnerWorkflowParser,
      innerWorkflowJson: JsObject): CustomTransformer = {
    val innerWorkflow = innerWorkflowParser.parse(innerWorkflowJson)
    val selectedParams: Seq[ParamWithValues[_]] =
        innerWorkflow.publicParams.flatMap {
      case PublicParam(nodeId, paramName, publicName) =>
        innerWorkflow.graph.nodes.find(_.id == nodeId)
          .flatMap(node => node.value.params.find(_.name == paramName)
          .map(p => {
            ParamWithValues(
              param = p.replicate(publicName),
              defaultValue = node.value.getDefault(p),
              setValue = node.value.get(p))
          }))
    }
    CustomTransformer(innerWorkflow, selectedParams)
  }
} 
Example 104
Source File: ParamsSerialization.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.serialization

import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}

import io.deepsense.deeplang.catalogs.doperable.exceptions.NoParameterlessConstructorInClassException
import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.{ExecutionContext, TypeUtils}

trait ParamsSerialization {

  self: Params =>

  def saveObjectWithParams(ctx: ExecutionContext, path: String): Unit = {
    saveMetadata(ctx, path)
    saveParams(ctx, path)
  }

  def loadAndSetParams(ctx: ExecutionContext, path: String): this.type = {
    setParams(loadParams(ctx, path))
  }

  protected def saveMetadata(ctx: ExecutionContext, path: String) = {
    val metadataFilePath = ParamsSerialization.metadataFilePath(path)
    val metadataJson = JsObject(
      ParamsSerialization.classNameKey -> JsString(this.getClass.getName)
    )
    JsonObjectPersistence.saveJsonToFile(ctx, metadataFilePath, metadataJson)
  }

  protected def saveParams(ctx: ExecutionContext, path: String): Unit = {
    val paramsFilePath = ParamsSerialization.paramsFilePath(path)
    JsonObjectPersistence.saveJsonToFile(ctx, paramsFilePath, paramValuesToJson)
  }

  protected def loadParams(ctx: ExecutionContext, path: String): JsValue = {
    JsonObjectPersistence.loadJsonFromFile(ctx, ParamsSerialization.paramsFilePath(path))
  }

  private def setParams(paramsJson: JsValue): this.type = {
    this.set(paramPairsFromJson(paramsJson): _*)
  }
}

object ParamsSerialization {
  val classNameKey = "className"
  val paramsFileName = "params"
  val metadataFileName = "metadata"

  def load(ctx: ExecutionContext, path: String): Loadable = {
    import DefaultJsonProtocol._
    val metadataPath = metadataFilePath(path)
    val metadataJson: JsObject =
      JsonObjectPersistence.loadJsonFromFile(ctx, metadataPath).asJsObject
    val className = metadataJson.fields(classNameKey).convertTo[String]
    val clazz: Class[_] = Class.forName(className)
    val loadable = TypeUtils.createInstance(TypeUtils.constructorForClass(clazz)
        .getOrElse(throw new NoParameterlessConstructorInClassException(clazz.getCanonicalName))
    ).asInstanceOf[Loadable]
    loadable.load(ctx, path)
  }

  def metadataFilePath(path: String): String = {
    PathsUtils.combinePaths(path, metadataFileName)
  }

  def paramsFilePath(path: String): String = {
    PathsUtils.combinePaths(path, paramsFileName)
  }
} 
Example 105
Source File: AbstractParamSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.params

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{JsObject, JsValue}

abstract class AbstractParamSpec[T, U <: Param[T]]
  extends WordSpec
  with Matchers
  with MockitoSugar {

  def className: String

  def paramFixture: (U, JsValue)  // param + its json description

  def valueFixture: (T, JsValue)  // value + its json description

  val defaultValue: T = valueFixture._1

  def serializeDefaultValue(default: T): JsValue = paramFixture._1.valueToJson(default)

  className should {
    "serialize itself to JSON" when {
      "default value is not provided" in {
        val (param, expectedJson) = paramFixture
        param.toJson(maybeDefault = None) shouldBe expectedJson
      }
      "default value is provided" in {
        val (param, expectedJson) = paramFixture
        val expectedJsonWithDefault = JsObject(
          expectedJson.asJsObject.fields + ("default" -> serializeDefaultValue(defaultValue))
        )
        param.toJson(maybeDefault = Some(defaultValue)) shouldBe expectedJsonWithDefault
      }
    }
  }

  it should {
    "serialize value to JSON" in {
      val param = paramFixture._1
      val (value, expectedJson) = valueFixture
      param.valueToJson(value) shouldBe expectedJson
    }
  }

  it should {
    "deserialize value from JSON" in {
      val param = paramFixture._1
      val (expectedValue, valueJson) = valueFixture
      val extractedValue = param.valueFromJson(valueJson)
      extractedValue shouldBe expectedValue
    }
  }
} 
Example 106
Source File: AbstractChoiceParamSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.params.choice

import scala.reflect.runtime.universe._

import spray.json.{DeserializationException, JsObject}

import io.deepsense.deeplang.params.exceptions.NoArgumentConstructorRequiredException
import io.deepsense.deeplang.params.{AbstractParamSpec, Param}

abstract class AbstractChoiceParamSpec[T, U <: Param[T]] extends AbstractParamSpec[T, U] {

  protected def createChoiceParam[V <: Choice : TypeTag](
    name: String, description: String): Param[V]

  className should {
    "throw an exception when choices don't have no-arg constructor" in {
      a[NoArgumentConstructorRequiredException] should be thrownBy
        createChoiceParam[BaseChoice]("name", "description")
    }
    "throw an exception when unsupported choice is given" in {
      a[DeserializationException] should be thrownBy
        createChoiceParam[ChoiceABC]("name", "description").valueFromJson(
          JsObject(
            "unsupportedClass" -> JsObject()
          )
        )
    }
    "throw an exception when not all choices are declared" in {
      an[IllegalArgumentException] should be thrownBy
        createChoiceParam[ChoiceWithoutDeclaration]("name", "description")
    }
  }
} 
Example 107
Source File: FitSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang._
import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.MockDOperablesFactory._
import io.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.params.ParamsMatchers._

class FitSpec extends UnitSpec with DeeplangTestSupport {

  "Fit" should {
    "fit input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testFit(op: Fit, expectedTransformer: Transformer): Unit = {
        val Vector(outputTransformer: Transformer) =
          op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])
        outputTransformer shouldBe expectedTransformer
      }
      val op1 = Fit()
      testFit(op1, transformer1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testFit(op2, transformer2)
    }
    "not modify params in input Estimator instance upon execution" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "infer Transformer from input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testInference(op: Fit, expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
        val inputDF = DataFrame.forInference(createSchema())
        val (knowledge, warnings) =
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        // Currently, InferenceWarnings are always empty.
        warnings shouldBe InferenceWarnings.empty
        val Vector(transformerKnowledge) = knowledge
        transformerKnowledge shouldBe expectedTransformerKnowledge
      }
      val op1 = Fit()
      testInference(op1, transformerKnowledge1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testInference(op2, transformerKnowledge2)
    }
    "not modify params in input Estimator instance upon inference" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      val inputDF = DataFrame.forInference(createSchema())
      op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "throw Exception" when {
      "there are more than one Estimator in input Knowledge" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimators = Set[DOperable](new MockEstimator, new MockEstimator)

        val op = Fit()
        a[TooManyPossibleTypesException] shouldBe thrownBy {
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimators), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
      "Estimator's dynamic parameters are invalid" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimator = new MockEstimator
        val fit = Fit().setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
        a[DeepLangMultiException] shouldBe thrownBy {
          fit.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
    }
  }
} 
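The dynamic parameters passed to setEstimatorParams above are nothing more than spray-json objects keyed by parameter name, e.g. JsObject(estimator.paramA.name -> JsNumber(2)). The sketch below shows how such a dictionary could be assembled for several parameters at once; buildParams and the parameter names are illustrative helpers, not part of the Seahorse API.

import spray.json._

object DynamicParamsSketch extends App {
  // Dynamic parameters are a JSON object keyed by parameter name.
  def buildParams(values: (String, Double)*): JsObject =
    JsObject(values.map { case (name, v) => name -> (JsNumber(v): JsValue) }: _*)

  println(buildParams("paramA" -> 2.0).compactPrint) // {"paramA":2.0}
}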
Example 108
Source File: GridSearchSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.report.Report
import io.deepsense.deeplang.doperations.MockDOperablesFactory.{MockEstimator, MockEvaluator}
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.{DKnowledge, DeeplangTestSupport, UnitSpec}

class GridSearchSpec extends UnitSpec with DeeplangTestSupport {
  "GridSearch" should {
    "infer knowledge when dynamic parameters are valid" in {
      val inputDF = DataFrame.forInference(createSchema())
      val estimator = new MockEstimator
      val evaluator = new MockEvaluator

      val gridSearch = GridSearch()
      gridSearch.inferKnowledgeUntyped(
          Vector(DKnowledge(estimator), DKnowledge(inputDF), DKnowledge(evaluator)))(mock[InferContext]) shouldBe
        (Vector(DKnowledge(Report())), InferenceWarnings.empty)
    }
    "throw Exception" when {
      "Estimator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), None)
      }
      "Evaluator's dynamic parameters are invalid" in {
        checkMultiException(None, Some(-2))
      }
      "Both Estimator's and Evaluator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), Some(-2))
      }
    }
  }

  private def checkMultiException(
      estimatorParamValue: Option[Double],
      evaluatorParamValue: Option[Double]): Unit = {

    val inputDF = DataFrame.forInference(createSchema())
    val estimator = new MockEstimator
    val evaluator = new MockEvaluator

    val gridSearch = GridSearch()
      .setEstimatorParams(prepareParamDictionary(estimator.paramA.name, estimatorParamValue))
      .setEvaluatorParams(prepareParamDictionary(evaluator.paramA.name, evaluatorParamValue))

    val multiException = the [DeepLangMultiException] thrownBy {
      gridSearch.inferKnowledgeUntyped(
        Vector(
          DKnowledge(estimator),
          DKnowledge(inputDF),
          DKnowledge(evaluator)))(mock[InferContext])
    }

    val invalidParamCount =
      estimatorParamValue.map(_ => 1).getOrElse(0) +
      evaluatorParamValue.map(_ => 1).getOrElse(0)

    multiException.exceptions should have size invalidParamCount
  }

  private def prepareParamDictionary(paramName: String, maybeValue: Option[Double]): JsObject = {
    val jsonEntries = maybeValue.map(
        value => Seq(paramName -> JsNumber(value)))
      .getOrElse(Seq())
    JsObject(jsonEntries: _*)
  }
} 
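The prepareParamDictionary helper above yields a one-entry object for Some(value) and an empty object for None, which is how the test toggles which operable gets invalid dynamic parameters. A standalone copy of that idea, assuming only spray-json:

import spray.json._

object ParamDictionarySketch extends App {
  // Some(value) yields a one-entry object, None yields an empty one.
  def prepareParamDictionary(paramName: String, maybeValue: Option[Double]): JsObject =
    JsObject(maybeValue.map(v => paramName -> (JsNumber(v): JsValue)).toSeq: _*)

  println(prepareParamDictionary("paramA", Some(-2.0)).compactPrint) // {"paramA":-2.0}
  println(prepareParamDictionary("paramA", None).compactPrint)       // {}
}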
Example 109
Source File: FitPlusTransformSpec.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.MockDOperablesFactory._
import io.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.InferContext
import io.deepsense.deeplang.{DKnowledge, DOperable, ExecutionContext, UnitSpec}

class FitPlusTransformSpec extends UnitSpec {

  "FitPlusTransform" when {
    "executed" should {
      "pass parameters to the input Estimator produce a Transformer and transformed DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testExecute(
          op: FitPlusTransform,
          expectedDataFrame: DataFrame,
          expectedTransformer: Transformer): Unit = {
          val results = op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])
          val outputDataFrame = results(0).asInstanceOf[DataFrame]
          val outputTransformer = results(1).asInstanceOf[Transformer]

          outputDataFrame shouldBe expectedDataFrame
          outputTransformer shouldBe expectedTransformer
        }

        testExecute(fpt, transformedDataFrame1, transformer1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testExecute(fpt, transformedDataFrame2, transformer2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }

    }
    "inferring knowledge" should {
      "take parameters from the input Estimator, infer Transformer and then a DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testInference(
          op: FitPlusTransform,
          expectedDataFrameKnowledge: DKnowledge[DataFrame],
          expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
          val (Vector(outputDataFrameKnowledge, outputTransformerKnowledge), _) =
            op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]]))(mock[InferContext])

          outputDataFrameKnowledge shouldBe expectedDataFrameKnowledge
          outputTransformerKnowledge shouldBe expectedTransformerKnowledge
        }

        testInference(fpt, transformedDataFrameKnowledge1, transformerKnowledge1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testInference(fpt, transformedDataFrameKnowledge2, transformerKnowledge2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }
      "throw exceptions" when {
        "input Estimator Knowledge consist more than one type" in {
          val estimators = Set[DOperable](new MockEstimator, new MockEstimator)
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimators), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          a[TooManyPossibleTypesException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
        "Estimator's dynamic parameters are invalid" in {
          val estimator = new MockEstimator
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
          a[DeepLangMultiException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
      }
    }
  }
} 
Example 110
Source File: InnerWorkflowTestFactory.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables

import spray.json.JsObject

import io.deepsense.deeplang.DOperation
import io.deepsense.deeplang.doperations.ConvertType
import io.deepsense.deeplang.doperations.custom.{Sink, Source}
import io.deepsense.deeplang.params.custom.{InnerWorkflow, PublicParam}
import io.deepsense.deeplang.params.selections.{MultipleColumnSelection, NameColumnSelection}
import io.deepsense.graph.{DeeplangGraph, Edge, Node}

object InnerWorkflowTestFactory {

  val sourceNodeId = "2603a7b5-aaa9-40ad-9598-23f234ec5c32"
  val sinkNodeId = "d7798d5e-b1c6-4027-873e-a6d653957418"
  val innerNodeId = "b22bd79e-337d-4223-b9ee-84c2526a1b75"

  val sourceNode = Node(sourceNodeId, Source())
  val sinkNode = Node(sinkNodeId, Sink())

  private def createInnerNodeOperation(targetType: TargetTypeChoice): ConvertType = {
    val params = TypeConverter()
      .setTargetType(targetType)
      .setSelectedColumns(MultipleColumnSelection(Vector(NameColumnSelection(Set("column1")))))
      .paramValuesToJson
    new ConvertType().setParamsFromJson(params)
  }

  private def createInnerNode(targetType: TargetTypeChoice): Node[DOperation] =
    Node(innerNodeId, createInnerNodeOperation(targetType))

  def simpleGraph(
    targetType: TargetTypeChoice = TargetTypeChoices.StringTargetTypeChoice()): DeeplangGraph = {
    val innerNode = createInnerNode(targetType)
    DeeplangGraph(
      Set(sourceNode, sinkNode, innerNode),
      Set(Edge(sourceNode, 0, innerNode, 0), Edge(innerNode, 0, sinkNode, 0)))
  }

  def simpleInnerWorkflow(publicParams: List[PublicParam] = List.empty): InnerWorkflow = {
    InnerWorkflow(simpleGraph(), JsObject(), publicParams)
  }
} 
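Note that the factory above passes JsObject() for the inner workflow's third-party data slot: with no arguments it is simply spray-json's empty object. Any extra metadata would just be additional fields on that object; the "gui" key below is a made-up example, not a field the project defines.

import spray.json._

object ThirdPartyDataSketch extends App {
  val empty: JsObject = JsObject()
  val withGuiData: JsObject =
    JsObject("gui" -> JsObject("x" -> JsNumber(120), "y" -> JsNumber(80)))

  println(empty.compactPrint)       // {}
  println(withGuiData.compactPrint) // {"gui":{"x":120,"y":80}}
}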
Example 111
Source File: WorkflowProtocol.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.workflowexecutor.communication.mq.serialization.json

import spray.json.JsObject

import io.deepsense.models.json.graph.GraphJsonProtocol.GraphReader
import io.deepsense.models.workflows.{ExecutionReport, InferredState, WorkflowWithResults}
import io.deepsense.models.json.workflow.ExecutionReportJsonProtocol._
import io.deepsense.models.json.workflow.{InferredStateJsonProtocol, WorkflowJsonProtocol, WorkflowWithResultsJsonProtocol}
import io.deepsense.workflowexecutor.communication.message.workflow.AbortJsonProtocol._
import io.deepsense.workflowexecutor.communication.message.workflow.SynchronizeJsonProtocol._
import io.deepsense.workflowexecutor.communication.message.workflow._
import io.deepsense.workflowexecutor.communication.mq.json.Constants.MessagesTypes._
import io.deepsense.workflowexecutor.communication.mq.json.{DefaultJsonMessageDeserializer, DefaultJsonMessageSerializer, JsonMessageDeserializer, JsonMessageSerializer}

object WorkflowProtocol {
  val abort = "abort"
  val launch = "launch"
  val updateWorkflow = "updateWorkflow"
  val synchronize = "synchronize"

  object AbortDeserializer extends DefaultJsonMessageDeserializer[Abort](abort)

  object SynchronizeDeserializer extends DefaultJsonMessageDeserializer[Synchronize](synchronize)
  object SynchronizeSerializer extends DefaultJsonMessageSerializer[Synchronize](synchronize)

  case class UpdateWorkflowDeserializer(graphReader: GraphReader)
    extends JsonMessageDeserializer
    with UpdateWorkflowJsonProtocol {

    private val defaultDeserializer =
      new DefaultJsonMessageDeserializer[UpdateWorkflow](updateWorkflow)

    override def deserialize: PartialFunction[(String, JsObject), Any] =
      defaultDeserializer.deserialize
  }
} 
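The serializers and deserializers above all dispatch on a (message type, JsObject body) pair. The sketch below is a project-independent illustration of that dispatch using a PartialFunction over the same pair type; Synchronize and the "workflowId" field are stand-ins for illustration and are not the real Seahorse message types or their JSON layout.

import spray.json._
import spray.json.DefaultJsonProtocol._

object MessageDispatchSketch extends App {
  final case class Synchronize()

  // Dispatch on the message-type string, then read the JsObject body.
  val deserialize: PartialFunction[(String, JsObject), Any] = {
    case ("synchronize", _) => Synchronize()
    case ("abort", body)    => body.fields("workflowId").convertTo[String]
  }

  println(deserialize(("synchronize", JsObject())))
  println(deserialize(("abort", JsObject("workflowId" -> JsString("wf-1")))))
}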
Example 112
Source File: GraphJsonTestSupport.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.models.json.graph

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{DefaultJsonProtocol, JsObject}

import io.deepsense.deeplang.DOperation
import io.deepsense.graph.Endpoint

trait GraphJsonTestSupport
  extends WordSpec
  with MockitoSugar
  with DefaultJsonProtocol
  with Matchers {

  def assertEndpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Unit = {
    assert(edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString)
    assert(edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex)
  }

  def endpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Boolean = {
    edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString &&
    edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex
  }

  def mockOperation(
      inArity: Int,
      outArity: Int,
      id: DOperation.Id,
      name: String): DOperation = {

    val dOperation = mock[DOperation]
    when(dOperation.inArity).thenReturn(inArity)
    when(dOperation.outArity).thenReturn(outArity)
    when(dOperation.id).thenReturn(id)
    when(dOperation.name).thenReturn(name)
    dOperation
  }
}
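The helpers above compare an Endpoint against a JsObject with "nodeId" and "portIndex" fields. Since the Endpoint type itself lives in io.deepsense.graph, the sketch below only shows the JSON side: constructing such an object and reading the fields back with DefaultJsonProtocol, reusing a node id that appears earlier on this page.

import spray.json._
import spray.json.DefaultJsonProtocol._

object EndpointJsonSketch extends App {
  val edgeEndJs: JsObject = JsObject(
    "nodeId"    -> JsString("2603a7b5-aaa9-40ad-9598-23f234ec5c32"),
    "portIndex" -> JsNumber(0)
  )

  println(edgeEndJs.fields("nodeId").convertTo[String])
  println(edgeEndJs.fields("portIndex").convertTo[Int])
}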