spray.json.JsNumber Scala Examples

The following examples show how to use spray.json.JsNumber. They are extracted from open source projects; you can open the original project or source file by following the link above each example.
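Before the project examples, here is a minimal, self-contained sketch of the JsNumber API itself (assumptions: spray-json on the classpath and its DefaultJsonProtocol in scope; the object name JsNumberSketch is hypothetical). JsNumber wraps a BigDecimal, so the numeric constructors normalize to that representation, and the companion's Double overload returns JsNull for NaN or infinite input.

import spray.json._
import DefaultJsonProtocol._

object JsNumberSketch extends App {
  // Construction: the Int/Long/BigDecimal overloads produce a JsNumber directly.
  val a: JsNumber = JsNumber(42)
  // The Double overload is typed JsValue because NaN/Infinity map to JsNull.
  val b: JsValue = JsNumber(4.2)

  // Round-trip through the default protocol.
  assert(42.toJson == JsNumber(42))
  assert(JsNumber(42).convertTo[Int] == 42)

  // Extraction by pattern matching, the idiom used throughout the examples below.
  """{"value": 4.2}""".parseJson.asJsObject.fields("value") match {
    case JsNumber(n) => println(n.toDouble) // prints 4.2
    case other       => deserializationError(s"Expected a number but got $other")
  }
}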
Example 1
Source File: BasicTestPerformance4Ftp.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.concurrent.atomic.LongAdder

import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.data.Row
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.AssumptionViolatedException
import spray.json.{JsNumber, JsString, JsValue}

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._

abstract class BasicTestPerformance4Ftp extends BasicTestPerformance {
  private[this] val ftpHostname = value(PerformanceTestingUtils.FTP_HOSTNAME_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_HOSTNAME_KEY} is required"))

  private[this] val ftpPort = value(PerformanceTestingUtils.FTP_PORT_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_PORT_KEY} is required"))
    .toInt

  private[this] val ftpUser = value(PerformanceTestingUtils.FTP_USER_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_USER_KEY} is required"))

  private[this] val ftpPassword = value(PerformanceTestingUtils.FTP_PASSWORD_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_PASSWORD_KEY} is required"))

  
  protected val ftpSettings: Map[String, JsValue] = Map(
    // convert the hostname to IP address
    oharastream.ohara.connector.ftp.FTP_HOSTNAME_KEY  -> JsString(ftpHostname),
    oharastream.ohara.connector.ftp.FTP_PORT_KEY      -> JsNumber(ftpPort),
    oharastream.ohara.connector.ftp.FTP_USER_NAME_KEY -> JsString(ftpUser),
    oharastream.ohara.connector.ftp.FTP_PASSWORD_KEY  -> JsString(ftpPassword)
  )

  private[this] val csvInputFolderKey       = PerformanceTestingUtils.CSV_INPUT_KEY
  private[this] val csvOutputFolder: String = value(csvInputFolderKey).getOrElse("/input")

  private[this] val cleanupTestDataKey   = PerformanceTestingUtils.DATA_CLEANUP_KEY
  protected val cleanupTestData: Boolean = value(cleanupTestDataKey).forall(_.toBoolean)

  protected def setupInputData(timeout: Duration): (String, Long, Long) = {
    val client = ftpClient()
    try {
      if (!PerformanceTestingUtils.exists(client, csvOutputFolder))
        PerformanceTestingUtils.createFolder(client, csvOutputFolder)

      val result = generateData(
        numberOfRowsToFlush,
        timeout,
        (rows: Seq[Row]) => {
          val file        = s"$csvOutputFolder/${CommonUtils.randomString()}"
          val writer      = new BufferedWriter(new OutputStreamWriter(client.create(file)))
          val count       = new LongAdder()
          val sizeInBytes = new LongAdder()

          try {
            val cellNames: Set[String] = rows.head.cells().asScala.map(_.name).toSet
            writer
              .append(cellNames.mkString(","))
              .append("\n")
            rows.foreach(row => {
              val content = row.cells().asScala.map(_.value).mkString(",")
              count.increment()
              sizeInBytes.add(content.length)
              writer.append(content).append("\n")
            })
            (count.longValue(), sizeInBytes.longValue())
          } finally Releasable.close(writer)
        }
      )
      (csvOutputFolder, result._1, result._2)
    } finally Releasable.close(client)
  }

  protected[this] def ftpClient() =
    FileSystem.ftpBuilder
      .hostname(ftpHostname)
      .port(ftpPort)
      .user(ftpUser)
      .password(ftpPassword)
      .build
} 
Example 2
Source File: ConstantGenerator.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.primary.ConstantTimeSeries
import spray.json.{JsNumber, JsString, JsValue, _}


class ConstantGenerator(name: Option[String],
                        val value: Double) extends Generator[Double](name, "constant")
{
   override def timeseries(generators: String => Generator[Any]) = ConstantTimeSeries(value)

   override def toString = "Constant(" + name + ", " + value + ")"

   override def equals(o: Any) = o match {
      case that: ConstantGenerator => that.name == this.name && that.value == this.value
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "value" -> value.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object ConstantGenerator
{
   def apply(json: JsValue): ConstantGenerator = {

      val fields = json.asJsObject.fields
      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val value = fields("value") match {
         case JsNumber(n) => n.toDouble
      }

      new ConstantGenerator(name, value)
   }
} 
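A caveat worth noting about the apply method above: both the name and the value extractions use non-exhaustive matches, so a "value" field that is not a JsNumber fails with a bare MatchError. A minimal defensive variant (hypothetical helper, not part of TSimulus) delegates to spray-json's deserializationError for a descriptive failure:

import spray.json.{JsNumber, JsValue, deserializationError}

// Hypothetical helper: extract a required Double field or fail descriptively.
def requiredDouble(fields: Map[String, JsValue], key: String): Double =
  fields.get(key) match {
    case Some(JsNumber(n)) => n.toDouble
    case Some(other)       => deserializationError(s"'$key' must be a number, got $other")
    case None              => deserializationError(s"Missing required field '$key'")
  }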
Example 3
Source File: LListFormatSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }

class LListFormatsSpec extends Specification with BasicJsonProtocol {

  "The llistFormat" should {
    val empty = LNil
    val emptyObject = JsObject()
    val list = ("Z", 2) :*: ("a", 1) :*: LNil
    val obj = JsObject("$fields" -> JsArray(JsString("Z"), JsString("a")), "Z" -> JsNumber(2), "a" -> JsNumber(1))
    val nested = ("b", list) :*: LNil
    val nestedObj = JsObject("$fields" -> JsArray(JsString("b")), "b" -> obj)
    "convert an empty list to JObject" in {
      Converter.toJsonUnsafe(empty) mustEqual emptyObject
    }
    "convert a list to JObject" in {
      Converter.toJsonUnsafe(list) mustEqual obj
    }
    "convert a nested list to JObject" in {
      Converter.toJsonUnsafe(nested) mustEqual nestedObj
    }
    "convert a JObject to list" in {
      Converter.fromJsonUnsafe[Int :*: Int :*: LNil](obj) mustEqual list
    }
    "convert a nested JObject to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: LNil](nestedObj) mustEqual nested
    }

    val obj2 = JsObject("$fields" -> JsArray(JsString("f")), "f" -> JsString("foo"))
    val nested2Obj = JsObject("$fields" -> JsArray(JsString("b"), JsString("c")), "b" -> obj, "c" -> obj2)

    val list2 = ("f", "foo") :*: LNil
    val nested2 = ("b", list) :*: ("c", list2) :*: LNil

    "convert a 2 nested JObjects to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: (String :*: LNil) :*: LNil](nested2Obj) mustEqual nested2
    }
  }
} 
Example 4
Source File: JavaPrimitiveSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.lang.{ Integer => JInteger, Long => JLong, Boolean => JBoolean,
  Float => JFloat, Double => JDouble, Byte => JByte, Short => JShort,
  Character => JCharacter }

class JavaPrimitiveFormatsSpec extends Specification with BasicJsonProtocol {
  "The JIntegerJsonFormat" should {
    "convert an JInteger to a JsNumber" in {
      Converter.toJsonUnsafe[JInteger](42: JInteger) mustEqual JsNumber(42)
    }
    "convert a JsNumber to an Int" in {
      Converter.fromJsonUnsafe[JInteger](JsNumber(42)) mustEqual (42: JInteger)
    }
  }

  "The JLongJsonFormat" should {
    "convert a JLong to a JsNumber" in {
      Converter.toJsonUnsafe[JLong](7563661897011259335L: JLong) mustEqual JsNumber(7563661897011259335L)
    }
    "convert a JsNumber to a JLong" in {
      Converter.fromJsonUnsafe[JLong](JsNumber(7563661897011259335L)) mustEqual (7563661897011259335L: JLong)
    }
  }

  "The JFloatJsonFormat" should {
    "convert a JFloat to a JsNumber" in {
      Converter.toJsonUnsafe[JFloat](4.2f: JFloat) mustEqual JsNumber(4.2f)
    }
    "convert a JsNumber to a JFloat" in {
      Converter.fromJsonUnsafe[JFloat](JsNumber(4.2f)) mustEqual (4.2f: JFloat)
    }
  }

  "The JDoubleJsonFormat" should {
    "convert a JDouble to a JsNumber" in {
      Converter.toJsonUnsafe[JDouble](4.2: JDouble) mustEqual JsNumber(4.2)
    }
    "convert a JsNumber to a JDouble" in {
      Converter.fromJsonUnsafe[JDouble](JsNumber(4.2)) mustEqual (4.2: JDouble)
    }
  }

  "The JByteJsonFormat" should {
    "convert a JByte to a JsNumber" in {
      Converter.toJsonUnsafe[JByte](42.toByte: JByte) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JByte" in {
      Converter.fromJsonUnsafe[JByte](JsNumber(42)) mustEqual (42.toByte: JByte)
    }
  }

  "The JShortJsonFormat" should {
    "convert a JShort to a JsNumber" in {
      Converter.toJsonUnsafe(42.toShort: JShort) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JShort" in {
      Converter.fromJsonUnsafe[JShort](JsNumber(42)) mustEqual (42.toShort: JShort)
    }
  }

  "The JBooleanJsonFormat" should {
    "convert true to a JsTrue" in { Converter.toJsonUnsafe[JBoolean](true: JBoolean) mustEqual JsTrue }
    "convert false to a JsFalse" in { Converter.toJsonUnsafe[JBoolean](false: JBoolean) mustEqual JsFalse }
    "convert a JsTrue to true" in { Converter.fromJsonUnsafe[JBoolean](JsTrue) mustEqual true }
    "convert a JsFalse to false" in { Converter.fromJsonUnsafe[JBoolean](JsFalse) mustEqual false }
  }

  "The JCharacterJsonFormat" should {
    "convert a JCharacter to a JsString" in {
      Converter.toJsonUnsafe[JCharacter]('c': JCharacter) mustEqual JsString("c")
    }
    "convert a JsString to a JCharacter" in {
      Converter.fromJsonUnsafe[JCharacter](JsString("c")) mustEqual ('c': JCharacter)
    }
  }
} 
Example 5
Source File: JavaExtraFormatsSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.util.{ UUID, Optional }
import java.net.{ URI, URL }
import java.io.File

class JavaExtraFormatsSpec extends Specification with BasicJsonProtocol {
  case class Person(name: Optional[String], value: Optional[Int])
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Optional[String]]("name")
          val value = unbuilder.readField[Optional[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "The uuidStringIso" should {
    val uuid = UUID.fromString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    "convert a UUID to JsString" in {
      Converter.toJsonUnsafe(uuid) mustEqual JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    }
    "convert the JsString back to the UUID" in {
      Converter.fromJsonUnsafe[UUID](JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")) mustEqual uuid
    }
  }

  "The uriStringIso" should {
    val uri = new URI("http://localhost")
    "convert a URI to JsString" in {
      Converter.toJsonUnsafe(uri) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URI](JsString("http://localhost")) mustEqual uri
    }
  }

  "The urlStringIso" should {
    val url = new URL("http://localhost")
    "convert a URL to JsString" in {
      Converter.toJsonUnsafe(url) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URL](JsString("http://localhost")) mustEqual url
    }
  }

  "The fileStringIso" should {
    val f = new File("/tmp")
    val f2 = new File(new File("src"), "main")
    "convert a File to JsString" in {
      Converter.toJsonUnsafe(f) mustEqual JsString("file:///tmp/")
    }
    "convert a relative path to JsString" in {
      // https://tools.ietf.org/html/rfc3986#section-4.2
      Converter.toJsonUnsafe(f2) mustEqual JsString("src/main")
    }
    "convert the JsString back to the File" in {
      Converter.fromJsonUnsafe[File](JsString("file:///tmp/")) mustEqual f
    }
    "convert the JsString back to the relative path" in {
      Converter.fromJsonUnsafe[File](JsString("src/main")) mustEqual f2
    }
  }

  "The optionalFormat" should {
    "convert Optional.empty to JsNull" in {
      Converter.toJsonUnsafe(Optional.empty[Int]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Optional[Int]](JsNull) mustEqual Optional.empty[Int]
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Optional.of("Hello")) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Optional[String]](JsString("Hello")) mustEqual Optional.of("Hello")
    }
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(Optional.empty[String], Optional.empty[Int])) mustEqual JsObject()
    }
  }
} 
Example 6
Source File: BuilderSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

class BuilderSpec extends Specification with BasicJsonProtocol {
  case class Person(name: String, value: Int)
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[String]("name")
          val value = unbuilder.readField[Int]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "Custom format using builder" should {
    val p1 = Person("Alice", 1)
    val personJs = JsObject("name" -> JsString("Alice"), "value" -> JsNumber(1))
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
  }
} 
Example 7
Source File: IsoLListFormatSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import org.specs2.mutable.Specification

class IsoLListFormatSpec extends Specification with BasicJsonProtocol {
  sealed trait Contact
  case class Person(name: String, value: Option[Int]) extends Contact
  case class Organization(name: String, value: Option[Int]) extends Contact

  implicit val personIso: IsoLList.Aux[Person, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { p: Person => ("name", p.name) :*: ("value", p.value) :*: LNil })
    { in => Person(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten) }

  implicit val organizationIso: IsoLList.Aux[Organization, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { o: Organization => ("name", o.name) :*: ("value", o.value) :*: LNil })
    { in => Organization(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten) }

  implicit val ContactFormat: JsonFormat[Contact] = flatUnionFormat2[Contact, Person, Organization]("$type")

  val p1 = Person("Alice", Some(1))
  val personJs = JsObject("$fields" -> JsArray(JsString("name"), JsString("value")),
    "name" -> JsString("Alice"), "value" -> JsNumber(1))
  val c1: Contact = Organization("Company", None)
  val contactJs =
    JsObject(
      "$type" -> JsString("Organization"),
      "$fields" -> JsArray(JsString("name"), JsString("value")),
      "name" -> JsString("Company")
    )
  "The isomorphism from a custom type to LList" should {
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
    "convert from a union value to JObject" in {
      Converter.toJsonUnsafe(c1) mustEqual contactJs
    }
  }
} 
Example 8
Source File: TupleFormatsSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsArray }
import org.specs2.mutable._
import scala.Right

class TupleFormatsSpec extends Specification with BasicJsonProtocol {

  "The tuple1Format" should {
    "convert (42) to a JsNumber" in {
      Converter.toJsonUnsafe(Tuple1(42)) mustEqual JsArray(JsNumber(42))
    }
    "be able to convert a JsNumber to a Tuple1[Int]" in {
      Converter.fromJsonUnsafe[Tuple1[Int]](JsArray(JsNumber(42))) mustEqual Tuple1(42)
    }
  }

  "The tuple2Format" should {
    val json = JsArray(JsNumber(42), JsNumber(4.2))
    "convert (42, 4.2) to a JsArray" in {
      Converter.toJsonUnsafe((42, 4.2)) mustEqual json
    }
    "be able to convert a JsArray to a (Int, Double)]" in {
      Converter.fromJsonUnsafe[(Int, Double)](json) mustEqual (42, 4.2)
    }
  }

  "The tuple3Format" should {
    val json = JsArray(JsNumber(42), JsNumber(4.2), JsString("hello"))
    "convert (42, 4.2, \"hello\") to a JsArray" in {
      Converter.toJsonUnsafe((42, 4.2, "hello")) mustEqual json
    }
    "be able to convert a JsArray to a (Int, Double, Int)]" in {
      Converter.fromJsonUnsafe[(Int, Double, String)](json) mustEqual (42, 4.2, "hello")
    }
  }

} 
Example 9
Source File: UnionFormatSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

class UnionFormatsSpec extends Specification with BasicJsonProtocol {
  sealed trait Fruit
  case class Apple() extends Fruit
  sealed trait Citrus extends Fruit
  case class Orange() extends Citrus
  implicit object AppleJsonFormat extends JsonFormat[Apple] {
    def write[J](x: Apple, builder: Builder[J]): Unit =
      {
        builder.beginObject()
        builder.addField("x", 0)
        builder.endObject()
      }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Apple =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 0) Apple()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }
  implicit object OrangeJsonFormat extends JsonFormat[Orange] {
    def write[J](x: Orange, builder: Builder[J]): Unit =
      {
        builder.beginObject()
        builder.addField("x", 1)
        builder.endObject()
      }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Orange =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 1) Orange()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }
  val fruit: Fruit = Apple()
  "The unionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = unionFormat2[Fruit, Apple, Orange]
    val fruitJson = JsObject("value" ->  JsObject("x" -> JsNumber(0)), "type" -> JsString("Apple"))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson
    }
    "convert JObject back to ADT" in {
      Converter.fromJsonUnsafe[Fruit](fruitJson) mustEqual fruit
    }
  }

  "The flatUnionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = flatUnionFormat2[Fruit, Apple, Orange]("type")
    val fruitJson2 = JsObject("type" -> JsString("Apple"), "x" -> JsNumber(0))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson2
    }
    "convert JObject back to ADT" in {
      // println(Converter.fromJsonUnsafe[Fruit](fruitJson2))
      Converter.fromJsonUnsafe[Fruit](fruitJson2) mustEqual fruit
    }
  }
} 
Example 10
Source File: Subnet_Parameter_List_UT.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsNumber, JsString, _}

class Subnet_Parameter_List_UT extends FunSpec with Matchers {
  describe("AWS::EC2::Subnet_Parameter_List") {

    it("should serialize into valid json") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val expectedJson = JsObject(
        "subnets" -> JsObject(
          "Description" -> JsString("Select subnets where the RDS instances should be created"),
          "Type" -> JsString("List<AWS::EC2::Subnet::Id>")
        )
      )
      Seq[Parameter](subnetListParam).toJson should be (expectedJson)
    }

    it("should serialize into valid json as InputParameter") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val expectedJson = JsObject(
        "ParameterKey" -> JsString("subnets"),
        "ParameterValue" -> JsString("")
      )
      val inputParam = InputParameter.templateParameterToInputParameter(Some(Seq(subnetListParam)))
      inputParam.get(0).toJson should be (expectedJson)
    }

    it("can be passed as ParameterRef to AWS::RDS::DBSubnetGroup") {
      val subnetListParam = `AWS::EC2::Subnet_Parameter_List`("subnets", "Select subnets where the RDS instances should be created")
      val dbSubnetGroup = `AWS::RDS::DBSubnetGroup`(
        name = "dbSubnetGroup",
        DBSubnetGroupDescription = "DB subnet group",
        SubnetIds = ParameterRef(subnetListParam)
      )
      val expected = JsObject(
        "dbSubnetGroup" -> JsObject(
          "Type" -> JsString("AWS::RDS::DBSubnetGroup"),
          "Properties" -> JsObject(
            "DBSubnetGroupDescription" -> JsString("DB subnet group"),
            "SubnetIds" -> JsObject("Ref" -> JsString("subnets"))
          )
        )
      )
      Seq[Resource[_]](dbSubnetGroup).toJson should be (expected)
    }
  }
} 
Example 11
Source File: Diagram.scala    From pnp   with Apache License 2.0
package org.allenai.dqa.labeling

import scala.io.Source

import spray.json.DefaultJsonProtocol._
import spray.json.JsArray
import spray.json.JsNumber
import spray.json.JsObject
import spray.json.deserializationError
import spray.json.pimpString
import scala.util.Random


case class DiagramLabel(diagramType: String, partLabels: Vector[String])

object Diagram {
  
  def fromJsonFile(filename: String, features: Map[String, DiagramFeatures]
    ): Array[(Diagram, DiagramLabel)] = {
    val lines = Source.fromFile(filename).getLines
    lines.map(fromJsonLine(_, features)).toArray
  }

  def fromJsonLine(line: String, features: Map[String, DiagramFeatures]
    ): (Diagram, DiagramLabel) = {
    val js = line.parseJson.asJsObject
    val diagramLabel = js.fields("label").convertTo[String]
    val diagramId = js.fields("id").convertTo[String]
    val imageId = js.fields("imageId").convertTo[String]
    val width = js.fields("width").convertTo[Int]
    val height = js.fields("height").convertTo[Int]
    
    // val pointJsons = Random.shuffle(js.fields("points").asInstanceOf[JsArray].elements)
    val pointJsons = js.fields("points").asInstanceOf[JsArray].elements

    val labeledParts = for {
      (pointJson, i) <- pointJsons.zipWithIndex
      p = pointJson.asJsObject
      id = p.fields("textId").convertTo[String]
      label = p.fields("label").convertTo[String]
      xy = p.fields("xy") match {
        case JsArray(Vector(JsNumber(x), JsNumber(y))) => Point(x.toInt, y.toInt)
        case _ => deserializationError("Array of x/y coordinates expected")
      }
    } yield {
      (Part(id, i, xy),  label)
    }

    val f = features(imageId)

    (Diagram(diagramId, imageId, width, height, labeledParts.map(_._1), f),
        (DiagramLabel(diagramLabel, labeledParts.map(_._2))))
  }
} 
Example 12
Source File: Metrics.scala    From graphcool-framework   with Apache License 2.0
package cool.graph.client

import java.util.concurrent.TimeUnit

import akka.actor.Actor
import cool.graph.cuid.Cuid
import cool.graph.shared.externalServices.KinesisPublisher
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString}

import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal

object FeatureMetric extends Enumeration {
  type FeatureMetric = Value
  val Subscriptions           = Value("backend/api/subscriptions")
  val Filter                  = Value("backend/feature/filter")
  val NestedMutations         = Value("backend/feature/nested-mutation")
  val ApiSimple               = Value("backend/api/simple")
  val ApiRelay                = Value("backend/api/relay")
  val ApiFiles                = Value("backend/api/files")
  val ServersideSubscriptions = Value("backend/feature/sss")
  val RequestPipeline         = Value("backend/feature/rp") // add this!
  val PermissionQuery         = Value("backend/feature/permission-queries") // add this!
  val Authentication          = Value("backend/feature/authentication")
  val Algolia                 = Value("backend/feature/algolia") // add this!
  val Auth0                   = Value("backend/feature/integration-auth0")
  val Digits                  = Value("backend/feature/integration-digits")
}

case class ApiFeatureMetric(ip: String,
                            date: DateTime,
                            projectId: String,
                            clientId: String,
                            usedFeatures: List[String],
                            // Should be false when we can't determine. This is the case for subscriptions.
                            // Is always false for File api.
                            isFromConsole: Boolean)

class FeatureMetricActor(
    metricsPublisher: KinesisPublisher,
    interval: Int
) extends Actor {
  import context.dispatcher

  val metrics = mutable.Buffer.empty[ApiFeatureMetric]
  val FLUSH   = "FLUSH"
  val tick = context.system.scheduler.schedule(
    initialDelay = FiniteDuration(interval, TimeUnit.SECONDS),
    interval = FiniteDuration(interval, TimeUnit.SECONDS),
    receiver = self,
    message = FLUSH
  )

  override def postStop() = tick.cancel()

  def receive = {
    case metric: ApiFeatureMetric =>
      metrics += metric

    case FLUSH =>
      flushMetrics()
  }

  def flushMetrics() = {
    val byProject = metrics.groupBy(_.projectId) map {
      case (projectId, metrics) =>
        JsObject(
          "requestCount"        -> JsNumber(metrics.length),
          "projectId"           -> JsString(projectId),
          "usedIps"             -> JsArray(metrics.map(_.ip).distinct.take(10).toVector.map(JsString(_))),
          "features"            -> JsArray(metrics.flatMap(_.usedFeatures).distinct.toVector.map(JsString(_))),
          "date"                -> JsString(metrics.head.date.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z").withZoneUTC())),
          "version"             -> JsString("1"),
          "justConsoleRequests" -> JsBoolean(metrics.forall(_.isFromConsole))
        )
    }

    byProject.foreach { json =>
      try {
        metricsPublisher.putRecord(json.toString, shardId = Cuid.createCuid())
      } catch {
        case NonFatal(e) => println(s"Putting kinesis FeatureMetric failed: ${e.getMessage} ${e.toString}")
      }
    }
    metrics.clear()
  }
} 
Example 13
Source File: StandardFormatsSpec.scala    From sjson-new   with Apache License 2.0
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import scala.Right

class StandardFormatsSpec extends Specification with BasicJsonProtocol {
  case class Person(name: Option[String], value: Option[Int])
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Option[String]]("name")
          val value = unbuilder.readField[Option[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }

  "The optionFormat" should {
    "convert None to JsNull" in {
      Converter.toJsonUnsafe(None.asInstanceOf[Option[Int]]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Option[Int]](JsNull) mustEqual None
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Some("Hello").asInstanceOf[Option[String]]) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Option[String]](JsString("Hello")) mustEqual Some("Hello")
    }
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(None, None)) mustEqual JsObject()
    }
  }

  "The eitherFormat" should {
    val a: Either[Int, String] = Left(42)
    val b: Either[Int, String] = Right("Hello")

    "convert the left side of an Either value to Json" in {
      Converter.toJsonUnsafe(a) mustEqual JsNumber(42)
    }
    "convert the right side of an Either value to Json" in {
      Converter.toJsonUnsafe(b) mustEqual JsString("Hello")
    }
    "convert the left side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsNumber(42)) mustEqual Left(42)
    }
    "convert the right side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsString("Hello")) mustEqual Right("Hello")
    }
  }
} 
Example 14
Source File: WeeklyGenerator.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.generators.primary


import java.security.InvalidParameterException

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.primary.WeeklyTimeSeries
import org.joda.time.DateTimeConstants
import spray.json.{JsNumber, JsObject, JsString, JsValue, _}


class WeeklyGenerator(name: Option[String],
                      val points: Map[String, Double]) extends Generator[Double](name, "weekly")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      def day = (s: String) => s match {
         case "monday" => DateTimeConstants.MONDAY
         case "tuesday" => DateTimeConstants.TUESDAY
         case "wednesday" => DateTimeConstants.WEDNESDAY
         case "thursday" => DateTimeConstants.THURSDAY
         case "friday" => DateTimeConstants.FRIDAY
         case "saturday" => DateTimeConstants.SATURDAY
         case "sunday" => DateTimeConstants.SUNDAY
         case _ => throw new InvalidParameterException(s"'${s}' is not a valid day name.")
      }

      WeeklyTimeSeries(points map {case (k,v) => (day(k), v)})
   }

   override def toString = "WeeklyGenerator(" + name + "," + points + ")"

   override def equals(o: Any) = o match {
      case that: WeeklyGenerator => that.name == this.name && that.points == this.points
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "points" -> points.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object WeeklyGenerator
{
   def apply(value: JsValue): WeeklyGenerator = {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map
      {
         case JsString(x) => x
      }

      val points = value.asJsObject.fields("points") match {
         case JsObject(x) => x
         case _ => throw new ClassCastException
      }

      val r = points map { case (k,v) => (k, v match { case JsNumber(x) => x.toDouble })}

      val validDayNames = List("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
      val unmatchingDayNames = r.keySet.filterNot(validDayNames contains _)
      if (unmatchingDayNames.nonEmpty) throw new InvalidParameterException("The following day names are not valid: " + unmatchingDayNames)

      new WeeklyGenerator(name, r)
   }
} 
Example 15
Source File: FunctionGenerator.scala    From TSimulus   with Apache License 2.0
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.composite.FunctionTimeSeries
import spray.json.{DefaultJsonProtocol, JsNumber, JsObject, JsString, JsValue, _}


class FunctionGenerator(name: Option[String],
                        val generator: Either[String, Generator[Any]],
                        val slope: Double,
                        val intercept: Double) extends Generator[Double](name, "function")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      Model.generator(generators)(generator) match {
         // Could also be expressed as Sum(Times(generator, Constant(slope)), Constant(intercept))
         case g: Generator[Double] => FunctionTimeSeries[Double](g.timeseries(generators), (t,v) => Some(slope * v + intercept))
         case _ => throw new ClassCastException
      }
   }

   override def toString = "Function(" + name + ", " + generator + ", " + slope + ", " + intercept + ")"

   override def equals(o: Any) = o match {
      case that: FunctionGenerator => (that.name == this.name &&
         that.generator == this.generator &&
         that.slope == this.slope &&
         that.intercept == this.intercept)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "generator" -> either2json(generator),
         "slope" -> slope.toJson,
         "intercept" -> intercept.toJson
      )

      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object FunctionGenerator
{
   def apply(json: JsValue): FunctionGenerator = {

      val fields = json.asJsObject.fields

      val name = json.asJsObject.fields.get("name").map
      {
         case JsString(x) => x
      }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val slope = fields("slope") match {
         case JsNumber(n) => n.toDouble
      }

      val intercept = fields("intercept") match {
         case JsNumber(n) => n.toDouble
      }

      new FunctionGenerator(name, generator, slope, intercept)
   }
} 
Example 16
Source File: FitSpec.scala    From seahorse   with Apache License 2.0
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang._
import ai.deepsense.deeplang.doperables.Transformer
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperations.MockDOperablesFactory._
import ai.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import ai.deepsense.deeplang.params.ParamsMatchers._

class FitSpec extends UnitSpec with DeeplangTestSupport {

  "Fit" should {
    "fit input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testFit(op: Fit, expectedTransformer: Transformer): Unit = {
        val Vector(outputTransformer: Transformer) =
          op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)
        outputTransformer shouldBe expectedTransformer
      }
      val op1 = Fit()
      testFit(op1, transformer1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testFit(op2, transformer2)
    }
    "not modify params in input Estimator instance upon execution" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "infer Transformer from input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testInference(op: Fit, expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
        val inputDF = DataFrame.forInference(createSchema())
        val (knowledge, warnings) =
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        // Currently, InferenceWarnings are always empty.
        warnings shouldBe InferenceWarnings.empty
        val Vector(transformerKnowledge) = knowledge
        transformerKnowledge shouldBe expectedTransformerKnowledge
      }
      val op1 = Fit()
      testInference(op1, transformerKnowledge1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testInference(op2, transformerKnowledge2)
    }
    "not modify params in input Estimator instance upon inference" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      val inputDF = DataFrame.forInference(createSchema())
      op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "throw Exception" when {
      "there are more than one Estimator in input Knowledge" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimators = Set[DOperable](new MockEstimator, new MockEstimator)

        val op = Fit()
        a[TooManyPossibleTypesException] shouldBe thrownBy {
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimators), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
      "Estimator's dynamic parameters are invalid" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimator = new MockEstimator
        val fit = Fit().setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
        a[DeepLangMultiException] shouldBe thrownBy {
          fit.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
    }
  }
} 
Example 17
Source File: GridSearchSpec.scala    From seahorse   with Apache License 2.0
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperables.report.Report
import ai.deepsense.deeplang.doperations.MockDOperablesFactory.{MockEstimator, MockEvaluator}
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import ai.deepsense.deeplang.{DKnowledge, DeeplangTestSupport, UnitSpec}

class GridSearchSpec extends UnitSpec with DeeplangTestSupport {
  "GridSearch" should {
    "infer knowledge when dynamic parameters are valid" in {
      val inputDF = DataFrame.forInference(createSchema())
      val estimator = new MockEstimator
      val evaluator = new MockEvaluator

      val gridSearch = GridSearch()
      gridSearch.inferKnowledgeUntyped(
          Vector(DKnowledge(estimator), DKnowledge(inputDF), DKnowledge(evaluator)))(mock[InferContext]) shouldBe
        (Vector(DKnowledge(Report())), InferenceWarnings.empty)
    }
    "throw Exception" when {
      "Estimator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), None)
      }
      "Evaluator's dynamic parameters are invalid" in {
        checkMultiException(None, Some(-2))
      }
      "Both Estimator's and Evaluator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), Some(-2))
      }
    }
  }

  private def checkMultiException(
      estimatorParamValue: Option[Double],
      evaluatorParamValue: Option[Double]): Unit = {

    val inputDF = DataFrame.forInference(createSchema())
    val estimator = new MockEstimator
    val evaluator = new MockEvaluator

    val gridSearch = GridSearch()
      .setEstimatorParams(prepareParamDictionary(estimator.paramA.name, estimatorParamValue))
      .setEvaluatorParams(prepareParamDictionary(evaluator.paramA.name, evaluatorParamValue))

    val multiException = the [DeepLangMultiException] thrownBy {
      gridSearch.inferKnowledgeUntyped(
        Vector(
          DKnowledge(estimator),
          DKnowledge(inputDF),
          DKnowledge(evaluator)))(mock[InferContext])
    }

    val invalidParamCount =
      estimatorParamValue.map(_ => 1).getOrElse(0) +
      evaluatorParamValue.map(_ => 1).getOrElse(0)

    multiException.exceptions should have size invalidParamCount
  }

  private def prepareParamDictionary(paramName: String, maybeValue: Option[Double]): JsObject = {
    val jsonEntries = maybeValue.map(
        value => Seq(paramName -> JsNumber(value)))
      .getOrElse(Seq())
    JsObject(jsonEntries: _*)
  }
} 
Example 18
Source File: FitPlusTransformSpec.scala    From seahorse   with Apache License 2.0
package ai.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import ai.deepsense.deeplang.doperables.Transformer
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperations.MockDOperablesFactory._
import ai.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import ai.deepsense.deeplang.exceptions.DeepLangMultiException
import ai.deepsense.deeplang.inference.InferContext
import ai.deepsense.deeplang._

class FitPlusTransformSpec extends UnitSpec with DeeplangTestSupport {

  "FitPlusTransform" when {
    "executed" should {
      "pass parameters to the input Estimator produce a Transformer and transformed DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testExecute(
          op: FitPlusTransform,
          expectedDataFrame: DataFrame,
          expectedTransformer: Transformer): Unit = {
          val results = op.executeUntyped(Vector(estimator, mock[DataFrame]))(createExecutionContext)
          val outputDataFrame = results(0).asInstanceOf[DataFrame]
          val outputTransformer = results(1).asInstanceOf[Transformer]

          outputDataFrame shouldBe expectedDataFrame
          outputTransformer shouldBe expectedTransformer
        }

        testExecute(fpt, transformedDataFrame1, transformer1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testExecute(fpt, transformedDataFrame2, transformer2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }

    }
    "inferring knowledge" should {
      "take parameters from the input Estimator, infer Transformer and then a DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testInference(
          op: FitPlusTransform,
          expectedDataFrameKnowledge: DKnowledge[DataFrame],
          expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
          val (Vector(outputDataFrameKnowledge, outputTransformerKnowledge), _) =
            op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]]))(mock[InferContext])

          outputDataFrameKnowledge shouldBe expectedDataFrameKnowledge
          outputTransformerKnowledge shouldBe expectedTransformerKnowledge
        }

        testInference(fpt, transformedDataFrameKnowledge1, transformerKnowledge1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testInference(fpt, transformedDataFrameKnowledge2, transformerKnowledge2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }
      "throw exceptions" when {
        "input Estimator Knowledge consist more than one type" in {
          val estimators = Set[DOperable](new MockEstimator, new MockEstimator)
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimators), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          a[TooManyPossibleTypesException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
        "Estimator's dynamic parameters are invalid" in {
          val estimator = new MockEstimator
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
          a[DeepLangMultiException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
      }
    }
  }
} 
Example 19
Source File: IssueServiceImpl.scala    From BacklogMigration-Redmine   with MIT License
package com.nulabinc.backlog.r2b.redmine.service

import javax.inject.Inject

import com.nulabinc.backlog.migration.common.utils.Logging
import com.nulabinc.backlog.r2b.redmine.conf.RedmineApiConfiguration
import com.nulabinc.backlog.r2b.redmine.domain.RedmineProjectId
import com.taskadapter.redmineapi.bean.Issue
import com.taskadapter.redmineapi.{Include, RedmineManager}
import spray.json.{JsNumber, JsonParser}

import scala.jdk.CollectionConverters._


class IssueServiceImpl @Inject()(apiConfig: RedmineApiConfiguration, projectId: RedmineProjectId, redmine: RedmineManager)
    extends IssueService
    with Logging {

  override def countIssues(): Int = {
    val url    = s"${apiConfig.url}/issues.json?limit=1&subproject_id=!*&project_id=${projectId.value}&key=${apiConfig.key}&status_id=*"
    val string = scala.io.Source.fromURL(url, "UTF-8").mkString
    JsonParser(string).asJsObject.getFields("total_count") match {
      case Seq(JsNumber(totalCount)) => totalCount.intValue
      case _                         => 0
    }
  }

  override def allIssues(params: Map[String, String]): Seq[Issue] =
    redmine.getIssueManager.getIssues(params.asJava).asScala.toSeq

  override def issueOfId(id: Integer, include: Include*): Issue = {
    logger.debug("Get an issue ID: " + id)
    redmine.getIssueManager.getIssueById(id, include: _*)
  }

  override def tryIssueOfId(id: Integer, include: Include*): Either[Throwable, Issue] =
    try {
      Right(redmine.getIssueManager.getIssueById(id, include: _*))
    } catch {
      case e: Throwable =>
        logger.warn(e.getMessage, e)
        Left(e)
    }

} 
Example 20
Source File: FitSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang._
import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.MockDOperablesFactory._
import io.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.params.ParamsMatchers._

class FitSpec extends UnitSpec with DeeplangTestSupport {

  "Fit" should {
    "fit input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testFit(op: Fit, expectedTransformer: Transformer): Unit = {
        val Vector(outputTransformer: Transformer) =
          op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])
        outputTransformer shouldBe expectedTransformer
      }
      val op1 = Fit()
      testFit(op1, transformer1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testFit(op2, transformer2)
    }
    "not modify params in input Estimator instance upon execution" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "infer Transformer from input Estimator on input DataFrame with proper parameters set" in {
      val estimator = new MockEstimator

      def testInference(op: Fit, expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
        val inputDF = DataFrame.forInference(createSchema())
        val (knowledge, warnings) =
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        // Currently, InferenceWarnings are always empty.
        warnings shouldBe InferenceWarnings.empty
        val Vector(transformerKnowledge) = knowledge
        transformerKnowledge shouldBe expectedTransformerKnowledge
      }
      val op1 = Fit()
      testInference(op1, transformerKnowledge1)

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op2 = Fit().setEstimatorParams(paramsForEstimator)
      testInference(op2, transformerKnowledge2)
    }
    "not modify params in input Estimator instance upon inference" in {
      val estimator = new MockEstimator
      val originalEstimator = estimator.replicate()

      val paramsForEstimator = JsObject(estimator.paramA.name -> JsNumber(2))
      val op = Fit().setEstimatorParams(paramsForEstimator)
      val inputDF = DataFrame.forInference(createSchema())
      op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])

      estimator should have (theSameParamsAs (originalEstimator))
    }
    "throw Exception" when {
      "there are more than one Estimator in input Knowledge" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimators = Set[DOperable](new MockEstimator, new MockEstimator)

        val op = Fit()
        a[TooManyPossibleTypesException] shouldBe thrownBy {
          op.inferKnowledgeUntyped(Vector(DKnowledge(estimators), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
      "Estimator's dynamic parameters are invalid" in {
        val inputDF = DataFrame.forInference(createSchema())
        val estimator = new MockEstimator
        val fit = Fit().setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
        a[DeepLangMultiException] shouldBe thrownBy {
          fit.inferKnowledgeUntyped(Vector(DKnowledge(estimator), DKnowledge(inputDF)))(mock[InferContext])
        }
      }
    }
  }
} 
Example 21
Source File: GridSearchSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.report.Report
import io.deepsense.deeplang.doperations.MockDOperablesFactory.{MockEstimator, MockEvaluator}
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.{DKnowledge, DeeplangTestSupport, UnitSpec}

class GridSearchSpec extends UnitSpec with DeeplangTestSupport {
  "GridSearch" should {
    "infer knowledge when dynamic parameters are valid" in {
      val inputDF = DataFrame.forInference(createSchema())
      val estimator = new MockEstimator
      val evaluator = new MockEvaluator

      val gridSearch = GridSearch()
      gridSearch.inferKnowledgeUntyped(
          Vector(DKnowledge(estimator), DKnowledge(inputDF), DKnowledge(evaluator)))(mock[InferContext]) shouldBe
        (Vector(DKnowledge(Report())), InferenceWarnings.empty)
    }
    "throw Exception" when {
      "Estimator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), None)
      }
      "Evaluator's dynamic parameters are invalid" in {
        checkMultiException(None, Some(-2))
      }
      "Both Estimator's and Evaluator's dynamic parameters are invalid" in {
        checkMultiException(Some(-2), Some(-2))
      }
    }
  }

  private def checkMultiException(
      estimatorParamValue: Option[Double],
      evaluatorParamValue: Option[Double]): Unit = {

    val inputDF = DataFrame.forInference(createSchema())
    val estimator = new MockEstimator
    val evaluator = new MockEvaluator

    val gridSearch = GridSearch()
      .setEstimatorParams(prepareParamDictionary(estimator.paramA.name, estimatorParamValue))
      .setEvaluatorParams(prepareParamDictionary(evaluator.paramA.name, evaluatorParamValue))

    val multiException = the [DeepLangMultiException] thrownBy {
      gridSearch.inferKnowledgeUntyped(
        Vector(
          DKnowledge(estimator),
          DKnowledge(inputDF),
          DKnowledge(evaluator)))(mock[InferContext])
    }

    val invalidParamCount =
      estimatorParamValue.map(_ => 1).getOrElse(0) +
      evaluatorParamValue.map(_ => 1).getOrElse(0)

    multiException.exceptions should have size invalidParamCount
  }

  private def prepareParamDictionary(paramName: String, maybeValue: Option[Double]): JsObject = {
    val jsonEntries = maybeValue.map(
        value => Seq(paramName -> JsNumber(value)))
      .getOrElse(Seq())
    JsObject(jsonEntries: _*)
  }
} 
Example 22
Source File: FitPlusTransformSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperations

import spray.json.{JsNumber, JsObject}

import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.MockDOperablesFactory._
import io.deepsense.deeplang.doperations.exceptions.TooManyPossibleTypesException
import io.deepsense.deeplang.exceptions.DeepLangMultiException
import io.deepsense.deeplang.inference.InferContext
import io.deepsense.deeplang.{DKnowledge, DOperable, ExecutionContext, UnitSpec}

class FitPlusTransformSpec extends UnitSpec {

  "FitPlusTransform" when {
    "executed" should {
      "pass parameters to the input Estimator produce a Transformer and transformed DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testExecute(
          op: FitPlusTransform,
          expectedDataFrame: DataFrame,
          expectedTransformer: Transformer): Unit = {
          val results = op.executeUntyped(Vector(estimator, mock[DataFrame]))(mock[ExecutionContext])
          val outputDataFrame = results(0).asInstanceOf[DataFrame]
          val outputTransformer = results(1).asInstanceOf[Transformer]

          outputDataFrame shouldBe expectedDataFrame
          outputTransformer shouldBe expectedTransformer
        }

        testExecute(fpt, transformedDataFrame1, transformer1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testExecute(fpt, transformedDataFrame2, transformer2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }

    }
    "inferring knowledge" should {
      "take parameters from the input Estimator, infer Transformer and then a DataFrame" in {
        val estimator = new MockEstimator
        val initialParametersValues = estimator.extractParamMap()
        val fpt = new FitPlusTransform

        def testInference(
          op: FitPlusTransform,
          expectedDataFrameKnowledge: DKnowledge[DataFrame],
          expectedTransformerKnowledge: DKnowledge[Transformer]): Unit = {
          val (Vector(outputDataFrameKnowledge, outputTransformerKnowledge), _) =
            op.inferKnowledgeUntyped(Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]]))(mock[InferContext])

          outputDataFrameKnowledge shouldBe expectedDataFrameKnowledge
          outputTransformerKnowledge shouldBe expectedTransformerKnowledge
        }

        testInference(fpt, transformedDataFrameKnowledge1, transformerKnowledge1)
        fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(2)))
        testInference(fpt, transformedDataFrameKnowledge2, transformerKnowledge2)
        estimator.extractParamMap() shouldBe initialParametersValues
      }
      "throw exceptions" when {
        "input Estimator Knowledge consist more than one type" in {
          val estimators = Set[DOperable](new MockEstimator, new MockEstimator)
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimators), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          a[TooManyPossibleTypesException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
        "Estimator's dynamic parameters are invalid" in {
          val estimator = new MockEstimator
          val inputKnowledge: Vector[DKnowledge[DOperable]] =
            Vector(DKnowledge(estimator), mock[DKnowledge[DataFrame]])
          val fpt = new FitPlusTransform
          fpt.setEstimatorParams(JsObject(estimator.paramA.name -> JsNumber(-2)))
          a[DeepLangMultiException] shouldBe thrownBy {
            fpt.inferKnowledgeUntyped(inputKnowledge)(mock[InferContext])
          }
        }
      }
    }
  }
} 
Example 23
Source File: ClusterRequest.scala    From ohara   with Apache License 2.0
package oharastream.ohara.client.configurator
import oharastream.ohara.common.annotations.Optional
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import spray.json.DefaultJsonProtocol._
import spray.json.{JsArray, JsNumber, JsObject, JsString, JsValue}

import scala.jdk.CollectionConverters._
import scala.collection.mutable

trait ClusterRequest {
  protected def key: ObjectKey = ObjectKey.of(
    settings.get(GROUP_KEY).map(_.convertTo[String]).getOrElse(GROUP_DEFAULT),
    settings(NAME_KEY).convertTo[String]
  )

  protected val settings: mutable.Map[String, JsValue] = mutable.Map()

  @Optional("default key is a random string. But it is required in updating")
  def key(key: ObjectKey): ClusterRequest.this.type = {
    setting(NAME_KEY, JsString(key.name()))
    setting(GROUP_KEY, JsString(key.group()))
  }

  @Optional("default name is a random string. But it is required in updating")
  def name(name: String): ClusterRequest.this.type =
    setting(NAME_KEY, JsString(CommonUtils.requireNonEmpty(name)))
  @Optional("default is GROUP_DEFAULT")
  def group(group: String): ClusterRequest.this.type =
    setting(GROUP_KEY, JsString(CommonUtils.requireNonEmpty(group)))
  def nodeName(nodeName: String): ClusterRequest.this.type = nodeNames(Set(CommonUtils.requireNonEmpty(nodeName)))
  def nodeNames(nodeNames: Set[String]): ClusterRequest.this.type =
    setting(NODE_NAMES_KEY, JsArray(CommonUtils.requireNonEmpty(nodeNames.asJava).asScala.map(JsString(_)).toVector))

  @Optional("default value is empty array")
  def routes(routes: Map[String, String]): ClusterRequest.this.type =
    setting(ROUTES_KEY, JsObject(routes.map {
      case (k, v) => k -> JsString(v)
    }))

  @Optional("default value is 1024")
  def initHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(INIT_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("default value is 1024")
  def maxHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(MAX_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("extra settings is empty by default")
  def setting(key: String, value: JsValue): ClusterRequest.this.type =
    settings(Map(key -> value))
  @Optional("extra settings is empty by default")
  def settings(settings: Map[String, JsValue]): ClusterRequest.this.type = {
    // We don't need to check whether the settings are empty here, for two reasons:
    // 1) we may want to keep the benefit of default creation without specifying any settings
    // 2) the actual checking is done in the JSON parsing phase of creation or update
    this.settings ++= settings
    this
  }
} 
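The fluent setters above all funnel through the settings(...) overloads, which fold key/value pairs into the shared mutable map so that a later write for a key replaces an earlier one. Below is a minimal standalone sketch of just that accumulation pattern; the key names are illustrative, not the configurator's real constants.

import spray.json.{JsNumber, JsString, JsValue}
import scala.collection.mutable

object SettingsAccumulationSketch extends App {
  val settings: mutable.Map[String, JsValue] = mutable.Map()
  // first write seeds the defaults
  settings ++= Map("name" -> JsString("zk"), "initHeap" -> JsNumber(1024))
  // a later write for the same key replaces the default
  settings ++= Map("initHeap" -> JsNumber(2048))
  assert(settings("initHeap") == JsNumber(2048))
}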
Example 24
Source File: TestPerformance4SambaSource.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.smb.SmbSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSource extends BasicTestPerformance4Samba {
  @Test
  def test(): Unit = {
    val samba = sambaClient()
    createTopic()
    val completedPath = "completed"
    val errorPath     = "error"
    val (path, _, _)  = setupInputData(timeoutOfInputData)

    try {
      loopInputDataThread(setupInputData)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[SmbSource].getName,
        settings = sambaSettings
          + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
          + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(samba, completedPath)
          ))
          + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(samba, errorPath)
          ))
          + (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
      )
      sleepUntilEnd()
    } finally if (needDeleteData) {
      PerformanceTestingUtils.deleteFolder(samba, path)
      PerformanceTestingUtils.deleteFolder(samba, completedPath)
      PerformanceTestingUtils.deleteFolder(samba, errorPath)
    }
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 25
Source File: BasicTestPerformance4Samba.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.concurrent.atomic.LongAdder

import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.data.Row
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.AssumptionViolatedException
import spray.json.{JsNumber, JsString, JsValue}

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._

abstract class BasicTestPerformance4Samba extends BasicTestPerformance {
  private[this] val sambaHostname: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_HOSTNAME_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_HOSTNAME_KEY} does not exists!!!")
  )

  private[this] val sambaUsername: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_USER_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_USER_KEY} does not exists!!!")
  )

  private[this] val sambaPassword: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_PASSWORD_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_PASSWORD_KEY} does not exists!!!")
  )

  private[this] val sambaPort: Int = sys.env
    .getOrElse(
      PerformanceTestingUtils.SAMBA_PORT_KEY,
      throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_PORT_KEY} does not exists!!!")
    )
    .toInt

  private[this] val sambaShare: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_SHARE_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_SHARE_KEY} does not exists!!!")
  )

  private[this] val csvInputFolderKey       = PerformanceTestingUtils.CSV_INPUT_KEY
  private[this] val csvOutputFolder: String = value(csvInputFolderKey).getOrElse("input")

  private[this] val NEED_DELETE_DATA_KEY: String = PerformanceTestingUtils.DATA_CLEANUP_KEY
  protected[this] val needDeleteData: Boolean    = sys.env.getOrElse(NEED_DELETE_DATA_KEY, "true").toBoolean

  protected val sambaSettings: Map[String, JsValue] = Map(
    oharastream.ohara.connector.smb.SMB_HOSTNAME_KEY   -> JsString(sambaHostname),
    oharastream.ohara.connector.smb.SMB_PORT_KEY       -> JsNumber(sambaPort),
    oharastream.ohara.connector.smb.SMB_USER_KEY       -> JsString(sambaUsername),
    oharastream.ohara.connector.smb.SMB_PASSWORD_KEY   -> JsString(sambaPassword),
    oharastream.ohara.connector.smb.SMB_SHARE_NAME_KEY -> JsString(sambaShare)
  )

  protected def setupInputData(timeout: Duration): (String, Long, Long) = {
    val client = sambaClient()
    try {
      if (!client.exists(csvOutputFolder)) PerformanceTestingUtils.createFolder(client, csvOutputFolder)

      val result = generateData(
        numberOfRowsToFlush,
        timeout,
        (rows: Seq[Row]) => {
          val file        = s"$csvOutputFolder/${CommonUtils.randomString()}"
          val writer      = new BufferedWriter(new OutputStreamWriter(client.create(file)))
          val count       = new LongAdder()
          val sizeInBytes = new LongAdder()

          try {
            val cellNames: Set[String] = rows.head.cells().asScala.map(_.name).toSet
            writer
              .append(cellNames.mkString(","))
              .append("\n")
            rows.foreach(row => {
              val content = row.cells().asScala.map(_.value).mkString(",")
              count.increment()
              sizeInBytes.add(content.length)
              writer
                .append(content)
                .append("\n")
            })
            (count.longValue(), sizeInBytes.longValue())
          } finally Releasable.close(writer)
        }
      )
      (csvOutputFolder, result._1, result._2)
    } finally Releasable.close(client)
  }

  protected[this] def sambaClient(): FileSystem =
    FileSystem.smbBuilder
      .hostname(sambaHostname)
      .port(sambaPort)
      .user(sambaUsername)
      .password(sambaPassword)
      .shareName(sambaShare)
      .build()
} 
Example 26
Source File: TestPerformance4HdfsSink.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}
import org.junit.Test

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4HdfsSink extends BasicTestPerformance {
  private[this] val NEED_DELETE_DATA_KEY: String = PerformanceTestingUtils.DATA_CLEANUP_KEY
  private[this] val needDeleteData: Boolean      = sys.env.getOrElse(NEED_DELETE_DATA_KEY, "true").toBoolean

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          CsvConnectorDefinitions.FLUSH_SIZE_KEY             -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      // Delete the topic folders from HDFS
      val hdfs = hdfsClient()
      try topicInfos.foreach { topicInfo =>
        val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
        PerformanceTestingUtils.deleteFolder(hdfs, path)
      } finally Releasable.close(hdfs)
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 27
Source File: TestPerformance4JDBCSourceToHDFSSink.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.experimental.categories.Category
import org.junit.Test
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4JDBCSourceToHDFSSink extends BasicTestPerformance4Jdbc {
  override protected val tableName: String = s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTable()
      setupInputData(timeoutOfInputData)
      loopInputDataThread(setupInputData)
      createTopic()

      //Running JDBC Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )

      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      // Drop the table from the database
      client.dropTable(tableName)

      // Delete the topic folders from HDFS
      val hdfs = hdfsClient()
      try {
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally Releasable.close(hdfs)
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 28
Source File: TestPerformance4FtpSink.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import spray.json.{JsNumber, JsString}
import org.junit.Test
import org.junit.experimental.categories.Category

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSink extends BasicTestPerformance4Ftp {
  private[this] val dataDir: String = "/tmp"

  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSink].getName(),
        settings = ftpSettings
          ++ Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(PerformanceTestingUtils.createFolder(ftp, dataDir)),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY    -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(ftp)
  }

  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (cleanupTestData)
      topicInfos.foreach { topicInfo =>
        val path = s"${dataDir}/${topicInfo.topicNameOnKafka}"
        val ftp  = ftpClient()
        try if (PerformanceTestingUtils.exists(ftp, path)) PerformanceTestingUtils.deleteFolder(ftp, path)
        finally Releasable.close(ftp)
      }
} 
Example 29
Source File: TestPerformance4FtpSourceToHDFSSink.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSourceToHDFSSink extends BasicTestPerformance4Ftp {
  private[this] val ftpCompletedPath = "/completed"
  private[this] val ftpErrorPath     = "/error"
  private[this] val (path, _, _)     = setupInputData(timeoutOfInputData)

  @Test
  def test(): Unit = {
    val ftp  = ftpClient()
    val hdfs = hdfsClient()
    try {
      createTopic()
      loopInputDataThread(setupInputData)
      //Running FTP Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSource].getName,
        settings = ftpSettings
          + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
          + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpCompletedPath)
          ))
          + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpErrorPath)
          ))
      )

      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally {
      Releasable.close(hdfs)
      Releasable.close(ftp)
    }
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (cleanupTestData) {
      // Delete the input folders from the FTP server
      val ftp  = ftpClient()
      val hdfs = hdfsClient()
      try {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, ftpCompletedPath)
        PerformanceTestingUtils.deleteFolder(ftp, ftpErrorPath)

        // Delete the topic folders from HDFS
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally {
        Releasable.close(hdfs)
        Releasable.close(ftp)
      }
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 30
Source File: TestPerformance4Oracle.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4Oracle extends BasicTestPerformance4Jdbc {
  override protected val tableName: String =
    s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    createTable()
    setupInputData(timeoutOfInputData)
    loopInputDataThread(setupInputData)
    createTopic()
    try {
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )
      sleepUntilEnd()
    } finally if (needDeleteData) client.dropTable(tableName)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 31
Source File: TestPerformance4SambaSink.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.smb.SmbSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSink extends BasicTestPerformance4Samba {
  private[this] val outputDir: String = "output"

  @Test
  def test(): Unit = {
    val samba = sambaClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[SmbSink].getName,
        settings = sambaSettings
          ++ Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(samba, outputDir)
            ),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(samba)
  }

  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (needDeleteData)
      topicInfos.foreach { topicInfo =>
        val path  = s"$outputDir/${topicInfo.topicNameOnKafka}"
        val samba = sambaClient()
        try {
          if (PerformanceTestingUtils.exists(samba, path)) PerformanceTestingUtils.deleteFolder(samba, path)
        } finally Releasable.close(samba)
      }
} 
Example 32
Source File: ArgumentsBuilder.scala    From ohara   with Apache License 2.0
package oharastream.ohara.agent

import oharastream.ohara.agent.ArgumentsBuilder.FileAppender
import oharastream.ohara.common.util.CommonUtils
import spray.json.{JsNull, JsNumber, JsString, JsValue}

import scala.collection.mutable


trait ArgumentsBuilder {
  def file(path: String): FileAppender

  def mainConfigFile(path: String): ArgumentsBuilder

  def build: Seq[String]
}

object ArgumentsBuilder {
  trait FileAppender {
    private[this] val props                = mutable.Buffer[String]()
    def append(prop: Int): FileAppender    = append(prop.toString)
    def append(prop: String): FileAppender = append(Seq(prop))
    def append(props: Seq[String]): FileAppender = {
      this.props ++= props
      this
    }
    def append(key: String, value: Boolean): FileAppender = append(s"$key=$value")
    def append(key: String, value: Short): FileAppender   = append(s"$key=$value")
    def append(key: String, value: Int): FileAppender     = append(s"$key=$value")
    def append(key: String, value: String): FileAppender  = append(s"$key=$value")
    def append(key: String, value: JsValue): FileAppender = append(
      key,
      value match {
        case JsString(value) => value
        case JsNumber(value) => value.toString
        case JsNull          => throw new IllegalArgumentException(s"JsNull is not legal")
        case _               => value.toString()
      }
    )

    def done: ArgumentsBuilder = done(props.toSeq)

    protected def done(props: Seq[String]): ArgumentsBuilder
  }
  def apply(): ArgumentsBuilder = new ArgumentsBuilder {
    private[this] val files                  = mutable.Map[String, Seq[String]]()
    private[this] var mainConfigFile: String = _

    override def build: Seq[String] =
      if (CommonUtils.isEmpty(mainConfigFile))
        throw new IllegalArgumentException("you have to define the main configs")
      else
        // format: --file path=line0,line1 --file path1=line0,line1
        // NOTED: the path and props must be on different lines; otherwise, k8s merges them into a single line and our
        // script fails to parse the command-line arguments
        files.flatMap {
          case (path, props) => Seq("--file", s"$path=${props.mkString(",")}")
        }.toSeq ++ Seq("--config", mainConfigFile)

    override def file(path: String): FileAppender = (props: Seq[String]) => {
      this.files += (path -> props)
      this
    }

    override def mainConfigFile(path: String): ArgumentsBuilder = {
      this.mainConfigFile = CommonUtils.requireNonEmpty(path)
      this
    }
  }
} 
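A usage sketch for the builder above; the paths and property names are made up for illustration. Each file(path) call opens a FileAppender, append collects lines for that file, done returns control to the builder, and build renders the --file/--config argument list.

import oharastream.ohara.agent.ArgumentsBuilder

object ArgumentsBuilderSketch extends App {
  val args: Seq[String] = ArgumentsBuilder()
    .mainConfigFile("/tmp/main.conf") // mandatory, otherwise build throws
    .file("/tmp/extra.properties")
    .append("log.dirs", "/tmp/data")  // rendered as "log.dirs=/tmp/data"
    .append("num.partitions", 3)
    .done
    .build
  // args == Seq("--file", "/tmp/extra.properties=log.dirs=/tmp/data,num.partitions=3",
  //             "--config", "/tmp/main.conf")
  println(args.mkString(" "))
}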
Example 33
Source File: TestPerformance4FtpSource.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSource extends BasicTestPerformance4Ftp {
  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      val completedPath = "/completed"
      val errorPath     = "/error"
      val (path, _, _)  = setupInputData(timeoutOfInputData)
      try {
        loopInputDataThread(setupInputData)
        setupConnector(
          connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
          className = classOf[FtpSource].getName,
          settings = ftpSettings
            + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
            + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, completedPath)
            ))
            + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, errorPath)
            ))
            + (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
        )
        sleepUntilEnd()
      } finally if (cleanupTestData) {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, completedPath)
        PerformanceTestingUtils.deleteFolder(ftp, errorPath)
      }
    } finally Releasable.close(ftp)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 34
Source File: JsonSupport.scala    From akka-http-slick-sample   with MIT License
package net.softler.data.model

import java.sql.Timestamp
import java.time.Instant
import java.util.UUID

import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json.{DefaultJsonProtocol, JsNumber, JsString, JsValue, JsonFormat, RootJsonFormat}

trait BaseJsonProtocol extends DefaultJsonProtocol {
  implicit val timestampFormat: JsonFormat[Timestamp] = new JsonFormat[Timestamp] {
    override def write(obj: Timestamp): JsValue = JsNumber(obj.getTime)

    override def read(json: JsValue): Timestamp = json match {
      case JsNumber(x) => Timestamp.from(Instant.ofEpochMilli(x.toLong))
      case _ =>
        throw new IllegalArgumentException(
          s"Can not parse json value [$json] to a timestamp object")
    }
  }

  implicit val uuidJsonFormat: JsonFormat[UUID] = new JsonFormat[UUID] {
    override def write(x: UUID): JsValue = JsString(x.toString)

    override def read(value: JsValue): UUID = value match {
      case JsString(x) => UUID.fromString(x)
      case x =>
        throw new IllegalArgumentException("Expected UUID as JsString, but got " + x.getClass)
    }
  }
}


trait JsonProtocol extends SprayJsonSupport with BaseJsonProtocol {
  implicit val userFormat: RootJsonFormat[User] = jsonFormat10(User)
} 
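A round-trip sketch for the timestamp format above (the epoch value is arbitrary); it simply mixes BaseJsonProtocol into a throwaway object.

import java.sql.Timestamp
import java.time.Instant
import spray.json.JsNumber

object TimestampFormatSketch extends App with BaseJsonProtocol {
  val ts   = Timestamp.from(Instant.ofEpochMilli(1580000000000L))
  val json = timestampFormat.write(ts)
  assert(json == JsNumber(1580000000000L)) // written as epoch millis
  assert(timestampFormat.read(json) == ts) // and read back losslessly
}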
Example 35
Source File: HydraKafkaJsonSupport.scala    From hydra   with Apache License 2.0
package hydra.kafka.marshallers

import akka.http.scaladsl.marshalling.{Marshaller, Marshalling}
import akka.http.scaladsl.model.ContentTypes
import akka.util.ByteString
import hydra.core.marshallers.HydraJsonSupport
import org.apache.kafka.common.{Node, PartitionInfo}
import spray.json.{JsNumber, JsObject, JsString, JsValue, JsonFormat}

import scala.concurrent.Future


trait HydraKafkaJsonSupport extends HydraJsonSupport {

  implicit object NodeJsonFormat extends JsonFormat[Node] {

    override def write(node: Node): JsValue = {
      JsObject(
        "id" -> JsNumber(node.idString),
        "host" -> JsString(node.host),
        "port" -> JsNumber(node.port)
      )
    }

    override def read(json: JsValue): Node = {
      json.asJsObject.getFields("id", "host", "port") match {
        case Seq(id, host, port) =>
          new Node(
            id.convertTo[Int],
            host.convertTo[String],
            port.convertTo[Int]
          )
        case other =>
          spray.json.deserializationError(
            "Cannot deserialize Node. Invalid input: " + other
          )
      }
    }
  }

  implicit object PartitionInfoJsonFormat extends JsonFormat[PartitionInfo] {

    import spray.json._

    override def write(p: PartitionInfo): JsValue = {
      JsObject(
        "partition" -> JsNumber(p.partition()),
        "leader" -> p.leader().toJson,
        "isr" -> JsArray(p.inSyncReplicas().toJson)
      )
    }

    override def read(json: JsValue): PartitionInfo = ???
  }

  implicit val stringFormat = Marshaller[String, ByteString] { ec => s =>
    Future.successful {
      List(
        Marshalling.WithFixedContentType(
          ContentTypes.`application/json`,
          () => ByteString(s)
        )
      )
    }
  }
} 
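A write/read sketch for the NodeJsonFormat above. It assumes HydraJsonSupport can be mixed into a plain object; the broker host and port are placeholders.

import org.apache.kafka.common.Node

object NodeFormatSketch extends App with HydraKafkaJsonSupport {
  val node = new Node(1, "broker-0.example.com", 9092)
  // yields {"id":1,"host":"broker-0.example.com","port":9092}
  val json = NodeJsonFormat.write(node)
  assert(NodeJsonFormat.read(json) == node)
}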
Example 36
Source File: ClickhouseJsonSupport.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.dsl.marshalling

import com.crobox.clickhouse.time.IntervalStart
import org.joda.time.format.{DateTimeFormatter, DateTimeFormatterBuilder, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
import spray.json.{JsNumber, JsString, JsValue, JsonFormat, deserializationError, _}

import scala.util.Try

trait ClickhouseJsonSupport {

  implicit object ClickhouseIntervalStartFormat extends JsonFormat[IntervalStart] {
    // The regex extractors (month, date, msTimestamp, timestamp), the joda-time
    // formatters and the Unix-epoch constants referenced below are defined in the
    // full source file and elided from this excerpt.
    override def read(json: JsValue): IntervalStart =
      json match {
        case JsString(value) =>
          value match {
            case month(relativeMonth, timezoneId) =>
              new DateTime(UnixStartTimeWithoutTimeZone)
                .withZoneRetainFields(DateTimeZone.forID(timezoneId))
                .plusMonths(relativeMonth.toInt - RelativeMonthsSinceUnixStart)
                .withZone(DateTimeZone.UTC)
            case date(dateOnly, timezoneId) =>
              //should handle quarter and year grouping as it returns a date
              formatter
                .parseDateTime(dateOnly)
                .withZoneRetainFields(DateTimeZone.forID(timezoneId))
                .withZone(DateTimeZone.UTC)
            case msTimestamp(millis) => new DateTime(millis.toLong, DateTimeZone.UTC)
            case timestamp(secs)     => new DateTime(secs.toLong * 1000, DateTimeZone.UTC)
            case _                   =>
              // sometimes clickhouse mistakenly returns a long / int value as JsString. Therefore, first try to
              // parse it as a long...
              val dateTime = Try {
                new DateTime(value.toLong, DateTimeZone.UTC)
              }.toOption

              // continue with parsing using the formatter
              dateTime.getOrElse {
                try {
                  formatter.parseDateTime(value)
                } catch {
                  case _: IllegalArgumentException => error(s"Couldn't parse $value into valid date time")
                  case _: UnsupportedOperationException =>
                    error("Unsupported operation, programmatic misconfiguration?")
                }
              }
          }
        case JsNumber(millis) => new DateTime(millis.longValue, DateTimeZone.UTC)
        case _                => throw DeserializationException(s"Unknown date format read from clickhouse for $json")
      }

    def error(v: Any): DateTime = {
      val example = readFormatter.print(0)
      deserializationError(
        f"'$v' is not a valid date value. Dates must be in compact ISO-8601 format, e.g. '$example'"
      )
    }
  }

}
object ClickhouseJsonSupport extends DefaultJsonProtocol with ClickhouseJsonSupport 
Example 37
Source File: ClickhouseIntervalStartFormatTest.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.dsl.marshalling

import com.crobox.clickhouse.dsl.marshalling.ClickhouseJsonSupport.ClickhouseIntervalStartFormat
import org.joda.time.{DateTime, DateTimeZone}
import spray.json.{JsNumber, JsString}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ClickhouseIntervalStartFormatTest extends AnyFlatSpec with Matchers {

  val zone = DateTimeZone.forID("Europe/Bucharest")

  it should "read using month relative" in {
    ClickhouseIntervalStartFormat.read(
      JsString(s"${ClickhouseIntervalStartFormat.RelativeMonthsSinceUnixStart + 3}_$zone")
    ) should be(new DateTime("1970-04-01T00:00:00.000+02:00", DateTimeZone.UTC))
  }

  it should "read using 0 as JsString" in {
    ClickhouseIntervalStartFormat.read(JsString("0")) should be(
      new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC)
    )
  }

  it should "read using 0 as JsNumber" in {
    ClickhouseIntervalStartFormat.read(JsNumber(0)) should be(
      new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC)
    )
  }

  it should "read date only" in {
    ClickhouseIntervalStartFormat.read(JsString(s"1970-12-17_$zone")) should be(
      new DateTime("1970-12-17T00:00:00.000+02:00", DateTimeZone.UTC)
    )
  }

  it should "read timestamp" in {
    val date = DateTime.now(DateTimeZone.UTC)
    ClickhouseIntervalStartFormat.read(JsString(s"${date.getMillis}")) should be(date)
    ClickhouseIntervalStartFormat.read(JsNumber(date.getMillis)) should be(date)
  }

} 
Example 38
Source File: UnmarshallersTest.scala    From JustinDB   with Apache License 2.0
package justin.httpapi

import java.util.UUID

import org.scalatest.{FlatSpec, Matchers}
import spray.json.{DeserializationException, JsNumber, JsString}

class UnmarshallersTest extends FlatSpec with Matchers {

  behavior of "Unmarshaller"

  it should "encode JSON into UUID" in {
    val uuid = UUID.randomUUID()
    val jsString = JsString(uuid.toString)

    Unmarshallers.UuidFormat.read(jsString) shouldBe uuid
  }

  it should "decode UUID into JSON" in {
    val uuid = UUID.randomUUID()
    val expectedJSON = Unmarshallers.UuidFormat.write(uuid)

    expectedJSON shouldBe JsString(uuid.toString)
  }

  it should "handle not expected format of JSON" in {
    val jsNumber = JsNumber(1)

    intercept[DeserializationException] {
      Unmarshallers.UuidFormat.read(jsNumber)
    }
  }

  it should "handle wrong format of UUID" in {
    val fakeUUID = "1-2-3-4"
    val jsString = JsString(fakeUUID)

    intercept[DeserializationException] {
      Unmarshallers.UuidFormat.read(jsString)
    }
  }
} 
Example 39
Source File: InvokerInstanceIdTests.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.entity.test

import org.apache.openwhisk.core.entity.size.SizeInt
import org.apache.openwhisk.core.entity.{ByteSize, InstanceId, InvokerInstanceId}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsNumber, JsObject, JsString}

import scala.util.Success

@RunWith(classOf[JUnitRunner])
class InvokerInstanceIdTests extends FlatSpec with Matchers {

  behavior of "InvokerInstanceIdTests"

  val defaultUserMemory: ByteSize = 1024.MB
  it should "serialize and deserialize InvokerInstanceId" in {
    val i = InvokerInstanceId(0, userMemory = defaultUserMemory)
    i.serialize shouldBe JsObject(
      "instance" -> JsNumber(i.instance),
      "userMemory" -> JsString(i.userMemory.toString),
      "instanceType" -> JsString(i.instanceType)).compactPrint
    i.serialize shouldBe i.toJson.compactPrint
    InstanceId.parse(i.serialize) shouldBe Success(i)
  }

  it should "serialize and deserialize InvokerInstanceId with optional field" in {
    val i1 = InvokerInstanceId(0, uniqueName = Some("uniqueInvoker"), userMemory = defaultUserMemory)
    i1.serialize shouldBe JsObject(
      "instance" -> JsNumber(i1.instance),
      "userMemory" -> JsString(i1.userMemory.toString),
      "instanceType" -> JsString(i1.instanceType),
      "uniqueName" -> JsString(i1.uniqueName.getOrElse(""))).compactPrint
    i1.serialize shouldBe i1.toJson.compactPrint
    InstanceId.parse(i1.serialize) shouldBe Success(i1)

    val i2 = InvokerInstanceId(
      0,
      uniqueName = Some("uniqueInvoker"),
      displayedName = Some("displayedInvoker"),
      userMemory = defaultUserMemory)
    i2.serialize shouldBe JsObject(
      "instance" -> JsNumber(i2.instance),
      "userMemory" -> JsString(i2.userMemory.toString),
      "instanceType" -> JsString(i2.instanceType),
      "uniqueName" -> JsString(i2.uniqueName.getOrElse("")),
      "displayedName" -> JsString(i2.displayedName.getOrElse(""))).compactPrint
    i2.serialize shouldBe i2.toJson.compactPrint
    InstanceId.parse(i2.serialize) shouldBe Success(i2)
  }
} 
Example 40
Source File: TimeLimit.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.entity

import pureconfig._
import pureconfig.generic.auto._

import scala.concurrent.duration._
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import spray.json.JsNumber
import spray.json.JsValue
import spray.json.RootJsonFormat
import spray.json.deserializationError
import org.apache.openwhisk.core.ConfigKeys

protected[core] object TimeLimit extends ArgNormalizer[TimeLimit] {
  // The TimeLimit class itself and the MIN_DURATION/MAX_DURATION bounds (loaded
  // via pureconfig) are defined in the full source file and elided from this
  // excerpt, which keeps only the factory and the JSON serdes.
  @throws[IllegalArgumentException]
  protected[core] def apply(duration: FiniteDuration): TimeLimit = {
    require(duration != null, s"duration undefined")
    require(
      duration >= MIN_DURATION,
      s"duration ${duration.toMillis} milliseconds below allowed threshold of ${MIN_DURATION.toMillis} milliseconds")
    require(
      duration <= MAX_DURATION,
      s"duration ${duration.toMillis} milliseconds exceeds allowed threshold of ${MAX_DURATION.toMillis} milliseconds")
    new TimeLimit(duration)
  }

  override protected[core] implicit val serdes = new RootJsonFormat[TimeLimit] {
    def write(t: TimeLimit) = JsNumber(t.millis)

    def read(value: JsValue) =
      Try {
        val JsNumber(ms) = value
        require(ms.isWhole, "time limit must be whole number")
        TimeLimit(Duration(ms.intValue, MILLISECONDS))
      } match {
        case Success(limit)                       => limit
        case Failure(e: IllegalArgumentException) => deserializationError(e.getMessage, e)
        case Failure(e: Throwable)                => deserializationError("time limit malformed", e)
      }
  }
} 
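A round-trip sketch for the serdes above. It assumes 60 seconds lies inside the configured MIN_DURATION/MAX_DURATION window, and it must live under org.apache.openwhisk.core to see the protected[core] members.

package org.apache.openwhisk.core.entity

import scala.concurrent.duration._
import spray.json.JsNumber

object TimeLimitSerdesSketch extends App {
  val limit = TimeLimit(60.seconds)
  assert(TimeLimit.serdes.write(limit) == JsNumber(60000)) // serialized as millis
  assert(TimeLimit.serdes.read(JsNumber(60000)) == limit)  // parsed back
}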
Example 41
Source File: NoopActivationStore.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.database.memory

import java.time.Instant

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.apache.openwhisk.common.{Logging, TransactionId, WhiskInstants}
import org.apache.openwhisk.core.database.{
  ActivationStore,
  ActivationStoreProvider,
  CacheChangeNotification,
  UserContext
}
import org.apache.openwhisk.core.entity.{ActivationId, DocInfo, EntityName, EntityPath, Subject, WhiskActivation}
import spray.json.{JsNumber, JsObject}

import scala.concurrent.Future

object NoopActivationStore extends ActivationStore with WhiskInstants {
  private val emptyInfo = DocInfo("foo")
  private val emptyCount = JsObject("activations" -> JsNumber(0))
  private val dummyActivation = WhiskActivation(
    EntityPath("testnamespace"),
    EntityName("activation"),
    Subject(),
    ActivationId.generate(),
    start = Instant.now.inMills,
    end = Instant.now.inMills)

  override def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): Future[DocInfo] = Future.successful(emptyInfo)

  override def get(activationId: ActivationId, context: UserContext)(
    implicit transid: TransactionId): Future[WhiskActivation] = {
    val activation = dummyActivation.copy(activationId = activationId)
    Future.successful(activation)
  }

  override def delete(activationId: ActivationId, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): Future[Boolean] = Future.successful(true)

  override def countActivationsInNamespace(namespace: EntityPath,
                                           name: Option[EntityPath],
                                           skip: Int,
                                           since: Option[Instant],
                                           upto: Option[Instant],
                                           context: UserContext)(implicit transid: TransactionId): Future[JsObject] =
    Future.successful(emptyCount)

  override def listActivationsMatchingName(
    namespace: EntityPath,
    name: EntityPath,
    skip: Int,
    limit: Int,
    includeDocs: Boolean,
    since: Option[Instant],
    upto: Option[Instant],
    context: UserContext)(implicit transid: TransactionId): Future[Either[List[JsObject], List[WhiskActivation]]] =
    Future.successful(Right(List.empty))

  override def listActivationsInNamespace(
    namespace: EntityPath,
    skip: Int,
    limit: Int,
    includeDocs: Boolean,
    since: Option[Instant],
    upto: Option[Instant],
    context: UserContext)(implicit transid: TransactionId): Future[Either[List[JsObject], List[WhiskActivation]]] =
    Future.successful(Right(List.empty))
}

object NoopActivationStoreProvider extends ActivationStoreProvider {
  override def instance(actorSystem: ActorSystem, actorMaterializer: ActorMaterializer, logging: Logging) =
    NoopActivationStore
} 
Example 42
Source File: YARNComponentActor.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.yarn

import akka.actor.{Actor, ActorSystem}
import akka.http.scaladsl.model.{HttpMethods, StatusCodes}
import akka.stream.ActorMaterializer
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.yarn.YARNComponentActor.{CreateContainerAsync, RemoveContainer}
import spray.json.{JsArray, JsNumber, JsObject, JsString}

import scala.concurrent.ExecutionContext


object YARNComponentActor {
  case object CreateContainerAsync
  case class RemoveContainer(component_instance_name: String)
}

class YARNComponentActor(actorSystem: ActorSystem,
                         logging: Logging,
                         yarnConfig: YARNConfig,
                         serviceName: String,
                         imageName: ImageName)
    extends Actor {

  implicit val as: ActorSystem = actorSystem
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  implicit val ec: ExecutionContext = actorSystem.dispatcher

  //Adding a container via the YARN REST API is actually done by flexing the component's container pool to a certain size.
  // This actor must track the current containerCount in order to make the correct scale-up request.
  var containerCount: Int = 0

  def receive: PartialFunction[Any, Unit] = {
    case CreateContainerAsync =>
      sender ! createContainerAsync

    case RemoveContainer(component_instance_name) =>
      sender ! removeContainer(component_instance_name)

    case input =>
      throw new IllegalArgumentException("Unknown input: " + input)
  }

  def createContainerAsync(): Unit = {
    logging.info(this, s"Using YARN to create a container with image ${imageName.name}...")

    val body = JsObject("number_of_containers" -> JsNumber(containerCount + 1)).compactPrint
    val response = YARNRESTUtil.submitRequestWithAuth(
      yarnConfig.authType,
      HttpMethods.PUT,
      s"${yarnConfig.masterUrl}/app/v1/services/$serviceName/components/${imageName.name}",
      body)
    response match {
      case httpresponse(StatusCodes.OK, content) =>
        logging.info(this, s"Added container: ${imageName.name}. Response: $content")
        containerCount += 1

      case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
    }
  }

  def removeContainer(component_instance_name: String): Unit = {
    logging.info(this, s"Removing ${imageName.name} container: $component_instance_name ")
    if (containerCount <= 0) {
      logging.warn(this, "Already at 0 containers")
    } else {
      val body = JsObject(
        "components" -> JsArray(
          JsObject(
            "name" -> JsString(imageName.name),
            "decommissioned_instances" -> JsArray(JsString(component_instance_name))))).compactPrint
      val response = YARNRESTUtil.submitRequestWithAuth(
        yarnConfig.authType,
        HttpMethods.PUT,
        s"${yarnConfig.masterUrl}/app/v1/services/$serviceName",
        body)
      response match {
        case httpresponse(StatusCodes.OK, content) =>
          logging.info(
            this,
            s"Successfully removed ${imageName.name} container: $component_instance_name. Response: $content")
          containerCount -= 1

        case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
      }
    }
  }
}