org.json4s.JsonAST.JObject Scala Examples

The following examples show how to use org.json4s.JsonAST.JObject. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
Example 1
Source File: JsonProtocol.scala — from the BigDatalog project, Apache License 2.0
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

// JSON serialization for Spark standalone deploy-mode domain objects.
// Each writer renders one entity as a json4s JObject using the JsonDSL `~`
// field-concatenation operator; all wire field names are lowercase.
private[deploy] object JsonProtocol {
 // Worker identity, endpoint and resource counters (totals plus used/free splits).
 def writeWorkerInfo(obj: WorkerInfo): JObject = {
   ("id" -> obj.id) ~
   ("host" -> obj.host) ~
   ("port" -> obj.port) ~
   ("webuiaddress" -> obj.webUiAddress) ~
   ("cores" -> obj.cores) ~
   ("coresused" -> obj.coresUsed) ~
   ("coresfree" -> obj.coresFree) ~
   ("memory" -> obj.memory) ~
   ("memoryused" -> obj.memoryUsed) ~
   ("memoryfree" -> obj.memoryFree) ~
   ("state" -> obj.state.toString) ~
   ("lastheartbeat" -> obj.lastHeartbeat)
 }

  // Application runtime info; "memoryperslave" carries the per-executor MB figure.
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" ->  obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  // Static description of an application as submitted (no runtime state).
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  // One executor process on a worker, embedding its application description.
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  // Driver identity plus the resources it was requested with.
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  // Aggregate master view. Resource totals are summed over ALIVE workers only,
  // while the "workers" array still lists every known worker.
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  // Worker-local view: master links plus current and finished executors.
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 2
Source File: PaymentPath.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model

import org.json4s.JsonAST.JObject
import org.json4s.{DefaultFormats, Formats, JArray, JValue}
import stellar.sdk.KeyPair
import stellar.sdk.model.AmountParser.{AssetDeserializer, parseAsset}
import stellar.sdk.model.response.ResponseParser

/** A payment path: the source and destination amounts plus the chain of intermediate assets. */
case class PaymentPath(source: Amount, destination: Amount, path: Seq[Asset])

// Parses a Horizon path-finding record: the source/destination amounts live in
// "source_"/"destination_"-prefixed fields; "path" is an array of asset objects.
object PaymentPathDeserializer extends ResponseParser[PaymentPath]({
  o: JObject =>
    implicit val formats = DefaultFormats
    implicit val assetDeserializer = AssetDeserializer

    PaymentPath(
      source = AmountParser.amount("source_", o),
      destination = AmountParser.amount("destination_", o),
      path = {
        // Each element of "path" is an un-prefixed asset object.
        val JArray(values) = (o \ "path").extract[JArray]
        values.map { jv => parseAsset("", jv) }
      }
    )
})

// Parses Horizon asset and amount fields shared by several response types.
object AmountParser {

  // Explicit type annotation added: implicit definitions should always carry
  // one (required by Scala 3, and avoids inference surprises at use sites).
  implicit val formats: Formats = DefaultFormats

  /**
   * Builds an [[Asset]] from the `{prefix}asset_type`, `{prefix}asset_code`
   * and `{prefix}asset_issuer` fields of the given JSON value.
   *
   * `code` and `issuer` are deliberately lazy (`def`): native assets carry
   * neither field, so extracting eagerly would fail for them.
   *
   * @throws RuntimeException if the asset type is not a recognised value
   */
  def parseAsset(prefix: String, o: JValue)(implicit formats: Formats): Asset = {
    val assetType = (o \ s"${prefix}asset_type").extract[String]
    def code = (o \ s"${prefix}asset_code").extract[String]
    def issuer = KeyPair.fromAccountId((o \ s"${prefix}asset_issuer").extract[String])
    assetType match {
      case "native" => NativeAsset
      case "credit_alphanum4" => IssuedAsset4(code, issuer)
      case "credit_alphanum12" => IssuedAsset12(code, issuer)
      case t => throw new RuntimeException(s"Unrecognised ${prefix}asset type: $t")
    }
  }

  /**
   * Builds an [[Amount]] from the `{prefix}amount` field (a decimal string)
   * typed by the asset found under the same prefix.
   *
   * NOTE(review): `.get` propagates any failure from `toBaseUnits` as the
   * underlying exception; callers appear to rely on that — TODO confirm.
   */
  def amount(prefix: String, o: JObject)(implicit formats: Formats): Amount = {
    val asset = parseAsset(prefix, o)
    val units = Amount.toBaseUnits((o \ s"${prefix}amount").extract[String]).get
    Amount(units, asset)
  }

  /** Deserializer for an un-prefixed asset embedded in a JSON response. */
  object AssetDeserializer extends ResponseParser[Asset](parseAsset("", _))
}
Example 3
Source File: Trade.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model

import java.time.ZonedDateTime

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk.model.response.ResponseParser
import stellar.sdk.{KeyPair, PublicKeyOps}

/**
 * A single executed trade between a base and a counter party.
 * `baseIsSeller` indicates which side offered the base asset for sale.
 */
case class Trade(id: String, ledgerCloseTime: ZonedDateTime, offerId: Long,
                 baseOfferId: Long, counterOfferId: Long,
                 baseAccount: PublicKeyOps, baseAmount: Amount,
                 counterAccount: PublicKeyOps, counterAmount: Amount,
                 baseIsSeller: Boolean)


// Parses a Horizon trade record. The local helpers all close over `o`, the
// JSON object under deserialization; numeric fields arrive as JSON strings.
object TradeDeserializer extends ResponseParser[Trade]({
  o: JObject =>
    implicit val formats = DefaultFormats

    // Account id string at `accountKey` -> public key.
    def account(accountKey: String = "account") = KeyPair.fromAccountId((o \ accountKey).extract[String])

    // ISO-8601 timestamp string at `key` -> ZonedDateTime.
    def date(key: String) = ZonedDateTime.parse((o \ key).extract[String])

    // Decimal-as-string field at `key` -> Double.
    def doubleFromString(key: String) = (o \ key).extract[String].toDouble

    // Builds the asset at `{prefix}asset_*`; code/issuer are lazy (`def`)
    // because native assets carry neither field.
    def asset(prefix: String = "", issuerKey: String = "asset_issuer") = {
      def assetCode = (o \ s"${prefix}asset_code").extract[String]

      def assetIssuer = KeyPair.fromAccountId((o \ s"$prefix$issuerKey").extract[String])

      (o \ s"${prefix}asset_type").extract[String] match {
        case "native" => NativeAsset
        case "credit_alphanum4" => IssuedAsset4(assetCode, assetIssuer)
        case "credit_alphanum12" => IssuedAsset12(assetCode, assetIssuer)
        case t => throw new RuntimeException(s"Unrecognised asset type '$t'")
      }
    }

    // Amount in base units, typed by the asset found under the same prefix.
    def amount(prefix: String = "") = {
      val units = Amount.toBaseUnits(doubleFromString(s"${prefix}amount")).get
      asset(prefix) match {
        case nna: NonNativeAsset => IssuedAmount(units, nna)
        case NativeAsset => NativeAmount(units)
      }
    }

    Trade(
      id = (o \ "id").extract[String],
      ledgerCloseTime = date("ledger_close_time"),
      offerId = (o \ "offer_id").extract[String].toLong,
      baseOfferId = (o \ "base_offer_id").extract[String].toLong,
      counterOfferId = (o \ "counter_offer_id").extract[String].toLong,
      baseAccount = account("base_account"),
      baseAmount = amount("base_"),
      counterAccount = account("counter_account"),
      counterAmount = amount("counter_"),
      baseIsSeller = (o \ "base_is_seller").extract[Boolean]
    )
})
Example 4
Source File: TradeAggregation.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model

import java.time.Instant
import java.util.concurrent.TimeUnit

import org.json4s.JsonAST.JObject
import org.json4s.{DefaultFormats, JValue}
import stellar.sdk.model.response.ResponseParser

import scala.concurrent.duration.Duration

/** One trade-aggregation (candlestick) bucket: volumes, average and OHLC prices. */
case class TradeAggregation(instant: Instant, tradeCount: Int, baseVolume: Double, counterVolume: Double,
                            average: Double, open: Price, high: Price, low: Price, close: Price)

// Parses one trade-aggregation record. Most numeric fields arrive as JSON
// strings and are converted explicitly.
object TradeAggregationDeserializer extends ResponseParser[TradeAggregation]({ o: JObject =>
  implicit val formats = DefaultFormats

  // Prices are rationals with numerator "N" and denominator "D".
  // NOTE(review): some Horizon versions serve lowercase "n"/"d" — confirm
  // against the server version this SDK targets.
  def price(p: JValue): Price = Price((p \ "N").extract[Int], (p \ "D").extract[Int])

  TradeAggregation(
    // "timestamp" is epoch milliseconds encoded as a string.
    instant = Instant.ofEpochMilli((o \ "timestamp").extract[String].toLong),
    tradeCount = (o \ "trade_count").extract[String].toInt,
    baseVolume = (o \ "base_volume").extract[String].toDouble,
    counterVolume = (o \ "counter_volume").extract[String].toDouble,
    average = (o \ "avg").extract[String].toDouble,
    open = price(o \ "open_r"),
    high = price(o \ "high_r"),
    low = price(o \ "low_r"),
    close = price(o \ "close_r"))
})

object TradeAggregation {

  /** A supported bucket width for trade-aggregation queries. */
  sealed class Resolution(val duration: Duration)

  // Each resolution is written directly in its natural time unit; the values
  // are identical to deriving each from the previous by multiplication.
  val OneMinute = new Resolution(Duration.create(1, TimeUnit.MINUTES))
  val FiveMinutes = new Resolution(Duration.create(5, TimeUnit.MINUTES))
  val FifteenMinutes = new Resolution(Duration.create(15, TimeUnit.MINUTES))
  val OneHour = new Resolution(Duration.create(1, TimeUnit.HOURS))
  val OneDay = new Resolution(Duration.create(24, TimeUnit.HOURS))
  val OneWeek = new Resolution(Duration.create(7, TimeUnit.DAYS))

}
Example 5
Source File: TransactionHistory.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.result

import java.time.ZonedDateTime

import org.json4s.{DefaultFormats, Formats}
import org.json4s.JsonAST.JObject
import stellar.sdk.model._
import stellar.sdk.model.ledger.TransactionLedgerEntries.arr
import stellar.sdk.model.ledger.{LedgerEntryChange, LedgerEntryChanges, TransactionLedgerEntries}
import stellar.sdk.model.response.ResponseParser
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{KeyPair, PublicKey}

import scala.util.Try


/**
 * A transaction as reported by Horizon's transaction-history endpoints.
 * The XDR payloads (envelope/result/meta) are kept as base64 strings and
 * decoded lazily on demand.
 */
case class TransactionHistory(hash: String, ledgerId: Long, createdAt: ZonedDateTime, account: PublicKey,
                              sequence: Long, maxFee: NativeAmount, feeCharged: NativeAmount, operationCount: Int,
                              memo: Memo, signatures: Seq[String], envelopeXDR: String, resultXDR: String,
                              resultMetaXDR: String, feeMetaXDR: String, validAfter: Option[ZonedDateTime],
                              validBefore: Option[ZonedDateTime], feeBump: Option[FeeBumpHistory]) {

  // Decoded once on first access and cached.
  lazy val result: TransactionResult = TransactionResult.decodeXDR(resultXDR)

  // Unlike `result`, these decode on every call (not cached).
  def ledgerEntries: TransactionLedgerEntries = TransactionLedgerEntries.decodeXDR(resultMetaXDR)
  def feeLedgerEntries: Seq[LedgerEntryChange] = LedgerEntryChanges.decodeXDR(feeMetaXDR)

  @deprecated("Replaced by `feeCharged`", "v0.7.2")
  val feePaid: NativeAmount = feeCharged

}


// Parses a Horizon transaction-history record. For fee-bump transactions the
// response nests the original transaction under "inner_transaction"; when
// present, the inner hash/max_fee/signatures replace the outer ones and the
// outer values are surfaced via `feeBump`.
// The former `extends { } with ResponseParser...` empty early-initializer was
// removed: early initializers are deprecated (dropped in Scala 3) and the
// empty block had no effect.
object TransactionHistoryDeserializer extends ResponseParser[TransactionHistory]({
  o: JObject =>
    implicit val formats: Formats = DefaultFormats

    val maxFee = NativeAmount((o \ "max_fee").extract[String].toInt)
    val signatures = (o \ "signatures").extract[List[String]]
    val hash = (o \ "hash").extract[String]

    // Present only for fee-bump transactions: the wrapped inner transaction's
    // own hash, max fee and signatures.
    val inner = for {
      hash <- (o \ "inner_transaction" \ "hash").extractOpt[String]
      maxFee <- (o \ "inner_transaction" \ "max_fee").extractOpt[Int].map(NativeAmount(_))
      signatures <- (o \ "inner_transaction" \ "signatures").extractOpt[List[String]]
    } yield (hash, maxFee, signatures)

    TransactionHistory(
      // For fee-bumps the INNER values take precedence; the outer values are
      // carried in `feeBump` below.
      hash = inner.map(_._1).getOrElse(hash),
      ledgerId = (o \ "ledger").extract[Long],
      createdAt = ZonedDateTime.parse((o \ "created_at").extract[String]),
      account = KeyPair.fromAccountId((o \ "source_account").extract[String]),
      sequence = (o \ "source_account_sequence").extract[String].toLong,
      maxFee = inner.map(_._2).getOrElse(maxFee),
      feeCharged = NativeAmount((o \ "fee_charged").extract[String].toInt),
      operationCount = (o \ "operation_count").extract[Int],
      memo = (o \ "memo_type").extract[String] match {
        case "none" => NoMemo
        case "id" => MemoId(BigInt((o \ "memo").extract[String]).toLong)
        // An absent "memo" field is treated as the empty text memo.
        case "text" => MemoText((o \ "memo").extractOpt[String].getOrElse(""))
        case "hash" => MemoHash(base64((o \ "memo").extract[String]).toIndexedSeq)
        case "return" => MemoReturnHash(base64((o \ "memo").extract[String]).toIndexedSeq)
      },
      signatures = inner.map(_._3).getOrElse(signatures),
      envelopeXDR = (o \ "envelope_xdr").extract[String],
      resultXDR = (o \ "result_xdr").extract[String],
      resultMetaXDR = (o \ "result_meta_xdr").extract[String],
      feeMetaXDR = (o \ "fee_meta_xdr").extract[String],
      // TODO (jem) - Remove the Try wrappers when https://github.com/stellar/go/issues/1381 is fixed.
      validBefore = Try((o \ "valid_before").extractOpt[String].map(ZonedDateTime.parse)).getOrElse(None),
      validAfter = Try((o \ "valid_after").extractOpt[String].map(ZonedDateTime.parse)).getOrElse(None),
      feeBump = inner.map { _ => FeeBumpHistory(maxFee, hash, signatures) }
    )
})
Example 6
Source File: PageSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.inet

import java.net.HttpURLConnection.{HTTP_BAD_REQUEST, HTTP_NOT_FOUND}

import okhttp3.HttpUrl
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.native.JsonMethods
import org.specs2.mutable.Specification
import stellar.sdk.model.response.ResponseParser

// Specification for parsing Horizon paged responses: error paths (404 -> empty
// page, 400 -> HorizonBadRequest) and extraction of `_embedded.records` with
// the `next` page link.
class PageSpec extends Specification {

  implicit val formats = DefaultFormats + RawPageDeserializer + HelloDeserializer

  "page parsing" should {
    "return an empty page if no results were found" >> {
      val page = PageParser.parse[String](HttpUrl.parse("http://localhost/"), HTTP_NOT_FOUND, "")
      page.xs must beEmpty
    }

    "throw a bad request exception with the reasons when provided" >> {
      val url = HttpUrl.parse("http://localhost/")
      PageParser.parse[String](url, HTTP_BAD_REQUEST,
        """{
          |  "type": "https://stellar.org/horizon-errors/bad_request",
          |  "title": "Bad Request",
          |  "status": 400,
          |  "detail": "The request you sent was invalid in some way.",
          |  "extras": {
          |    "invalid_field": "cursor",
          |    "reason": "cursor must contain exactly one colon"
          |  }
          |}""".stripMargin) must throwA[HorizonBadRequest].like { e =>
        e.getMessage mustEqual "Bad request. http://localhost/ -> cursor must contain exactly one colon"
      }
    }

    "throw a bad request exception with the full document when the reason is not provided" >> {
      val url = HttpUrl.parse("http://localhost/")
      PageParser.parse[String](url, HTTP_BAD_REQUEST, "random text") must throwA[HorizonBadRequest].like { e =>
        e.getMessage mustEqual "Bad request. http://localhost/ -> random text"
      }
    }

    "parse the member values and provide a link to the next page" >> {
      // A realistic Horizon page document with HAL-style `_links` and two records.
      val doc =
        """
          |{
          |  "_links": {
          |    "self": {
          |      "href": "https://horizon-testnet.stellar.org/hello?cursor=\u0026limit=10\u0026order=asc"
          |    },
          |    "next": {
          |      "href": "https://horizon-testnet.stellar.org/hello?cursor=2045052972961793-0\u0026limit=10\u0026order=asc"
          |    },
          |    "prev": {
          |      "href": "https://horizon-testnet.stellar.org/hello?cursor=940258535411713-0\u0026limit=10\u0026order=desc"
          |    }
          |  },
          |  "_embedded": {
          |    "records": [
          |      {"hello":"world"},
          |      {"hello":"whirled"}
          |    ]
          |  }
          |}
        """.stripMargin

      JsonMethods.parse(doc).extract[RawPage].parse[String](HttpUrl.parse("http://localhost/")) mustEqual Page(
        List("world", "whirled"),
        nextLink = Some(HttpUrl.parse("https://horizon-testnet.stellar.org/hello?cursor=2045052972961793-0&limit=10&order=asc"))
      )
    }
  }

  // Minimal record deserializer used by the fixture above: pulls the "hello" value.
  object HelloDeserializer extends ResponseParser[String]({ o: JObject =>
    implicit val formats = DefaultFormats
    (o \ "hello").extract[String]
  })

}
Example 7
Source File: DataTypeUtils.scala    From incubator-livy   with Apache License 2.0 5 votes vote down vote up
package org.apache.livy.thriftserver.types

import org.json4s.{DefaultFormats, JValue}
import org.json4s.JsonAST.{JObject, JString}
import org.json4s.jackson.JsonMethods.parse


  // Converts a Spark schema JSON string (the "fields" array of a StructType
  // JSON document) into the Thrift server's Schema, one Field per entry.
  // NOTE(review): relies on `toFieldType` and an implicit Formats from the
  // enclosing scope, which is not visible in this excerpt — confirm there.
  def schemaFromSparkJson(sparkJson: String): Schema = {
    val schema = parse(sparkJson) \ "fields"
    val fields = schema.children.map { field =>
      val name = (field \ "name").extract[String]
      val hiveType = toFieldType(field \ "type")
      // TODO: retrieve comment from metadata
      Field(name, hiveType, "")
    }
    Schema(fields.toArray)
  }
} 
Example 8
Source File: JsonProtocol.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

// JSON serialization of standalone deploy-mode entities for the master/worker
// web endpoints. Writers compose JObject fields with the JsonDSL `~` operator.
private[deploy] object JsonProtocol {
 // Worker identity, endpoint and resource counters.
 def writeWorkerInfo(obj: WorkerInfo): JObject = {
   ("id" -> obj.id) ~
   ("host" -> obj.host) ~
   ("port" -> obj.port) ~
   ("webuiaddress" -> obj.webUiAddress) ~
   ("cores" -> obj.cores) ~
   ("coresused" -> obj.coresUsed) ~
   ("coresfree" -> obj.coresFree) ~
   ("memory" -> obj.memory) ~
   ("memoryused" -> obj.memoryUsed) ~
   ("memoryfree" -> obj.memoryFree) ~
   ("state" -> obj.state.toString) ~
   ("lastheartbeat" -> obj.lastHeartbeat)
 }

  // Application runtime info ("memoryperslave" = per-executor MB).
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" -> obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  // Static application description (no runtime state).
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  // One executor on a worker, with its application description embedded.
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  // Driver identity and requested resources.
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  // Master summary: totals are computed over ALIVE workers only, but the
  // "workers" array lists every known worker.
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  // Worker summary: master links plus live and finished executors.
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 9
Source File: FilterCluKeysFromDirectory.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.eidos.apps.batch

import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.meta.CluText
import org.json4s.JsonAST.JField
import org.json4s.JsonAST.JObject
import org.json4s.JsonAST.JString

import scala.collection.mutable

// Scans every .json metadata file in the directory given as args(0), collects
// all values of "N" fields found anywhere in each document, and prints the
// accumulated set of keys.
object FilterCluKeysFromDirectory extends App {
  // Directory containing the .json metadata files to scan.
  val metaDir = args(0)
  val files = FileUtils.findFiles(metaDir, "json")
  // Accumulates distinct keys across all files.
  val keys = mutable.Set.empty[String]

  files.foreach { file =>
    try {
      println(s"Extracting from ${file.getName}")
      val jValue = CluText.getJValue(file)

      // json4s for-comprehension: visits every JObject in the tree and
      // yields the string value of each "N" field.
      val newKeys = for {
        JObject(mt) <- jValue
        JField("N", JString(key)) <- mt
      } yield
        key // `key` is already a String; the previous `.toString` was redundant

      keys ++= newKeys
      println(keys)
    }
    catch {
      // Best-effort per file: log and continue with the remaining files.
      case exception: Exception =>
        println(s"Exception for file $file")
        exception.printStackTrace()
    }
  }
  println(keys)
}
Example 10
Source File: BatchClientSuite.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.services.batch_client

import is.hail.utils._

import org.json4s.JsonAST.{JArray, JBool, JInt, JObject, JString}
import org.json4s.{DefaultFormats, Formats}
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test

// Integration smoke test: submits a one-job batch that echoes "Hello, world!"
// in an ubuntu:18.04 container and asserts the batch reaches the "success"
// state. Requires a reachable batch service — not a pure unit test.
class BatchClientSuite extends TestNGSuite {
  @Test def testBasic(): Unit = {
    val client = new BatchClient()
    // Random url-safe token ties this submission to this test run.
    val token = tokenUrlSafe(32)
    val batch = client.run(
      JObject(
        "billing_project" -> JString("test"),
        "n_jobs" -> JInt(1),
        "token" -> JString(token)),
      FastIndexedSeq(
        JObject(
          "always_run" -> JBool(false),
          "image" -> JString("ubuntu:18.04"),
          "mount_docker_socket" -> JBool(false),
          "command" -> JArray(List(
            JString("/bin/bash"),
            JString("-c"),
            JString("echo 'Hello, world!'"))),
          "job_id" -> JInt(0),
          "parent_ids" -> JArray(List()))))
    implicit val formats: Formats = DefaultFormats
    assert((batch \ "state").extract[String] == "success")
  }
}
Example 11
Source File: JsonProtocol.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

// JSON serialization of standalone deploy-mode entities, rendered with the
// json4s JsonDSL `~` field-concatenation operator.
private[deploy] object JsonProtocol {
 // Worker identity, endpoint and resource counters.
 def writeWorkerInfo(obj: WorkerInfo): JObject = {
   ("id" -> obj.id) ~
   ("host" -> obj.host) ~
   ("port" -> obj.port) ~
   ("webuiaddress" -> obj.webUiAddress) ~
   ("cores" -> obj.cores) ~
   ("coresused" -> obj.coresUsed) ~
   ("coresfree" -> obj.coresFree) ~
   ("memory" -> obj.memory) ~
   ("memoryused" -> obj.memoryUsed) ~
   ("memoryfree" -> obj.memoryFree) ~
   ("state" -> obj.state.toString) ~
   ("lastheartbeat" -> obj.lastHeartbeat)
 }

  // Application runtime info ("memoryperslave" = per-executor MB).
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" -> obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  // Static application description (no runtime state).
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  // One executor on a worker, with its application description embedded.
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  // Driver identity and requested resources.
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  // Master summary: totals are computed over ALIVE workers only, but the
  // "workers" array lists every known worker.
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  // Worker summary: master links plus live and finished executors.
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 12
Source File: JsonProtocol.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

// JSON serialization of standalone deploy-mode entities, rendered with the
// json4s JsonDSL `~` field-concatenation operator.
private[deploy] object JsonProtocol {

  // Worker identity, endpoint and resource counters.
  def writeWorkerInfo(obj: WorkerInfo): JObject = {
    ("id" -> obj.id) ~
    ("host" -> obj.host) ~
    ("port" -> obj.port) ~
    ("webuiaddress" -> obj.webUiAddress) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("coresfree" -> obj.coresFree) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("memoryfree" -> obj.memoryFree) ~
    ("state" -> obj.state.toString) ~
    ("lastheartbeat" -> obj.lastHeartbeat)
  }

  // Application runtime info ("memoryperslave" = per-executor MB).
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" -> obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  // Static application description (no runtime state).
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  // One executor on a worker, with its application description embedded.
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  // Driver identity and requested resources.
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  // Master summary. FIX: resource totals previously summed over ALL workers,
  // including DEAD ones, overstating cluster capacity; they now sum over
  // ALIVE workers only, matching the behavior of later versions of this file.
  // The "workers" array still lists every known worker.
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  // Worker summary: master links plus live and finished executors.
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 13
Source File: OpPipelineStageWriter.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.stages

import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.util.MLWriter
import org.apache.spark.ml.{Estimator, SparkDefaultParamsReadWrite}
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.util.{Failure, Success}


  /**
   * Serializes this stage (metadata plus constructor args) as a json4s JObject.
   * Estimators are not serialized and yield an empty object. Rewritten as an
   * expression-bodied match to remove the early `return` (Scala anti-pattern).
   *
   * @param path save path propagated to Spark-wrapped stages
   * @return the rendered JObject
   * @throws RuntimeException if the stage's writer fails
   */
  def writeToJson(path: String): JObject = stage match {
    case _: Estimator[_] =>
      JObject() // no need to serialize estimators
    case _ =>
      stage match {
        // Set save path for all Spark wrapped stages of type [[SparkWrapperParams]]
        // so they can save themselves alongside this stage's metadata.
        case s: SparkWrapperParams[_] => s.setStageSavePath(path)
        case _ =>
      }
      // We produce stage metadata for all the Spark params
      val metadata = SparkDefaultParamsReadWrite.getMetadataToSave(stage)

      // Write out the stage using the specified writer instance
      val writer = readerWriterFor[OpPipelineStageBase](stage.getClass.asInstanceOf[Class[OpPipelineStageBase]])
      val stageJson: JValue = writer.write(stage) match {
        case Failure(err) => throw new RuntimeException(s"Failed to write out stage '${stage.uid}'", err)
        case Success(json) => json
      }

      // Join metadata & with stage ctor args
      val j = metadata.merge(JObject(FieldNames.CtorArgs.entryName -> stageJson))
      render(j).asInstanceOf[JObject]
  }

} 
Example 14
Source File: SparkStageParam.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.stages

import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.param.{Param, ParamPair, Params}
import org.apache.spark.ml.util.{Identifiable, MLReader, MLWritable}
import org.apache.spark.util.SparkUtils
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import org.json4s.{DefaultFormats, Formats, JString}

// A Param whose value is an optional Spark PipelineStage. The stage itself is
// persisted separately via MLWritable/MLReader; the JSON descriptor holds only
// the stage's uid, class name and save path.
class SparkStageParam[S <: PipelineStage with Params]
(
  parent: String,
  name: String,
  doc: String,
  isValid: Option[S] => Boolean
) extends Param[Option[S]](parent, name, doc, isValid) {

  import SparkStageParam._

  
  // Decodes a stage from its JSON descriptor by loading it from
  // `{path}/{uid}` with the reflectively-obtained `read` (MLReader) of
  // `className`. NOTE(review): `savePath` is a member declared elsewhere in
  // this class (not visible in this excerpt); decoding records the path in it
  // as a side effect.
  override def jsonDecode(jsonStr: String): Option[S] = {
    val json = parse(jsonStr)
    val uid = (json \ "uid").extractOpt[String]
    val path = (json \ "path").extractOpt[String]

    path -> uid match {
      // Missing path/uid, or the NoUID sentinel: nothing to load.
      case (None, _) | (_, None) | (_, Some(NoUID)) =>
        savePath = None
        None
      case (Some(p), Some(stageUid)) =>
        savePath = Option(p)
        val stagePath = new Path(p, stageUid).toString
        val className = (json \ "className").extract[String]
        val cls = SparkUtils.classForName(className)
        // Invoke the companion's static `read` method reflectively and load.
        val stage = cls.getMethod("read").invoke(null).asInstanceOf[MLReader[PipelineStage]].load(stagePath)
        Option(stage).map(_.asInstanceOf[S])
    }
  }
}

object SparkStageParam {
  implicit val formats: Formats = DefaultFormats
  val NoClass = ""
  val NoUID = ""

  /**
   * Injects the given save path into the Spark-stage entry of a params
   * metadata JSON object, leaving every other entry untouched.
   *
   * @param jValue params metadata; must be a JObject
   * @param path   save path to record on the Spark stage entry
   * @throws IllegalArgumentException if `jValue` is not a JObject
   */
  def updateParamsMetadataWithPath(jValue: JValue, path: String): JValue = jValue match {
    case JObject(pairs) =>
      val rewritten = pairs.map {
        case (name, value) if name == SparkWrapperParams.SparkStageParamName =>
          name -> value.merge(JObject("path" -> JString(path)))
        case untouched => untouched
      }
      JObject(rewritten)
    case j => throw new IllegalArgumentException(s"Cannot recognize JSON Spark params metadata: $j")
  }

}
Example 15
Source File: FeatureJsonHelper.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.features

import com.salesforce.op.features.types._
import com.salesforce.op.stages.{OPStage, OpPipelineStage}
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods
import org.json4s.jackson.JsonMethods._
import org.json4s.{DefaultFormats, Formats}

import scala.reflect.runtime.universe.WeakTypeTag
import scala.util.Try



  /**
   * Reconstructs an OPFeature from its JSON representation.
   *
   * @param json     the serialized feature
   * @param stages   known stages, keyed by uid (must contain the origin stage)
   * @param features known features, keyed by uid (must contain all parents)
   * @return Success(feature), or Failure wrapping a RuntimeException when the
   *         origin stage or any parent feature cannot be resolved
   */
  def fromJson(
    json: JValue,
    stages: Map[String, OPStage],
    features: Map[String, OPFeature]
  ): Try[OPFeature] = Try {
    val typeName = (json \ "typeName").extract[String]
    val uid = (json \ "uid").extract[String]
    val name = (json \ "name").extract[String]
    val isResponse = (json \ "isResponse").extract[Boolean]
    val originStageUid = (json \ "originStage").extract[String]
    val parentUids = (json \ "parents").extract[Array[String]]

    // getOrElse(throw …) instead of isEmpty + .get: same failure, no Option.get.
    val originStage: OPStage = stages.getOrElse(originStageUid,
      throw new RuntimeException(s"Origin stage $originStageUid not found for feature $name ($uid)")
    )

    // Order is important and so are duplicates, eg f = f1 + f1 has 2 parents but both the same feature
    val parents: Seq[OPFeature] = parentUids.flatMap(id => features.get(id))
    if (parents.length != parentUids.length) {
      throw new RuntimeException(s"Not all the parent features were found for feature $name ($uid)")
    }

    val wtt = FeatureType.featureTypeTag(typeName).asInstanceOf[WeakTypeTag[FeatureType]]
    Feature[FeatureType](
      uid = uid,
      name = name,
      isResponse = isResponse,
      parents = parents,
      originStage = originStage.asInstanceOf[OpPipelineStage[FeatureType]]
    )(wtt = wtt)

  }

} 
Example 16
Source File: JsonProtocol.scala — from spark1.52 (Apache License 2.0), 5 votes
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

/**
 * Serializes Spark standalone deploy-mode entities (workers, applications,
 * drivers, master/worker state) to JSON for the web UI and REST endpoints.
 * Built with the json4s DSL, where `~` accumulates fields into a JObject.
 */
private[deploy] object JsonProtocol {
 // One worker: identity, address, resource totals/usage and last heartbeat.
 // (The original one-space indent of this method is preserved.)
 def writeWorkerInfo(obj: WorkerInfo): JObject = {
   ("id" -> obj.id) ~
   ("host" -> obj.host) ~
   ("port" -> obj.port) ~
   ("webuiaddress" -> obj.webUiAddress) ~
   ("cores" -> obj.cores) ~
   ("coresused" -> obj.coresUsed) ~
   ("coresfree" -> obj.coresFree) ~
   ("memory" -> obj.memory) ~
   ("memoryused" -> obj.memoryUsed) ~
   ("memoryfree" -> obj.memoryFree) ~
   ("state" -> obj.state.toString) ~
   ("lastheartbeat" -> obj.lastHeartbeat)
 }

  /** One application: descriptor fields plus runtime state and duration. */
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" ->  obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  /** Static description of a submitted application (no runtime state). */
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  /** One executor process, embedding its application's description. */
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  /** One driver: identity, lifecycle state and requested resources. */
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  /** Full master snapshot. Resource aggregates count only alive workers. */
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  /** Full worker snapshot, including running and finished executors. */
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 17
Source File: AssetResponse.scala — from scala-stellar-sdk (Apache License 2.0), 5 votes
package stellar.sdk.model.response

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk._
import stellar.sdk.model.{Amount, IssuedAsset12, IssuedAsset4, NonNativeAsset}

/** Horizon asset record: an issued asset plus aggregate stats (total amount in base units, holder count) and its authorization flags. */
case class AssetResponse(asset: NonNativeAsset, amount: Long, numAccounts: Int, authRequired: Boolean, authRevocable: Boolean)

/**
 * Deserializes a Horizon asset record into an [[AssetResponse]].
 * Fails with RuntimeException on unknown asset types or invalid amounts.
 */
object AssetRespDeserializer extends ResponseParser[AssetResponse]({ o: JObject =>
  implicit val formats = DefaultFormats
  val asset = {
    val code = (o \ "asset_code").extract[String]
    val issuer = KeyPair.fromAccountId((o \ "asset_issuer").extract[String])
    // Only the two issued-asset encodings are valid here; native assets do not appear.
    (o \ "asset_type").extract[String] match {
      case "credit_alphanum4" => IssuedAsset4(code, issuer)
      case "credit_alphanum12" => IssuedAsset12(code, issuer)
      case t => throw new RuntimeException(s"Unrecognised asset type: $t")
    }
  }
  // Extract the amount once and reuse it in the failure message. The original
  // re-extracted the field as Double inside the error branch, which could
  // itself throw a MappingException and mask the intended "Invalid asset
  // amount" error.
  val amountString = (o \ "amount").extract[String]
  val amount = Amount.toBaseUnits(amountString.toDouble).getOrElse(
    throw new RuntimeException(s"Invalid asset amount: $amountString"))
  val numAccounts = (o \ "num_accounts").extract[Int]
  val authRequired = (o \ "flags" \ "auth_required").extract[Boolean]
  val authRevocable = (o \ "flags" \ "auth_revocable").extract[Boolean]
  AssetResponse(asset, amount, numAccounts, authRequired, authRevocable)
})
Example 18
Source File: AdminSvc.scala — from squbs (Apache License 2.0), 5 votes
package org.squbs.admin

import java.net.URLEncoder

import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model._
import org.json4s.JsonAST.{JObject, JString}
import org.json4s.jackson.JsonMethods._
import org.squbs.unicomplex.{RouteDefinition, WebContext}
import org.squbs.util.ConfigUtil._

/**
 * Admin web service exposing JMX MBeans as JSON over HTTP.
 *
 * GET /            -> JSON object mapping each non-excluded MBean name to the
 *                     URL where its attributes can be fetched.
 * GET /bean/<name> -> JSON rendering of the MBean's attributes.
 *
 * Exclusions come from config key "squbs.admin.exclusions":
 *   "bean.name"        hides a whole bean,
 *   "bean.name::field" hides a single field of a bean.
 */
class AdminSvc extends RouteDefinition with WebContext {

  // Mount point depends on whether this cube is registered under a web context.
  val prefix = if (webContext == "") "/bean" else s"/$webContext/bean"

  val exclusions = context.system.settings.config.get[Seq[String]]("squbs.admin.exclusions", Seq.empty[String]).toSet
  // Specs without "::" exclude whole beans; the rest exclude individual fields.
  val (exBeans, exFieldSet) = exclusions partition { !_.contains("::") }

  // Bean name -> set of excluded field names.
  // Strict `map` instead of the original `mapValues`: `mapValues` returns a
  // lazy view that recomputes on every lookup and is deprecated since Scala
  // 2.13. Result type and contents are unchanged.
  val exFields = exFieldSet.map { fieldSpec =>
    val fields = fieldSpec split "::"
    fields(0) -> fields(1)
  }.groupBy(_._1).map { case (bean, specs) => bean -> specs.map(_._2) }


  val route =
    get {
      pathEndOrSingleSlash {
        extractUri { uri =>
          complete {
            // Index page: one link per visible MBean. '=' cannot appear raw in
            // a path segment, so it is transposed to '~' before URL-encoding.
            val kv = MBeanUtil.allObjectNames collect {
              case name if !(exBeans contains name) =>
                val resource = Path(s"$prefix/${URLEncoder.encode(name.replace('=', '~'), "UTF-8")}")
                name -> JString(uri.withPath(resource).toString())
            }
            HttpResponse(entity = HttpEntity(ContentTypes.`application/json`, pretty(render(JObject(kv)))))
          }
        }
      } ~
      path("bean" / Segment) { encName =>
        complete {
          // Reverse the '~' encoding; '%' -> '/' restores slashes in bean names.
          val name = encName.replace('~', '=').replace('%', '/')
          val response: HttpResponse =
            if (exBeans contains name) HttpResponse(StatusCodes.NotFound, entity = StatusCodes.NotFound.defaultMessage)
            else MBeanUtil.asJSON(name, exFields getOrElse (name, Set.empty))
              .map { json => HttpResponse(entity = json) }
              .getOrElse (HttpResponse(StatusCodes.NotFound, entity = StatusCodes.NotFound.defaultMessage))
          response
        }
      }
    }
}
Example 19
Source File: ThrowableSerializer.scala — from vamp (Apache License 2.0), 5 votes
package io.vamp.common.json

import org.json4s.JsonAST.{ JObject, JString }
import org.json4s._

object ThrowableSerializer {
  /** Builds a SerializationFormat whose serializer list is extended with a ThrowableSerializer carrying the given override message. */
  def apply(message: Option[String]): SerializationFormat = new SerializationFormat {
    override def customSerializers = super.customSerializers :+ new ThrowableSerializer(message)
  }
}

/**
 * json4s serializer rendering any Throwable as {"message": ...}.
 * When an override `message` was supplied it wins; otherwise the
 * throwable's own message is used. Deserialization is unsupported.
 */
class ThrowableSerializer(message: Option[String] = None) extends Serializer[Throwable] {

  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
    case throwable: Throwable ⇒
      val text = message.getOrElse(throwable.getMessage)
      JObject(JField("message", JString(text)) :: Nil)
  }

  override def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Throwable] = SerializationFormat.unsupported
}
Example 20
Source File: MapSerializer.scala — from vamp (Apache License 2.0), 5 votes
package io.vamp.common.json

import io.vamp.common.util.TextUtil
import org.json4s.JsonAST.JObject
import org.json4s._

/** SerializationFormat that adds the key-normalizing MapSerializer below. */
object MapSerializer extends SerializationFormat {
  override def customSerializers: List[Serializer[_]] = super.customSerializers :+ new MapSerializer()
}

/**
 * Serializes maps to JSON objects. Keys already containing '-', '_' or '/'
 * pass through unchanged; all other keys are snake_cased (without dashes).
 * Values are decomposed recursively. Deserialization is unsupported.
 */
class MapSerializer extends Serializer[Map[_, _]] {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
    case map: Map[_, _] ⇒
      val fields = map.toList.map {
        case (key, value) ⇒
          val keyString = key.toString
          val hasSeparator = keyString.contains("-") || keyString.contains("_") || keyString.contains("/")
          val fieldName = if (hasSeparator) keyString else TextUtil.toSnakeCase(keyString, dash = false)
          JField(fieldName, Extraction.decompose(value))
      }
      new JObject(fields)
  }

  def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Map[_, _]] = SerializationFormat.unsupported
}
Example 21
Source File: ManifestUploader.scala — from teamcity-s3-plugin (Apache License 2.0), 5 votes
package com.gu.teamcity

import java.io.ByteArrayInputStream
import java.util.Date

import jetbrains.buildServer.messages.{BuildMessage1, DefaultMessagesInfo, Status}
import jetbrains.buildServer.serverSide.{BuildServerAdapter, SRunningBuild}
import org.joda.time.{DateTime, DateTimeZone}
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._

import scala.util.{Failure, Success}

/**
 * TeamCity server listener that, just before a build finishes, uploads a
 * build.json manifest (project name, build number, start time, VCS revisions
 * and branch) to the configured S3 bucket.
 */
class ManifestUploader(config: S3ConfigManager, s3: S3) extends BuildServerAdapter {

  override def beforeBuildFinish(runningBuild: SRunningBuild) {
    import scala.collection.convert.wrapAsScala._

    // Builds with internal-only artifacts produce no manifest.
    if (!runningBuild.isHasInternalArtifactsOnly) {
      val properties = Seq(
        "projectName" -> S3Plugin.cleanFullName(runningBuild),
        "buildNumber" -> runningBuild.getBuildNumber,
        "startTime" -> new DateTime(runningBuild.getStartDate).withZone(DateTimeZone.UTC).toString //Joda default is ISO8601
      ) ++ runningBuild.getRevisions.flatMap(revision => Seq(
        "revision" -> revision.getRevision,
        "vcsURL" -> revision.getRoot.getProperties.get("url")
      )) ++ Option(runningBuild.getBranch).map(b =>
        "branch" -> b.getDisplayName
      ).orElse(runningBuild.getVcsRootEntries.headOption.map(r =>
        "branch" -> r.getProperties.get("branch")
      ))

      // Fold the key/value pairs into one JObject via the json4s DSL, then render.
      val propertiesJSON = pretty(render(properties.foldLeft(JObject())(_ ~ _)))
      val jsBytes = propertiesJSON.getBytes("UTF-8")

      // Upload only when a manifest bucket is configured; log the outcome either way.
      config.buildManifestBucket.map { bucket =>
        s3.upload(bucket, runningBuild, "build.json", new ByteArrayInputStream(jsBytes), jsBytes.length) match {
          case Failure(e) => runningBuild.getBuildLog().message(s"Error uploading manifest: ${e.getMessage}",
              Status.ERROR,new Date,DefaultMessagesInfo.MSG_BUILD_FAILURE,DefaultMessagesInfo.SOURCE_ID,null)
          case Success(_) => runningBuild.getBuildLog().message("Manifest S3 upload complete",
              Status.NORMAL,new Date,DefaultMessagesInfo.MSG_TEXT,DefaultMessagesInfo.SOURCE_ID,null)
        }
      }
    }
  }

  // NOTE(review): unused in this snippet — presumably kept for future logging; confirm before removing.
  private def normalMessage(text: String) =
    new BuildMessage1(DefaultMessagesInfo.SOURCE_ID, DefaultMessagesInfo.MSG_TEXT, Status.NORMAL, new Date, text)
}
Example 22
Source File: JObjectParam.scala — from sona (Apache License 2.0), 5 votes
package com.tencent.angel.sona.ml.param
import com.tencent.angel.sona.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.{compact, parse, render}

/**
 * Param whose value is a raw json4s [[JObject]].
 *
 * Encoding renders compact JSON; decoding accepts any string that parses to a
 * JSON object and rejects everything else with a descriptive error.
 */
class JObjectParam(parent: String, name: String, doc: String, isValid: JObject => Boolean)
  extends Param[JObject](parent, name, doc, isValid) {
  // Default validity: any non-null object.
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, (value: JObject) => value != null)

  def this(parent: Identifiable, name: String, doc: String, isValid: JObject => Boolean) =
    this(parent.uid, name, doc, isValid)

  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  override def w(value: JObject): ParamPair[JObject] = super.w(value)

  /** Renders the value as compact JSON. */
  override def jsonEncode(value: JObject): String = {
    compact(render(value))
  }

  /**
   * Parses the JSON string back into a JObject.
   *
   * Pattern-matches instead of the original `asInstanceOf` so that non-object
   * input (an array, string, number, ...) fails with a clear
   * IllegalArgumentException rather than an opaque ClassCastException.
   * The unused implicit DefaultFormats local was removed (parse needs none).
   */
  override def jsonDecode(json: String): JObject = {
    parse(json) match {
      case obj: JObject => obj
      case other =>
        throw new IllegalArgumentException(s"Cannot decode param $name: expected a JSON object, got $other")
    }
  }
}
Example 23
Source File: JsonActor.scala — from coral (Apache License 2.0), 5 votes
package io.coral.actors.transform

import akka.actor.Props
import io.coral.actors.{SimpleEmitTrigger, CoralActor}
import io.coral.lib.JsonTemplate
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s._

object JsonActor {
	implicit val formats = org.json4s.DefaultFormats

	/**
	 * Extracts the "template" parameter from the actor definition.
	 * Yields None when the parameter is absent or fails template validation.
	 * (Option.filter is equivalent to the original for-comprehension guard.)
	 */
	def getParams(json: JValue) = {
		(json \ "params" \ "template")
			.extractOpt[JObject]
			.filter(JsonTemplate.validate)
	}

	/** Props for a JsonActor, only when the definition contains a valid template. */
	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[JsonActor], json))
	}

}

/**
 * Actor that transforms each incoming JSON object through the fixed
 * [[JsonTemplate]] built from its "template" parameter.
 */
class JsonActor(json: JObject)
	extends CoralActor(json)
	with SimpleEmitTrigger {

	// Safe .get here: Props are only created after getParams succeeded (see companion).
	val template = JsonTemplate(JsonActor.getParams(json).get)

	/** Applies the template to the incoming object; always emits the result. */
	override def simpleEmitTrigger(json: JObject): Option[JValue] = {
		Some(template.interpret(json))
	}

}
Example 24
Source File: LogActor.scala — from coral (Apache License 2.0), 5 votes
package io.coral.actors.connector

import java.io.FileWriter
import akka.actor.{ActorLogging, Props}
import io.coral.actors.CoralActor
import org.json4s.JsonAST.JObject
import org.json4s._
import org.json4s.jackson.JsonMethods._
import scala.concurrent.Future


object LogActor {
	implicit val formats = org.json4s.DefaultFormats

	/**
	 * Reads the optional "file" and "append" params from the actor definition.
	 * Always yields Some: a missing file means "log via the actor system",
	 * and append defaults to false.
	 */
	def getParams(json: JValue) = {
		val file = (json \ "params" \ "file").extractOpt[String]
		val append = (json \ "params" \ "append").extractOpt[Boolean].getOrElse(false)
		Some((file, append))
	}

	/** Props for a LogActor built from the given definition. */
	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[LogActor], json))
	}
}

/**
 * Actor that writes each incoming JSON object as one line to a file, or —
 * when no file is configured — to the actor system's info log.
 */
class LogActor(json: JObject) extends CoralActor(json) with ActorLogging {
	// Safe .get: getParams always returns Some (see companion).
	val (file, append) = LogActor.getParams(json).get
	var fileWriter: Option[FileWriter] = None

	/** Opens the writer at actor start, only when a file path was configured.
	  * Option.map replaces the original isDefined/.get pair — same behavior. */
	override def preStart() = {
		fileWriter = file.map(name => new FileWriter(name, append))
	}

	/** Releases the file handle, if one was opened. */
	override def postStop() = {
		fileWriter.foreach(_.close())
	}

	/** Writes (and flushes) one compact JSON line per message; emits nothing. */
	override def trigger = {
		json => Future {
			fileWriter match {
				case None =>
					log.info(compact(json))
				case Some(f) =>
					f.write(compact(json) + "\n")
					f.flush()
			}

			Some(JNothing)
		}
	}
}
Example 25
Source File: KafkaProducerActor.scala — from coral (Apache License 2.0), 5 votes
package io.coral.actors.connector

import java.util.Properties

import akka.actor.{Props, ActorLogging}
import io.coral.actors.{NoEmitTrigger, CoralActor}
import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import io.coral.lib.{KafkaJsonProducer, ConfigurationBuilder}
import org.json4s.JsonAST.{JObject, JValue}
import kafka.serializer.Encoder

object KafkaProducerActor {
	implicit val formats = org.json4s.DefaultFormats
	// Base producer settings come from the "kafka.producer" config section.
	val builder = new ConfigurationBuilder("kafka.producer")

	/** Extracts (producer properties, topic) from the definition; None when either param is missing. */
	def getParams(json: JValue) = {
		for {
			kafka <- (json \ "params" \ "kafka").extractOpt[JObject]
			topic <- (json \ "params" \ "topic").extractOpt[String]
		} yield {
			val properties = producerProperties(kafka)
			(properties, topic)
		}
	}

	// Overlays JSON-provided settings on the configured defaults.
	// NOTE(review): the partial function only matches String values; a
	// non-string value in the kafka params object would throw a MatchError.
	private def producerProperties(json: JObject): Properties = {
		val properties = builder.properties
		json.values.foreach { case (k: String, v: String) => properties.setProperty(k, v) }
		properties
	}

	/** Props with the default JSON encoder. */
	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaProducerActor[KafkaEncoder]], json, KafkaJsonProducer()))
	}

	/** Props with a caller-supplied encoder class. */
	def apply[T <: KafkaEncoder](json: JValue, encoder: Class[T]): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaProducerActor[T]], json, KafkaJsonProducer(encoder)))
	}
}

/**
 * Actor that forwards incoming JSON messages to a Kafka topic. Messages are
 * expected to carry an optional "key" string and a mandatory "message" object.
 */
class KafkaProducerActor[T <: Encoder[JValue]](json: JObject, connection: KafkaJsonProducer[T])
	extends CoralActor(json)
	with NoEmitTrigger
	with ActorLogging {
	// Safe .get: Props creation is gated on getParams succeeding (see companion).
	val (properties, topic) = KafkaProducerActor.getParams(json).get
	// Lazy so actor construction cannot fail on Kafka connectivity.
	lazy val kafkaSender = connection.createSender(topic, properties)

	/** Extracts key/message from the trigger payload and publishes them. */
	override def noEmitTrigger(json: JObject) = {
		val key = (json \ "key").extractOpt[String]
		val message = (json \ "message").extract[JObject]
		send(key, message)
	}

	// Publish errors are logged, not rethrown: delivery is best-effort here.
	private def send(key: Option[String], message: JObject) = {
		try {
			kafkaSender.send(key, message)
		} catch {
			case e: Exception => log.error(e, "failed to send message to Kafka")
		}
	}
}
Example 26
Source File: KafkaConsumerActor.scala — from coral (Apache License 2.0), 5 votes
package io.coral.actors.connector

import java.util.Properties

import akka.actor.Props
import io.coral.actors.CoralActor
import io.coral.actors.connector.KafkaConsumerActor.{StopReadingMessageQueue, ReadMessageQueue}
import io.coral.lib.{ConfigurationBuilder, KafkaJsonConsumer}
import kafka.serializer.Decoder
import kafka.tools.MessageFormatter
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.json4s.JsonAST.{JNothing, JObject, JValue}

object KafkaConsumerActor {
	// Control messages for the self-driven polling loop.
	case class ReadMessageQueue()
	case class StopReadingMessageQueue()

	implicit val formats = org.json4s.DefaultFormats
	// Base consumer settings come from the "kafka.consumer" config section.
	val builder = new ConfigurationBuilder("kafka.consumer")

	/** Extracts (consumer properties, topic) from the definition; None when either param is missing. */
	def getParams(json: JValue) = {
		for {
			kafka <- (json \ "params" \ "kafka").extractOpt[JObject]
			topic <- (json \ "params" \ "topic").extractOpt[String]
		} yield {
			val properties = consumerProperties(kafka)
			(properties, topic)
		}
	}

	// Overlays JSON-provided settings on the configured defaults.
	// NOTE(review): the partial function only matches String values; a
	// non-string value in the kafka params object would throw a MatchError.
	def consumerProperties(json: JObject): Properties = {
		val properties = builder.properties

		json.values.foreach {
			case (k: String, v: String) =>
				properties.setProperty(k, v)
		}

		properties
	}

	/** Props with the default JSON consumer. */
	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer()))
	}

	/** Props with a caller-supplied message decoder. */
	def apply(json: JValue, decoder: Decoder[JValue]): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer(decoder)))
	}
}

/**
 * Actor that pumps messages from a Kafka topic into the Coral pipeline by
 * repeatedly sending itself ReadMessageQueue until stopped.
 */
class KafkaConsumerActor(json: JObject, connection: KafkaJsonConsumer) extends CoralActor(json) {
	// Safe .get: Props creation is gated on getParams succeeding (see companion).
	val (properties, topic) = KafkaConsumerActor.getParams(json).get
	// Lazy so the Kafka connection is opened on first poll, not at construction.
	lazy val stream = connection.stream(topic, properties)
	var shouldStop = false

	override def preStart(): Unit = {
		super.preStart()
	}

	override def receiveExtra: Receive = {
		// A message arrived within the poll timeout: emit it and keep looping.
		case ReadMessageQueue() if stream.hasNextInTime =>
			val message: JValue = stream.next
			// Offsets are committed before emit: a failed emit loses the message
			// (at-most-once delivery).
			stream.commitOffsets

			if (message != JNothing) {
				emit(message)
			}

			if (!shouldStop) {
				self ! ReadMessageQueue()
			}
		// Nothing available yet: re-enqueue the poll request.
		case ReadMessageQueue() =>
			self ! ReadMessageQueue()
		// Graceful stop: the loop ends after the in-flight poll completes.
		case StopReadingMessageQueue() =>
			shouldStop = true
	}

	
}
Example 27
Source File: Trigger.scala — from coral (Apache License 2.0), 5 votes
package io.coral.actors

import org.json4s._

import scala.concurrent.Future
import org.json4s.JsonAST.{JObject, JValue}

/**
 * Contract for actors reacting to incoming JSON: a trigger takes the incoming
 * object and asynchronously yields an optional value.
 * NOTE(review): the meaning of None vs Some(JNothing) is defined by
 * CoralActor, outside this snippet — confirm there before relying on it.
 */
trait Trigger {
	// Handler type: JSON object in, future optional JSON out.
	type TriggerType = JObject => Future[Option[JValue]]

	def trigger: TriggerType
}

/** Trigger variant for side-effect-only actors: run the handler, emit nothing. */
trait NoEmitTrigger extends Trigger {

	override def trigger: TriggerType = { incoming =>
		noEmitTrigger(incoming)
		Future.successful(Some(JNothing))
	}

	/** Side-effecting handler implemented by concrete actors. */
	def noEmitTrigger(json: JObject): Unit
}

/** Trigger variant that ignores input entirely and always yields JNothing. */
trait NoTrigger extends Trigger {
	override def trigger: TriggerType = _ => Future.successful(Some(JNothing))
}

/** Trigger variant lifting a synchronous handler into the async contract. */
trait SimpleEmitTrigger extends Trigger {

	override def trigger: TriggerType = incoming => Future.successful(simpleEmitTrigger(incoming))

	/** Synchronous transformation; None means nothing is emitted. */
	def simpleEmitTrigger(json: JObject): Option[JValue]
}
Example 28
Source File: KafkaJsonProducer.scala — from coral (Apache License 2.0), 5 votes
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.producer.{KeyedMessage, ProducerConfig, Producer}
import kafka.serializer.Encoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonProducer {
	// Kafka encoder specialization used for publishing json4s values.
	type KafkaEncoder = Encoder[JValue]
	/** Producer factory with the default JSON encoder. */
	def apply() = new KafkaJsonProducer(classOf[JsonEncoder])
	/** Producer factory with a caller-supplied encoder class. */
	def apply[T <: KafkaEncoder](encoder: Class[T]) = new KafkaJsonProducer(encoder)
}

/** Factory for topic-bound Kafka senders using the configured encoder class. */
class KafkaJsonProducer[T <: KafkaEncoder](encoderClass: Class[T]) {
	/**
	 * Builds a sender for `topic`. The caller's Properties are cloned
	 * (shallow copy) before the serializer class is injected, so the
	 * original instance is never mutated.
	 */
	def createSender(topic: String, properties: Properties): KafkaSender = {
		val props = properties.clone.asInstanceOf[Properties]
		props.put("serializer.class", encoderClass.getName)
		val producer = createProducer(props)
		new KafkaSender(topic, producer)
	}

	// Overridable seam for tests (see MyKafkaJsonProducer in the spec).
	def createProducer(props: Properties): Producer[String, JValue] = {
		new Producer[String, JValue](new ProducerConfig(props))
	}
}

/** Thin wrapper publishing JSON objects to one Kafka topic. */
class KafkaSender(topic: String, producer: Producer[String, JValue]) {

	/** Publishes `message` to the topic, keyed when a key is provided. */
	def send(key: Option[String], message: JObject) = {
		val keyedMessage = key
			.map(k => new KeyedMessage[String, JValue](topic, k, message))
			.getOrElse(new KeyedMessage[String, JValue](topic, message))

		producer.send(keyedMessage)
	}
}

/** Default encoder: renders the JSON value compactly and emits UTF-8 bytes. */
class JsonEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		compact(value).getBytes("UTF-8")
	}
}
Example 29
Source File: KafkaJsonProducerSpec.scala — from coral (Apache License 2.0), 5 votes
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.scalatest.{Matchers, WordSpec}
import org.json4s.jackson.JsonMethods._
import kafka.producer.{ProducerConfig, KeyedMessage, Producer}
import org.mockito.{Mockito, ArgumentCaptor}
import org.mockito.Mockito._
import scala.collection.mutable

/**
 * Unit tests for KafkaJsonProducer / KafkaSender / JsonEncoder, using Mockito
 * to capture what would be sent to Kafka instead of connecting to a broker.
 */
class KafkaJsonProducerSpec extends WordSpec with Matchers {
	"A KafkaJsonProducer" should {
		"create a KafkaJsonProducer with the JsonEncoder" in {
			val producer = KafkaJsonProducer()
			assert(producer.getClass == classOf[KafkaJsonProducer[JsonEncoder]])
		}

		"create a KafkaJsonProducer with the specified Encoder" in {
			val producer = KafkaJsonProducer(classOf[MyEncoder])
			assert(producer.getClass == classOf[KafkaJsonProducer[MyEncoder]])
		}

		// Verifies the serializer.class property is wired through to the producer config.
		"create a sender" in {
			val producer = new MyKafkaJsonProducer
			producer.createSender("topic", new Properties)
			val serializer = producer.receivedProperties.get("serializer.class")
			assert(serializer == classOf[MyEncoder].getName)
		}
	}

	"A KafkaSender" should {
		"send the JSON provided without a key to Kafka" in {
			val messageJson = """{"key1": "value1", "key2": "value2"}"""

			val keyedMessage = sendMessage(None, messageJson)

			assert(keyedMessage.topic == "test")
			assert(keyedMessage.hasKey == false)
			assert(keyedMessage.message == parse(messageJson))
		}

		"send the JSON provided with a key to Kafka" in {
			val messageJson = """{"key3": "value3", "key4": "value4"}"""

			val keyedMessage = sendMessage(Some("key"), messageJson)

			assert(keyedMessage.key == "key")
			assert(keyedMessage.topic == "test")
			assert(keyedMessage.message == parse(messageJson))
		}
	}

	"A JsonEncoder" should {
		// Round-trip: encode to bytes, decode, compare ASTs.
		"encode the provided json" in {
			val json = """{"key1": "value1"}"""
			val encoder = new JsonEncoder(new VerifiableProperties)
			val result = encoder.toBytes(parse(json))
			assert(parse(new String(result, "UTF-8")) == parse(json))
		}
	}

	// Sends one message through a mocked producer and returns the captured KeyedMessage.
	private def sendMessage(key: Option[String], messageJson: String): KeyedMessage[String, JValue] = {
		val producer = Mockito.mock(classOf[Producer[String, JValue]])
		val sender = new KafkaSender("test", producer)
		sender.send(key, parse(messageJson).asInstanceOf[JObject])

		val argumentCaptor = ArgumentCaptor.forClass(classOf[KeyedMessage[String, JValue]])
		verify(producer).send(argumentCaptor.capture())

		val keyedMessages = argumentCaptor.getAllValues
		assert(keyedMessages.size == 1)

		// The following construction is necessary because capturing of parameters
		// with Mockito, Scala type interference, and multiple arguments
		// don't work together without explicit casts.
		keyedMessages.get(0).asInstanceOf[mutable.WrappedArray.ofRef[KeyedMessage[String, JValue]]](0)
	}
}

/** Test encoder that discards the value — lets the spec verify wiring only. */
class MyEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		Array()
	}
}

/** Test double capturing producer properties instead of connecting to Kafka. */
class MyKafkaJsonProducer extends KafkaJsonProducer(classOf[MyEncoder]) {
	// Last Properties passed to createProducer; inspected by the spec.
	var receivedProperties: Properties = _

	override def createProducer(props: Properties): Producer[String, JValue] = {
		receivedProperties = props
		Mockito.mock(classOf[Producer[String, JValue]])
	}
}
Example 30
Source File: Library.scala — from scala-clippy (Apache License 2.0), 5 votes
package com.softwaremill.clippy

import org.json4s.JsonAST.{JField, JObject, JString, JValue}

/** Maven-style library coordinates (group, artifact, version). */
case class Library(groupId: String, artifactId: String, version: String) {

  /** JSON object carrying groupId / artifactId / version as string fields. */
  def toJson: JValue = {
    val fields: List[JField] = List(
      JField("groupId", JString(groupId)),
      JField("artifactId", JString(artifactId)),
      JField("version", JString(version))
    )
    JObject(fields)
  }

  /** Canonical "group:artifact:version" rendering. */
  override def toString = s"$groupId:$artifactId:$version"
}

object Library {
  /**
   * Parses a Library from JSON, or None when any coordinate is missing or not
   * a string. The for-comprehension pattern-matches over the object's field
   * list; with duplicate fields it enumerates all combinations and headOption
   * keeps the first.
   */
  def fromJson(jvalue: JValue): Option[Library] =
    (for {
      JObject(fields)                           <- jvalue
      JField("groupId", JString(groupId))       <- fields
      JField("artifactId", JString(artifactId)) <- fields
      JField("version", JString(version))       <- fields
    } yield Library(groupId, artifactId, version)).headOption
}
Example 31
Source File: AnyFormat.scala — from scalapb-json4s (Apache License 2.0), 5 votes
package scalapb.json4s

import com.google.protobuf.any.{Any => PBAny}
import org.json4s.JsonAST.{JNothing, JObject, JString, JValue}

import scala.language.existentials

/**
 * JSON conversion for protobuf `Any`: the serialized form is the packed
 * message's own JSON with an extra "@type" field carrying the type URL.
 * Both directions require the type to be registered in a TypeRegistry.
 */
object AnyFormat {
  val anyWriter: (Printer, PBAny) => JValue = {
    case (printer, any) =>
      // Find the companion so it can be used to JSON-serialize the message. Perhaps this can be circumvented by
      // including the original GeneratedMessage with the Any (at least in memory).
      val cmp = printer.typeRegistry
        .findType(any.typeUrl)
        .getOrElse(
          throw new IllegalStateException(
            s"Unknown type ${any.typeUrl} in Any.  Add a TypeRegistry that supports this type to the Printer."
          )
        )

      // Unpack the message...
      val message = any.unpack(cmp)

      // ... and add the @type marker to the resulting JSON
      printer.toJson(message) match {
        case JObject(fields) =>
          JObject(("@type" -> JString(any.typeUrl)) +: fields)
        case value =>
          // Safety net, this shouldn't happen
          throw new IllegalStateException(
            s"Message of type ${any.typeUrl} emitted non-object JSON: $value"
          )
      }
  }

  // Parse direction: resolve the companion from "@type", re-parse the object
  // as that message, and pack its bytes back into a PBAny.
  val anyParser: (Parser, JValue) => PBAny = {
    case (parser, obj @ JObject(fields)) =>
      obj \ "@type" match {
        case JString(typeUrl) =>
          val cmp = parser.typeRegistry
            .findType(typeUrl)
            .getOrElse(
              throw new JsonFormatException(
                s"Unknown type ${typeUrl} in Any.  Add a TypeRegistry that supports this type to the Parser."
              )
            )
          // NOTE(review): the `true` flag's meaning (presumably skipping the
          // extra "@type" field on the nested parse) is defined by
          // Parser.fromJson — confirm against its signature.
          val message = parser.fromJson(obj, true)(cmp)
          PBAny(typeUrl = typeUrl, value = message.toByteString)

        case JNothing =>
          throw new JsonFormatException(s"Missing type url when parsing $obj")

        case unknown =>
          throw new JsonFormatException(
            s"Expected string @type field, got $unknown"
          )
      }

    case (_, unknown) =>
      throw new JsonFormatException(s"Expected an object, got $unknown")
  }
}
Example 32
Source File: L6-12StaticPool.scala — from prosparkstreaming (Apache License 2.0), 5 votes
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

/**
 * Spark Streaming job that polls Yahoo Finance quotes over HTTP and publishes
 * each record as CSV to an MQTT topic, using a broadcast, lazily-connected
 * sink so every executor JVM reuses a single MQTT client.
 *
 * Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>
 */
object MqttSinkAppF {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    // Batch length in seconds; also reused as the HTTP polling interval.
    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    // Broadcast the lazy sink holder so executors share one client per JVM.
    val mqttSink = ssc.sparkContext.broadcast(MqttSinkLazy(outputBrokerUrl))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // A YQL response holds several quotes; prefix each with the query timestamp.
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        // Flatten every field of the quote into one CSV line.
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          // One shared client per executor JVM publishes the partition's lines.
          par.foreach(message => mqttSink.value.client.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}

/**
 * Serializable holder whose MQTT client is created lazily on first use
 * (i.e. once per executor JVM after deserialization); a shutdown hook
 * disconnects and closes it.
 */
class MqttSinkLazy(brokerUrl: String) extends Serializable {
  lazy val client = {
    val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    client.connect()
    sys.addShutdownHook {
      client.disconnect()
      client.close()
    }
    client
  }
}

object MqttSinkLazy {
  val brokerUrl = "tcp://localhost:1883"
  // Single shared (static-pool) instance per JVM, as the example's title implies.
  val client = new MqttSinkLazy(brokerUrl)

  // NOTE(review): the brokerUrl argument is ignored; the shared instance
  // hard-wired to tcp://localhost:1883 is always returned. This appears
  // intentional for the static-pool example — confirm before reusing.
  def apply(brokerUrl: String): MqttSinkLazy = {
    client
  }
}
Example 33
Source File: EncryptedKeyJsonCodec.scala — from mantis (Apache License 2.0), 5 votes
package io.iohk.ethereum.keystore

import java.util.UUID

import akka.util.ByteString
import io.iohk.ethereum.domain.Address
import io.iohk.ethereum.keystore.EncryptedKey._
import org.json4s.JsonAST.{JObject, JString, JValue}
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.{CustomSerializer, DefaultFormats, Extraction, JField}
import org.spongycastle.util.encoders.Hex

import scala.util.Try

/**
 * JSON codec for [[EncryptedKey]] values in the keystore format: a top-level
 * id/address/version plus a "crypto" section holding cipher parameters, KDF
 * parameters and MAC. All byte strings are hex-encoded without a "0x" prefix.
 */
object EncryptedKeyJsonCodec {

  // ByteString <-> hex JSON string (no "0x" prefix).
  private val byteStringSerializer = new CustomSerializer[ByteString](_ => (
    { case JString(s) => ByteString(Hex.decode(s)) },
    { case bs: ByteString => JString(Hex.toHexString(bs.toArray)) }
  ))

  private implicit val formats = DefaultFormats + byteStringSerializer

  /** Hex-encodes a ByteString (no "0x" prefix). */
  private def asHex(bs: ByteString): String =
    Hex.toHexString(bs.toArray)

  /** Serializes an encrypted key to pretty-printed keystore JSON. */
  def toJson(encKey: EncryptedKey): String = {
    import encKey._
    import cryptoSpec._

    val json =
      ("id" -> id.toString) ~
      ("address" -> asHex(address.bytes)) ~
      ("version" -> version) ~
      ("crypto" -> (
        ("cipher" -> cipher) ~
        ("ciphertext" -> asHex(ciphertext)) ~
        ("cipherparams" -> ("iv" -> asHex(iv))) ~
        encodeKdf(kdfParams) ~
        ("mac" -> asHex(mac))
      ))

    pretty(render(json))
  }

  /**
   * Parses keystore JSON into an [[EncryptedKey]].
   *
   * Field names are matched case-insensitively (all keys are lower-cased first).
   * Returns Left(error.toString) on any parse or extraction failure.
   */
  def fromJson(jsonStr: String): Either[String, EncryptedKey] = Try {
    val json = parse(jsonStr).transformField { case JField(k, v) => JField(k.toLowerCase, v) }

    val uuid = UUID.fromString((json \ "id").extract[String])
    val address = Address((json \ "address").extract[String])
    val version = (json \ "version").extract[Int]

    val crypto = json \ "crypto"
    val cipher = (crypto \ "cipher").extract[String]
    val ciphertext = (crypto \ "ciphertext").extract[ByteString]
    val iv = (crypto \ "cipherparams" \ "iv").extract[ByteString]
    val mac = (crypto \ "mac").extract[ByteString]

    val kdfParams = extractKdf(crypto)
    val cryptoSpec = CryptoSpec(cipher, ciphertext, iv, kdfParams, mac)
    EncryptedKey(uuid, address, cryptoSpec, version)

  }.fold(ex => Left(ex.toString), encKey => Right(encKey))

  /** Encodes the KDF discriminator plus its decomposed parameter object. */
  private def encodeKdf(kdfParams: KdfParams): JObject =
    kdfParams match {
      // Type-only matches: the params object is decomposed whole, so the field
      // binders the previous version destructured were dead code (unused-variable
      // warnings on salt/n/r/p/dklen and salt/prf/c/dklen).
      case _: ScryptParams =>
        ("kdf" -> Scrypt) ~
        ("kdfparams" -> Extraction.decompose(kdfParams))

      case _: Pbkdf2Params =>
        ("kdf" -> Pbkdf2) ~
        ("kdfparams" -> Extraction.decompose(kdfParams))
    }

  /** Reads the "kdf" discriminator and extracts the matching params type. */
  private def extractKdf(crypto: JValue): KdfParams = {
    val kdf = (crypto \ "kdf").extract[String]
    kdf.toLowerCase match {
      case Scrypt =>
        (crypto \ "kdfparams").extract[ScryptParams]

      case Pbkdf2 =>
        (crypto \ "kdfparams").extract[Pbkdf2Params]
    }
  }

}
Example 34
Source File: ResultResource.scala — from the Linkis project (Apache License 2.0)
package com.webank.wedatasphere.linkis.resourcemanager

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.{CustomSerializer, Extraction}


// Outcome of a resource request to the resource manager.
trait ResultResource

// Request denied; reason may be null when unspecified.
case class NotEnoughResource(reason: String = null) extends ResultResource

// Request granted; ticketId identifies the reservation.
case class AvailableResource(ticketId: String) extends ResultResource

// Ticket/user pair used when reporting a result for a specific user.
case class UserResultResource(ticketId: String, user: String) extends ResultResource

/**
 * json4s serializer for [[ResultResource]]: each subtype is wrapped in a
 * single-field object tagged with its class name, e.g.
 * {"AvailableResource":{"ticketId":"..."}}.
 *
 * NOTE(review): the deserializer pattern-matches on the exact field list and
 * order, so reordered or extra JSON fields will not match — verify this matches
 * the producing side before loosening.
 */
object ResultResourceSerializer extends CustomSerializer[ResultResource](implicit formats => ( {
  case JObject(List(("NotEnoughResource", JObject(List(("reason", reason)))))) => NotEnoughResource(reason.extract[String])
  case JObject(List(("AvailableResource", JObject(List(("ticketId", ticketId)))))) => AvailableResource(ticketId.extract[String])
  case JObject(List(("UserResultResource", JObject(List(("ticketId", ticketId), ("user", user)))))) =>
    UserResultResource(ticketId.extract[String], user.extract[String])
}, {
  // Serialization relies on JsonDSL implicits converting these nested pairs to JObjects.
  case r: NotEnoughResource => ("NotEnoughResource", ("reason", Extraction.decompose(r.reason)))
  case r: AvailableResource => ("AvailableResource", ("ticketId", Extraction.decompose(r.ticketId)))
  case r: UserResultResource => ("UserResultResource", ("ticketId", r.ticketId) ~ ("user", r.user))
}))
Example 35
Source File: ModuleResourceInfo.scala — from the Linkis project (Apache License 2.0)
package com.webank.wedatasphere.linkis.resourcemanager.domain

import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.resourcemanager.{Resource, ResourceSerializer}
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.{CustomSerializer, DefaultFormats, Extraction}


// Snapshot of one module's capacity and current usage, keyed by its service instance.
case class ModuleResourceInfo(moduleInstance: ServiceInstance, totalResource: Resource, usedResource: Resource)

// json4s serializer wrapping the payload in a "ModuleResourceInfo" tag object.
// NOTE(review): deserialization matches the exact field order shown below.
object ModuleResourceInfoSerializer extends CustomSerializer[ModuleResourceInfo](implicit formats => ( {
  case JObject(List(("ModuleResourceInfo", JObject(List(("moduleInstance", moduleInstance), ("totalResource", totalResource), ("usedResource", usedResource)))))) =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    ModuleResourceInfo(moduleInstance.extract[ServiceInstance], totalResource.extract[Resource], usedResource.extract[Resource])
}, {
  case m: ModuleResourceInfo =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    ("ModuleResourceInfo", new JObject(List(("moduleInstance", Extraction.decompose(m.moduleInstance)), ("totalResource", Extraction.decompose(m.totalResource)), ("usedResource", Extraction.decompose(m.usedResource)))))
}))

// Point-in-time accounting record for a module, stamped with its registration time.
case class ModuleResourceRecord(moduleInfo: ModuleInfo, moduleUsedResources: Resource, moduleLeftResource: Resource, moduleLockedResource: Resource, registerTime: Long = System.currentTimeMillis())
Example 36
Source File: ModuleInfo.scala — from the Linkis project (Apache License 2.0)
package com.webank.wedatasphere.linkis.resourcemanager.domain

import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.resourcemanager.ResourceRequestPolicy.ResourceRequestPolicy
import com.webank.wedatasphere.linkis.resourcemanager.{Resource, ResourceRequestPolicy, ResourceSerializer}
import org.json4s.JsonAST.JObject
import org.json4s.{CustomSerializer, DefaultFormats, Extraction}


// Static description of an RM-managed module: its service endpoint, capacity,
// protection threshold, and resource-request policy.
case class ModuleInfo(moduleInstance: ServiceInstance,
                      totalResource: Resource,
                      protectedResource: Resource, // Enter protection mode when resources reach this threshold
                      resourceRequestPolicy: ResourceRequestPolicy
                     )

// json4s serializer for ModuleInfo. NOTE(review): the deserializer matches the
// exact field order (moduleInstance, totalResource, protectedResource,
// resourceRequestPolicy); reordered JSON will not match.
object ModuleInfoSerializer extends CustomSerializer[ModuleInfo](implicit formats => ( {
  case JObject(List(("moduleInstance", moduleInstance), ("totalResource", totalResource), ("protectedResource", protectedResource), ("resourceRequestPolicy", resourceRequestPolicy))) =>
    implicit val formats = DefaultFormats + ResourceSerializer + ModuleInstanceSerializer
    new ModuleInfo(moduleInstance.extract[ServiceInstance], totalResource.extract[Resource], protectedResource.extract[Resource], ResourceRequestPolicy.withName(resourceRequestPolicy.extract[String]))
}, {
  case i: ModuleInfo =>
    implicit val formats = DefaultFormats + ResourceSerializer + ModuleInstanceSerializer
    // The enum is serialized by name so withName() can restore it on read.
    val policy = Extraction.decompose(i.resourceRequestPolicy.toString)
    new JObject(List(("moduleInstance", Extraction.decompose(i.moduleInstance)), ("totalResource", Extraction.decompose(i.totalResource)), ("protectedResource", Extraction.decompose(i.protectedResource)), ("resourceRequestPolicy", policy)))
})
)
Example 37
Source File: UserResourceInfo.scala — from the Linkis project (Apache License 2.0)
package com.webank.wedatasphere.linkis.resourcemanager.domain

import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.resourcemanager.{Resource, ResourceSerializer}
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.{CustomSerializer, DefaultFormats, Extraction}



// Lifecycle of a user's resource usage, keyed by the RM ticket id:
// pre-used (reserved) -> used (bound to an engine) -> released.
trait UserResourceInfo

// Resource reserved for a user on a module before the engine starts.
case class UserPreUsedResource(ticketId: String, moduleInstance: ServiceInstance, resource: Resource) extends UserResourceInfo

// Resource actively consumed; engineInstance defaults to null when not yet known.
case class UserUsedResource(ticketId: String, moduleInstance: ServiceInstance, resource: Resource, engineInstance: ServiceInstance = null) extends UserResourceInfo

// Marker that the ticket's resources have been returned.
case class UserReleasedResource(ticketId: String, moduleInstance: ServiceInstance) extends UserResourceInfo


// json4s serializer for the UserResourceInfo hierarchy. Each subtype is wrapped
// in a single-field object tagged with its class name.
// NOTE(review): deserialization matches the exact field list and order shown
// below; JSON with reordered or missing fields will not match.
object UserResourceInfoSerializer extends CustomSerializer[UserResourceInfo](implicit formats => ( {
  case JObject(List(("UserPreUsedResource", JObject(List(("ticketId", ticketId), ("moduleInstance", moduleInstance), ("resource", resource)))))) =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    new UserPreUsedResource(ticketId.extract[String], moduleInstance.extract[ServiceInstance], resource.extract[Resource])
  case JObject(List(("UserUsedResource", JObject(List(("ticketId", ticketId), ("moduleInstance", moduleInstance), ("resource", resource), ("engineInstance", engineInstance)))))) =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    new UserUsedResource(ticketId.extract[String], moduleInstance.extract[ServiceInstance], resource.extract[Resource], engineInstance.extract[ServiceInstance])
  case JObject(List(("UserReleasedResource", JObject(List(("ticketId", ticketId), ("moduleInstance", moduleInstance)))))) =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    new UserReleasedResource(ticketId.extract[String], moduleInstance.extract[ServiceInstance])
}, {
  case d: UserPreUsedResource =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    ("UserPreUsedResource", new JObject(List(("ticketId", Extraction.decompose(d.ticketId)), ("moduleInstance", Extraction.decompose(d.moduleInstance)), ("resource", Extraction.decompose(d.resource)))))
  case d: UserUsedResource =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    ("UserUsedResource", new JObject(List(("ticketId", Extraction.decompose(d.ticketId)), ("moduleInstance", Extraction.decompose(d.moduleInstance)), ("resource", Extraction.decompose(d.resource)), ("engineInstance", Extraction.decompose(d.engineInstance)))))
  case d: UserReleasedResource =>
    implicit val formats = DefaultFormats + ModuleInstanceSerializer + ResourceSerializer
    ("UserReleasedResource", new JObject(List(("ticketId", Extraction.decompose(d.ticketId)), ("moduleInstance", Extraction.decompose(d.moduleInstance)))))
})
)
Example 38
Source File: RPCFormatsTest.scala — from the Linkis project (Apache License 2.0)
package com.webank.wedatasphere.linkis.rpc

import java.lang.reflect.ParameterizedType
import java.util

import com.webank.wedatasphere.linkis.rpc.transform.{JavaCollectionSerializer, JavaMapSerializer}
import org.apache.commons.lang.ClassUtils
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.jackson.Serialization
import org.json4s.reflect.ManifestFactory
import org.json4s.{CustomSerializer, DefaultFormats, Extraction}

// Ad-hoc manual test harness for json4s (de)serialization as used by the RPC
// layer. Run via main(); results are printed, not asserted.
object RPCFormatsTest {

  trait ResultResource
  // Deliberately a plain (non-case) class to exercise the custom-serializer path.
  class AvailableResource(val ticketId: String) extends ResultResource

  // Wraps the subtype in a single-field object: {"AvailableResource":{"ticketId":...}}.
  object ResultResourceSerializer extends CustomSerializer[ResultResource](implicit formats => ({
    case JObject(List(("AvailableResource", JObject(List(("ticketId", ticketId)))))) => new AvailableResource(ticketId.extract[String])
  },{
    case r: AvailableResource => ("AvailableResource", ("ticketId", Extraction.decompose(r.ticketId)))
  }))

  // Round-trips an AvailableResource, recovering the concrete class to read back
  // by reflecting on the type parameter of each registered custom serializer.
  def testRPC1(args: Array[String]): Unit = {
    implicit val formats = DefaultFormats + ResultResourceSerializer
    // Pull the T out of each CustomSerializer[T] via its generic superclass.
    val serializerClasses = formats.customSerializers.map(_.getClass.getGenericSuperclass match {
      case p: ParameterizedType =>
        val params = p.getActualTypeArguments
        if(params == null || params.isEmpty) null
        else params(0).asInstanceOf[Class[_]]
    }).filter(_ != null)
    val a = new AvailableResource("aaa")
    val str = Serialization.write(a)
    println(str)
    val clazz = classOf[AvailableResource]
    println(serializerClasses)
    // Choose the serializer class that AvailableResource is assignable to, if any.
    val realClass1 = serializerClasses.find(ClassUtils.isAssignable(clazz, _))
    println(realClass1)
    val realClass = realClass1.getOrElse(clazz)
    val obj = Serialization.read(str)(formats, ManifestFactory.manifestOf(realClass))
    println(obj)
    println(classOf[Array[_]].getClass.getName)
  }

  case class TestCollection1(a: String, list: java.util.List[String])
  case class TestCollection2(a: String, list: java.util.Map[String, Integer])

  // Round-trips a case class holding Java collections via the custom serializers.
  def testRPC2(args: Array[String]): Unit = {
    implicit val formats = DefaultFormats + JavaCollectionSerializer + JavaMapSerializer
    //    val a = TestCollection1("1", new util.ArrayList[String]())
    val a = TestCollection2("1", new util.HashMap[String, Integer]())
    //    a.list.add("1111")
    a.list.put("1111", 2)
    val str = Serialization.write(a)
    println(str)
    val realClass = classOf[TestCollection2]
    val obj = Serialization.read(str)(formats, ManifestFactory.manifestOf(realClass))
    println(obj)
  }

  def main(args: Array[String]): Unit = {
    testRPC2(args)
  }
}
Example 39
Source File: CustomSerializerWithTypeHints.scala — from the reliable-http-client project (Apache License 2.0)
package rhttpc.transport.json4s

import org.json4s.JsonAST.{JObject, JString}
import org.json4s._

import scala.reflect.ClassTag

/**
 * json4s serializer that wraps a plain value serializer with a type-hint
 * envelope: {"<typeHintFieldName>":"<fqcn>","value":<payload>}. `ser` supplies
 * the deserialize/serialize partial functions for the payload alone; this class
 * adds the hint field on write and strips it on read.
 */
class CustomSerializerWithTypeHints[T: Manifest, JV <: JValue: ClassTag](
  ser: Formats => (PartialFunction[JV, T], PartialFunction[T, JV]))
  extends CustomSubTypesSerializer[T, JObject](implicit formats => {
  val (deserialize, serialize) = ser(formats)
  (
    {
      // Expects exactly two fields with "value" second; the hint field itself is
      // not re-checked here — presumably validated upstream (TODO confirm).
      case JObject(_ :: ("value", jValue: JV) :: Nil) if deserialize.isDefinedAt(jValue) =>
        deserialize(jValue)
    },
    {
      case obj: T if serialize.isDefinedAt(obj) =>
        JObject(
          formats.typeHintFieldName -> JString(obj.getClass.getName),
          "value" -> serialize(obj)
        )
    }
  )
})
Example 40
Source File: DescTableReponse.scala — from the XSQL project (Apache License 2.0)
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JObject, JValue}
import org.json4s.JsonDSL._
import scala.collection.{immutable, mutable}

/** Druid segmentMetadata request for a single datasource. */
case class DescTableRequest(dataSource: String) {

  /** Renders the request body Druid expects: {"queryType":"segmentMetadata","dataSource":...}. */
  def toJson: JValue =
    ("queryType" -> "segmentMetadata") ~ ("dataSource" -> dataSource)
}

/** Parsed segmentMetadata response: (columnName -> columnType) pairs sorted by name. */
case class DescTableResponse(data: Seq[(String, Any)])

object DescTableResponse {

  /**
   * Extracts column name/type pairs from a Druid segmentMetadata response.
   * Only the last result's schema is used (most recent segment).
   *
   * Improvements over the previous version: the mutable `var` HashMap
   * accumulation is replaced by an immutable transformation, and the partial
   * `.get("type").get` chain by a direct lookup (same NoSuchElementException on
   * a missing "type" key, without the double Option unwrap).
   *
   * @throws IllegalArgumentException if the response is not a JSON array
   */
  def parse(js: JValue): DescTableResponse = {
    js match {
      case JArray(results) =>
        val columns = (results.last \ "columns").asInstanceOf[JObject].values
        // Each column maps to a property bag; we only need its "type" entry.
        val typed = columns.map { case (name, props) =>
          name -> props.asInstanceOf[immutable.HashMap[String, String]]("type")
        }
        DescTableResponse(typed.toSeq.sortBy(_._1))
      case err =>
        throw new IllegalArgumentException("Invalid time series response: " + err)
    }
  }
}
Example 41
Source File: PostAggregation.scala — from the XSQL project (Apache License 2.0)
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._

// A JSON-serializable term usable inside a post-aggregation. The arithmetic
// operators build ArithmeticPostAggregation nodes with a placeholder output
// name ("n/a"); callers rename the final node via .as(...).
trait PostAggregationFieldSpec extends Expression {
  // Shared builder for the four operator methods below.
  private def arith(rhs: PostAggregationFieldSpec, fn: String): PostAggregation =
    ArithmeticPostAggregation("n/a", fn, Seq(this, rhs))

  def *(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "*")

  def /(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "/")

  def +(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "+")

  def -(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "-")
}

// A post-aggregation that can be published under an explicit output name.
trait PostAggregation extends PostAggregationFieldSpec {
  def as(outputName: String): PostAggregation
}

// Factory helpers for common post-aggregation building blocks.
object PostAggregation {
  // Fixed numeric term; named "constant" by default (renameable via .as).
  def constant(value: Double): ConstantPostAggregation =
    ConstantPostAggregation("constant", value)

  // References an aggregator's output by name inside arithmetic expressions.
  case class FieldAccess(fieldName: String) extends PostAggregationFieldSpec {
    def toJson: JValue = JObject("type" -> "fieldAccess", "fieldName" -> fieldName)
  }

}

/** Post-aggregation yielding a fixed numeric value. */
case class ConstantPostAggregation(outputName: String, value: Double) extends PostAggregation {

  /** JSON form: {"type":"constant","name":...,"value":...}. */
  def toJson: JValue =
    ("type" -> "constant") ~ ("name" -> outputName) ~ ("value" -> value)

  /** Copy published under a different output name. */
  def as(outputName: String): PostAggregation = copy(outputName = outputName)
}

/** Post-aggregation applying the arithmetic function `fn` across `fields`. */
case class ArithmeticPostAggregation(
    outputName: String,
    fn: String,
    fields: Seq[PostAggregationFieldSpec])
  extends PostAggregation {

  /** JSON form: {"type":"arithmetic","name":...,"fn":...,"fields":[...]}. */
  def toJson: JValue =
    ("type" -> "arithmetic") ~
      ("name" -> outputName) ~
      ("fn" -> fn) ~
      ("fields" -> fields.map(_.toJson))

  /** Copy published under a different output name. */
  def as(outputName: String): ArithmeticPostAggregation = copy(outputName = outputName)
}
Example 42
Source File: QueryFilter.scala — from the XSQL project (Apache License 2.0)
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JNull, JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

// A Druid query filter; `and`/`or` combinators build up boolean filter trees.
sealed trait QueryFilter extends Expression {
  def and(other: QueryFilter): QueryFilter = And(Seq(this, other))

  def or(other: QueryFilter): QueryFilter = Or(Seq(this, other))
}

/** Conjunction of filters; rendered as a Druid "and" filter. */
case class And(filters: Seq[Expression]) extends QueryFilter {

  // Fold further conjuncts into this node instead of nesting And(And(...)).
  override def and(other: QueryFilter): QueryFilter = copy(other +: filters)

  def toJson: JValue =
    ("type" -> "and") ~ ("fields" -> JArray(filters.toList.map(_.toJson)))
}

/** Disjunction of filters; rendered as a Druid "or" filter. */
case class Or(filters: Seq[Expression]) extends QueryFilter {

  // Fold further disjuncts into this node instead of nesting Or(Or(...)).
  override def or(other: QueryFilter): QueryFilter = copy(other +: filters)

  def toJson: JValue =
    ("type" -> "or") ~ ("fields" -> JArray(filters.toList.map(_.toJson)))
}

/** Negation of a single filter; rendered as a Druid "not" filter. */
case class Not(filter: Expression) extends QueryFilter {

  def toJson: JValue = ("type" -> "not") ~ ("field" -> filter.toJson)
}

// Matches rows where the dimension is non-empty, expressed as NOT(selector == "").
case class IsNotNull(attributeNotNull: String) extends QueryFilter {

//  {
//    "field": {
//      "type": "selector",
//      "dimension": "added",
//      "value": ""
//    },
//    "type": "not"
//  }
  def toJson: JValue =
    JObject(
      "field" -> JObject("type" -> "selector", "dimension" -> attributeNotNull, "value" -> ""),
      "type" -> "not")
}

/** Generic single-dimension filter whose Druid type is supplied by the caller. */
case class ExprQueryFilter(typeName: String, dimension: String, value: String)
  extends QueryFilter {
  def toJson: JValue =
    ("type" -> typeName) ~ ("dimension" -> dimension) ~ ("value" -> value)
}

/** Exact-match ("selector") filter on a single dimension. */
case class SelectorQueryFilter(dimension: String, value: String) extends QueryFilter {
  def toJson: JValue =
    ("type" -> "selector") ~ ("dimension" -> dimension) ~ ("value" -> value)
}

/** Regular-expression filter on a single dimension. */
case class RegexQueryFilter(dimension: String, pattern: String) extends QueryFilter {
  def toJson: JValue =
    ("type" -> "regex") ~ ("dimension" -> dimension) ~ ("pattern" -> pattern)
}

// Pass-through filter: emits the condition map's "filter" entry verbatim as
// JSON via Jackson's tree conversion (no validation of its structure here).
case class AllQueryFilter(condition: java.util.HashMap[String, Any]) extends QueryFilter {
  //  val json = JSONObject.fromObject(condition.get("filter")).toString
  //  def toJson: JValue = parse(json)
  def toJson: JValue = fromJsonNode(mapper.valueToTree(condition.get("filter")))
}

// Factory methods for the common filter shapes.
object QueryFilter {

  def custom(typeName: String, dimension: String, value: String): ExprQueryFilter =
    ExprQueryFilter(typeName, dimension, value)

  def where(dimension: String, value: String): SelectorQueryFilter =
    SelectorQueryFilter(dimension, value)

  def regex(dimension: String, pattern: String): RegexQueryFilter =
    RegexQueryFilter(dimension, pattern)

  // Sentinel meaning "no filter": serializes to JSON null.
  val All = new QueryFilter {
    def toJson: JValue = JNull
  }
}
Example 43
Source File: PartitionLevel.scala — from the maha project (Apache License 2.0)
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s.JsonAST.JObject
import org.json4s.scalaz.JsonScalaz._


// Degree of partitioning, ordered by numeric level (0 = unpartitioned).
sealed trait PartitionLevel {
  def level: Int

  // JSON form: {"level": <n>}.
  def asJSON: JObject = makeObj(List("level" -> toJSON(level)))
}

// Level 0: no partitioning.
case object NoPartitionLevel extends PartitionLevel {
  val level: Int = 0
}
// Level 1 partitioning.
case object FirstPartitionLevel extends PartitionLevel {
  val level: Int = 1
}
// Level 2 partitioning.
case object SecondPartitionLevel extends PartitionLevel {
  val level: Int = 2
}
// Level 3 partitioning.
case object ThirdPartitionLevel extends PartitionLevel {
  val level: Int = 3
}
Example 44
Source File: ColumnAnnotation.scala — from the maha project (Apache License 2.0)
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s.JsonAST.{JNull, JObject, JValue}
import org.json4s.scalaz.JsonScalaz._


// A marker attached to a schema column. The default JSON form carries only the
// annotation's class name; subtypes with payloads override asJSON.
sealed trait ColumnAnnotation {
  def asJSON: JObject = makeObj(
    List(
      ("annotation" -> toJSON(this.getClass.getSimpleName))
    )
  )
}

// Marker trait — presumably restricts a table to at most one column carrying an
// annotation of this kind; confirm against the schema-validation code.
sealed trait SingletonColumn

/**
 * Mixes in identity-by-class semantics: instances are equal iff they have the
 * same concrete class, regardless of field values. hashCode is precomputed from
 * the class name so annotation instances work as cheap set/map keys.
 */
trait ClassNameHashCode {
  override final val hashCode: Int = this.getClass.toString.hashCode
  // BUG FIX: the previous version compared hash codes only, so any unrelated
  // object whose hashCode happened to collide compared equal, and equals(null)
  // threw NullPointerException. Compare classes directly instead.
  override def equals(other: Any): Boolean = other match {
    case ref: AnyRef => this.getClass == ref.getClass
    case _ => false
  }
}

// An annotation carrying per-column data; `instance` returns a canonical
// representative so lookups can match on class identity (see ClassNameHashCode).
sealed trait ColumnAnnotationInstance extends ColumnAnnotation with ClassNameHashCode {
  def instance: ColumnAnnotation
}

// Hive-only sharding annotation whose payload is a derived expression.
case class HiveShardingExpression(expression: HiveDerivedExpression) extends ColumnAnnotationInstance with WithHiveEngine {
  def instance: ColumnAnnotation = HiveShardingExpression.instance

  val jUtils = JsonUtils

  // Includes the expression payload alongside the class-name tag.
  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("expression" -> jUtils.asJSON(expression))
      )
    )
}
// Canonical instance (null payload) used for class-identity lookups.
case object HiveShardingExpression {
  val instance: ColumnAnnotation = HiveShardingExpression(null)
}

// Presto-only sharding annotation whose payload is a derived expression.
case class PrestoShardingExpression(expression: PrestoDerivedExpression) extends ColumnAnnotationInstance with WithPrestoEngine {
  def instance: ColumnAnnotation = PrestoShardingExpression.instance

  val jUtils = JsonUtils

  // Includes the expression payload alongside the class-name tag.
  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("expression" -> jUtils.asJSON(expression))
      )
    )
}
// Canonical instance (null payload) used for class-identity lookups.
case object PrestoShardingExpression {
  val instance: ColumnAnnotation = PrestoShardingExpression(null)
}

// Column is its table's primary key.
case object PrimaryKey extends ColumnAnnotation
// Column values require escaping when rendered.
case object EscapingRequired extends ColumnAnnotation
// Snapshot-timestamp column, Hive engine only; at most one per table (SingletonColumn).
case object HiveSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithHiveEngine
// Snapshot-timestamp column, Oracle engine only.
case object OracleSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithOracleEngine
// Snapshot-timestamp column, Postgres engine only.
case object PostgresSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithPostgresEngine
// Column holds an aggregated value.
case object IsAggregation extends ColumnAnnotation
// Comparisons on this column are case-insensitive.
case object CaseInsensitive extends ColumnAnnotation
// Marks a column as a foreign key into the named public dimension.
case class ForeignKey(publicDimName: String) extends ColumnAnnotationInstance {
  def instance: ColumnAnnotation = ForeignKey.instance

  // Includes the target dimension name alongside the class-name tag.
  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("publicDimName" -> toJSON(publicDimName))
      )
    )
}
// Canonical instance ("instance" is a placeholder value) for class-identity lookups.
case object ForeignKey {
  val instance: ColumnAnnotation = ForeignKey("instance")
}
// Marks a column as the day/date column; fmt is its date-format pattern
// (presumably a java.text/Joda pattern — confirm against the render code).
case class DayColumn(fmt: String) extends ColumnAnnotationInstance {
  def instance: ColumnAnnotation = DayColumn.instance

  // Includes the format pattern alongside the class-name tag.
  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("fmt" -> toJSON(fmt))
      )
    )
}
// Canonical instance ("instance" is a placeholder value) for class-identity lookups.
case object DayColumn {
  val instance: ColumnAnnotation = DayColumn("instance")
}
Example 45
Source File: RollupExpression.scala — from the maha project (Apache License 2.0)
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core.fact

import com.yahoo.maha.core._
import org.json4s.JsonAST.{JArray, JObject}
import org.json4s.scalaz.JsonScalaz._


// Engine-specific rollups backed by a derived expression rather than a plain
// aggregate. Each exposes the expression's source columns for dependency tracking.
case class HiveCustomRollup(expression: HiveDerivedExpression) extends CustomRollup with WithHiveEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}
case class PrestoCustomRollup(expression: PrestoDerivedExpression) extends CustomRollup with WithPrestoEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}
case class OracleCustomRollup(expression: OracleDerivedExpression) extends CustomRollup with WithOracleEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}
case class PostgresCustomRollup(expression: PostgresDerivedExpression) extends CustomRollup with WithPostgresEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}
case class DruidCustomRollup(expression: DruidDerivedExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}
// Druid filtered aggregation: a filter applied around the delegated rollup over
// factCol. Source columns include the filter field for dependency tracking.
case class DruidFilteredRollup(filter: Filter, factCol: DruidExpression.FieldAccess,
                               delegateAggregatorRollupExpression: RollupExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = Set(filter.field, factCol.name) ++ delegateAggregatorRollupExpression.sourceColumns
}
// Variant taking a list of filters — presumably combined by the Druid query
// generator (TODO confirm the combination semantics there).
case class DruidFilteredListRollup(filter: List[Filter], factCol: DruidExpression.FieldAccess,
                               delegateAggregatorRollupExpression: RollupExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = filter.map(fil => fil.field).toSet ++ delegateAggregatorRollupExpression.sourceColumns ++ Set(factCol.name)
}

// HyperUnique rollup over a single column.
case class DruidHyperUniqueRollup(column: String) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = Set(column)
}
// Theta-sketch rollup; carries no extra configuration.
case object DruidThetaSketchRollup extends CustomRollup with WithDruidEngine
Example 46
Source File: L6-6PerRecord.scala — from the prosparkstreaming project (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

// Streaming job that polls Yahoo Finance quotes over HTTP and republishes each
// record to an MQTT topic. This variant deliberately opens a NEW broker
// connection per record — the "per record" anti-pattern this example exists to
// illustrate; see the pooled/lazy variants for the efficient approach.
object MqttSinkAppB {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    // Micro-batch length in seconds (also reused as the HTTP polling interval).
    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // One JSON document per quote, each tagged with the query's "created" timestamp.
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        // Flatten every field of the quote into one CSV line.
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreach { rec =>
          {
            // NOTE: connect/publish/disconnect per record — correct but very
            // expensive; kept intentionally for illustration.
            val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
            client.connect()
            client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8)))
            client.disconnect()
            client.close()
          }
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}
Example 47
Source File: JsonProtocol.scala — from the drizzle-spark project (Apache License 2.0)
package org.apache.spark.deploy

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

// Renders standalone-deploy master/worker state as JSON for the cluster web UI
// and REST endpoints. All writers use the json4s DSL ('~' chains build JObjects).
private[deploy] object JsonProtocol {
 // Worker identity, endpoint, and resource counters (totals plus used/free splits).
 def writeWorkerInfo(obj: WorkerInfo): JObject = {
   ("id" -> obj.id) ~
   ("host" -> obj.host) ~
   ("port" -> obj.port) ~
   ("webuiaddress" -> obj.webUiAddress) ~
   ("cores" -> obj.cores) ~
   ("coresused" -> obj.coresUsed) ~
   ("coresfree" -> obj.coresFree) ~
   ("memory" -> obj.memory) ~
   ("memoryused" -> obj.memoryUsed) ~
   ("memoryfree" -> obj.memoryFree) ~
   ("state" -> obj.state.toString) ~
   ("lastheartbeat" -> obj.lastHeartbeat)
 }

  // Application metadata and lifecycle fields.
  def writeApplicationInfo(obj: ApplicationInfo): JObject = {
    ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> obj.desc.name) ~
    ("cores" -> obj.desc.maxCores) ~
    ("user" -> obj.desc.user) ~
    ("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
  }

  // Static description of a submitted application.
  def writeApplicationDescription(obj: ApplicationDescription): JObject = {
    ("name" -> obj.name) ~
    ("cores" -> obj.maxCores) ~
    ("memoryperslave" -> obj.memoryPerExecutorMB) ~
    ("user" -> obj.user) ~
    ("command" -> obj.command.toString)
  }

  // One executor process on a worker, including its application's description.
  def writeExecutorRunner(obj: ExecutorRunner): JObject = {
    ("id" -> obj.execId) ~
    ("memory" -> obj.memory) ~
    ("appid" -> obj.appId) ~
    ("appdesc" -> writeApplicationDescription(obj.appDesc))
  }

  // Driver metadata; cores/memory come from the submitted description.
  def writeDriverInfo(obj: DriverInfo): JObject = {
    ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> obj.desc.cores) ~
    ("memory" -> obj.desc.mem)
  }

  // Whole-cluster snapshot. Aggregate core/memory counters include only workers
  // that are currently alive, while the "workers" list includes every worker.
  def writeMasterState(obj: MasterStateResponse): JObject = {
    val aliveWorkers = obj.workers.filter(_.isAlive())
    ("url" -> obj.uri) ~
    ("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
    ("cores" -> aliveWorkers.map(_.cores).sum) ~
    ("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
    ("memory" -> aliveWorkers.map(_.memory).sum) ~
    ("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
  }

  // Single-worker snapshot with its running and finished executors.
  def writeWorkerState(obj: WorkerStateResponse): JObject = {
    ("id" -> obj.workerId) ~
    ("masterurl" -> obj.masterUrl) ~
    ("masterwebuiurl" -> obj.masterWebUiUrl) ~
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("executors" -> obj.executors.toList.map(writeExecutorRunner)) ~
    ("finishedexecutors" -> obj.finishedExecutors.toList.map(writeExecutorRunner))
  }
}
Example 48
Source File: L6-8Static.scala — from the prosparkstreaming project (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

// Demonstrates the "static/singleton connection" sink pattern: every partition publishes
// through the process-wide MqttSink client instead of opening its own connection.
// NOTE(review): the outputBrokerUrl argument is parsed but unused here — MqttSink
// hard-codes its broker URL; confirm whether that is intended.
object MqttSinkAppD {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    // Batch interval in seconds; also used as the HTTP polling interval below.
    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // Tag every quote object with the query's "created" timestamp.
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        // Flatten each JSON quote into a comma-separated line of its field values.
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          // MqttSink() resolves to one shared, already-connected client per executor JVM.
          par.foreach(message => MqttSink().publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}

// Process-wide MQTT connection: Scala object initialization runs on first reference,
// so the client is created and connected once per JVM (i.e. once per executor) and
// torn down by the shutdown hook when the JVM exits.
object MqttSink {
  val brokerUrl = "tcp://localhost:1883"
  val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
  client.connect()
  sys.addShutdownHook {
    client.disconnect()
    client.close()
  }

  // Returns the shared, already-connected client.
  def apply(): MqttClient = {
    client
  }
}
Example 49
Source File: L6-18Cassandra.scala — from project prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Text
import java.nio.ByteBuffer
import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
import org.apache.cassandra.hadoop.ConfigHelper
import org.apache.cassandra.thrift.ColumnOrSuperColumn
import org.apache.cassandra.thrift.Column
import org.apache.cassandra.utils.ByteBufferUtil
import org.apache.cassandra.thrift.Mutation
import java.util.Arrays

// Streams stock quotes, sums LastTradePriceOnly per symbol over a sliding window,
// and writes the per-batch average into Cassandra through the Hadoop
// ColumnFamilyOutputFormat (thrift Mutation API).
object CassandraSinkApp {

  def main(args: Array[String]) {
    if (args.length != 6) {
      System.err.println(
        "Usage: CassandraSinkApp <appname> <cassandraHost> <cassandraPort> <keyspace> <columnFamilyName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, cassandraHost, cassandraPort, keyspace, columnFamilyName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    // All in seconds; window/slide must be multiples of the batch interval.
    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // Extract (symbol, lastTradePrice) pairs from each polled JSON payload.
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))
      .foreachRDD(rdd => {
        // Hadoop output configuration is rebuilt per micro-batch on the driver.
        val jobConf = new Configuration()
        ConfigHelper.setOutputRpcPort(jobConf, cassandraPort)
        ConfigHelper.setOutputInitialAddress(jobConf, cassandraHost)
        ConfigHelper.setOutputColumnFamily(jobConf, keyspace, columnFamilyName)
        ConfigHelper.setOutputPartitioner(jobConf, "Murmur3Partitioner")
        rdd.map(rec => {
          val c = new Column()
          c.setName(ByteBufferUtil.bytes(columnName))
          // Windowed sum divided by the number of batches per window = average price.
          c.setValue(ByteBufferUtil.bytes(rec._2 / (windowSize / batchInterval)))
          c.setTimestamp(System.currentTimeMillis)
          val m = new Mutation()
          m.setColumn_or_supercolumn(new ColumnOrSuperColumn())
          m.column_or_supercolumn.setColumn(c)
          // Row key = symbol; value = a single-column mutation.
          (ByteBufferUtil.bytes(rec._1), Arrays.asList(m))
        }).saveAsNewAPIHadoopFile(keyspace, classOf[ByteBuffer], classOf[List[Mutation]], classOf[ColumnFamilyOutputFormat], jobConf)
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 50
Source File: L6-5Exception.scala — from project prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

// NOTE(review): this listing illustrates a pitfall (the source file is named
// "L6-5Exception"): the MqttClient is created on the driver inside foreachRDD and
// then referenced from rdd.foreach, which executes on the executors. The client is
// presumably not serializable, so this is expected to fail at runtime — do not copy
// this pattern; see the per-partition and pooled variants instead.
object MqttSinkAppA {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // Tag every quote object with the query's "created" timestamp.
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        // Flatten each JSON quote into a comma-separated line of its field values.
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        // Driver-side client captured by the executor-side closure below — the pitfall.
        val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
        client.connect()
        rdd.foreach(rec => client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8))))
        client.disconnect()
        client.close()
      }

    ssc.start()
    ssc.awaitTermination()
  }

}
Example 51
Source File: L6-10LazyStatic.scala — from project prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.commons.pool2.PooledObject
import org.apache.commons.pool2.BasePooledObjectFactory
import org.apache.commons.pool2.impl.DefaultPooledObject
import org.apache.commons.pool2.impl.GenericObjectPool
import org.apache.commons.pool2.ObjectPool

// Demonstrates the "pooled connection" sink pattern: each partition borrows a
// connected client from the per-JVM MqttSinkPool and returns it when done.
// NOTE(review): outputBrokerUrl is parsed but unused — the pool hard-codes its
// broker URL. Also, returnObject is not in a finally block, so a failed publish
// leaks the borrowed client; confirm whether that is acceptable for this example.
object MqttSinkAppE {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        // Tag every quote object with the query's "created" timestamp.
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        // Flatten each JSON quote into a comma-separated line of its field values.
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          // Borrow one connected client for the whole partition, return it afterwards.
          val mqttSink = MqttSinkPool().borrowObject()
          par.foreach(message => mqttSink.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
          MqttSinkPool().returnObject(mqttSink)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}

// Per-JVM pool of connected MQTT clients. Object initialization runs on first
// reference (once per executor JVM); the shutdown hook closes the pool on exit.
object MqttSinkPool {
  val poolSize = 8
  val brokerUrl = "tcp://localhost:1883"
  val mqttPool = new GenericObjectPool[MqttClient](new MqttClientFactory(brokerUrl))
  mqttPool.setMaxTotal(poolSize)
  sys.addShutdownHook {
    mqttPool.close()
  }
  
  // Returns the shared pool for borrow/return by callers.
  def apply(): GenericObjectPool[MqttClient] = {
    mqttPool
  }
}

/** Commons-pool2 factory that manufactures connected MQTT clients for the sink pool. */
class MqttClientFactory(brokerUrl: String) extends BasePooledObjectFactory[MqttClient] {

  /** Builds a fresh client and connects it before handing it to the pool. */
  override def create(): MqttClient = {
    val freshClient = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    freshClient.connect()
    freshClient
  }

  /** Wraps a client in the pool's bookkeeping envelope. */
  override def wrap(client: MqttClient): PooledObject[MqttClient] =
    new DefaultPooledObject[MqttClient](client)

  /** A pooled client is considered valid only while its connection is up. */
  override def validateObject(pObj: PooledObject[MqttClient]): Boolean =
    pObj.getObject.isConnected()

  /** Disconnects and releases the underlying client when the pool evicts it. */
  override def destroyObject(pObj: PooledObject[MqttClient]): Unit = {
    val underlying = pObj.getObject
    underlying.disconnect()
    underlying.close()
  }

  /** Nothing to reset between borrows. */
  override def passivateObject(pObj: PooledObject[MqttClient]): Unit = ()
}
Example 52
Source File: L6-7PerPartition.scala — from project prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

// Demonstrates the "per-partition connection" sink pattern: each partition opens,
// uses, and closes its own MQTT client on the executor that processes it.
object MqttSinkAppC {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    val quotes = HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)

    // Tag every quote object with the query's "created" timestamp.
    val timestampedQuotes = quotes.flatMap { payload =>
      val query = parse(payload) \ "query"
      (query \ "results" \ "quote").children.map { quote =>
        JObject(JField("Timestamp", query \ "created")).merge(quote)
      }
    }

    // Flatten each JSON quote into a comma-separated line of its field values.
    val csvLines = timestampedQuotes.map { quote =>
      implicit val formats = DefaultFormats
      quote.children.map(_.extract[String]).mkString(",")
    }

    csvLines.foreachRDD { rdd =>
      rdd.foreachPartition { records =>
        // One client per partition, created on the executor side.
        val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
        client.connect()
        records.foreach { line =>
          client.publish(topic, new MqttMessage(line.getBytes(StandardCharsets.UTF_8)))
        }
        client.disconnect()
        client.close()
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 53
Source File: SendSlackMessage.scala — from project hyperion (Apache License 2.0)
package com.krux.hyperion.contrib.activity.notification

import java.net.{ HttpURLConnection, URL }

import scala.util.control.NonFatal

import org.json4s.JsonAST.{ JString, JObject }
import org.json4s.jackson.JsonMethods._
import scopt.OptionParser

/** Sends a notification message to a Slack incoming webhook.
 *
 *  Fix: the error handler previously caught `Throwable`, which swallows fatal JVM
 *  errors (OutOfMemoryError, etc.). It now catches only non-fatal exceptions via
 *  `scala.util.control.NonFatal`, letting fatal errors propagate.
 */
object SendSlackMessage {
  /** Parsed command-line options.
   *
   *  @param failOnError when true, a webhook failure makes the activity fail
   *  @param webhookUrl  the Slack incoming-webhook URL to POST to
   *  @param user        optional display username for the message
   *  @param message     message lines, joined with newlines when sent
   *  @param iconEmoji   optional emoji used as the message icon
   *  @param channel     optional #channel or @username destination
   */
  case class Options(
    failOnError: Boolean = false,
    webhookUrl: String = "",
    user: Option[String] = None,
    message: Seq[String] = Seq.empty,
    iconEmoji: Option[String] = None,
    channel: Option[String] = None
  )

  /** POSTs the message to the webhook as JSON.
   *
   *  @return true when the webhook answered 200, or when it failed but
   *          fail-on-error was not requested; false otherwise.
   */
  def apply(options: Options): Boolean = try {
    // Setup the connection
    val connection = new URL(options.webhookUrl).openConnection().asInstanceOf[HttpURLConnection]
    connection.setDoOutput(true)
    connection.setRequestProperty("Content-Type", "application/json")
    connection.setRequestProperty("Accept", "application/json")

    // Write the message; unset optional fields are omitted rather than sent as null
    val output = connection.getOutputStream
    try {
      val message = Seq(
        "icon_emoji" -> options.iconEmoji,
        "channel" -> options.channel,
        "username" -> options.user,
        "text" -> Option(options.message.mkString("\n"))
      ).flatMap {
        case (_, None) => None
        case (k, Some(v)) => Option(k -> JString(v))
      }

      output.write(compact(render(JObject(message: _*))).getBytes)
    } finally {
      output.close()
    }

    // Check the response code
    connection.getResponseCode == 200 || !options.failOnError
  } catch {
    // Only recover from non-fatal exceptions; OutOfMemoryError etc. must propagate.
    case NonFatal(e) =>
      System.err.println(e.toString)
      !options.failOnError
  }

  /** Parses arguments, sends the message, and exits with status 3 on failure. */
  def main(args: Array[String]): Unit = {
    val parser = new OptionParser[Options]("hyperion-notification-slack-activity") {
      override def showUsageOnError = true

      note("Sends a notification message to a Slack incoming webhook.")
      help("help").text("prints this usage text")
      opt[Unit]("fail-on-error").optional().action((_, c) => c.copy(failOnError = true))
        .text("Causes the activity to fail if any error received from the webhook")
      opt[String]("webhook-url").valueName("WEBHOOK").required().action((x, c) => c.copy(webhookUrl = x))
        .text("Sends the message to the given WEBHOOK url")
      opt[String]("user").valueName("NAME").optional().action((x, c) => c.copy(user = Option(x)))
        .text("Sends the message as the user with NAME")
      opt[String]("emoji").valueName("EMOJI").optional().action((x, c) => c.copy(iconEmoji = Option(x)))
        .text("Use EMOJI for the icon")
      opt[String]("to").valueName("CHANNEL or USERNAME").optional().action((x, c) => c.copy(channel = Option(x)))
        .text("Sends the message to #CHANNEL or @USERNAME")
      arg[String]("MESSAGE").required().unbounded().action((x, c) => c.copy(message = c.message :+ x))
        .text("Sends the given MESSAGE")
    }

    if (!parser.parse(args, Options()).exists(apply)) {
      System.exit(3)
    }
  }
}
Example 54
Source File: OrderBook.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model

import org.json4s.JsonAST.JObject
import org.json4s.native.JsonMethods._
import org.json4s.{DefaultFormats, JValue}
import stellar.sdk.KeyPair
import stellar.sdk.model.response.ResponseParser

case class OrderBook(selling: Asset, buying: Asset, bids: Seq[Order], asks: Seq[Order])

case class Order(price: Price, quantity: Long)

/** Parses a Horizon order-book document into an [[OrderBook]]. */
object OrderBookDeserializer extends ResponseParser[OrderBook]({ o: JObject =>
  implicit val formats = DefaultFormats

  // Maps Horizon's "asset_type" discriminator to the SDK asset model.
  def asset(obj: JValue) = {
    def assetCode = (obj \ s"asset_code").extract[String]

    def assetIssuer = KeyPair.fromAccountId((obj \ s"asset_issuer").extract[String])

    (obj \ s"asset_type").extract[String] match {
      case "native" => NativeAsset
      case "credit_alphanum4" => IssuedAsset4(assetCode, assetIssuer)
      case "credit_alphanum12" => IssuedAsset12(assetCode, assetIssuer)
      case t => throw new RuntimeException(s"Unrecognised asset type '$t'")
    }
  }

  // Converts each array element into an Order; "amount" is converted to base units.
  def orders(obj: JValue) = {
    obj.children.map(c =>
      Order(
        price = Price(
          n = (c \ "price_r" \ "n").extract[Int],
          d = (c \ "price_r" \ "d").extract[Int]
        ),
        quantity = Amount.toBaseUnits((c \ "amount").extract[String]).get
      ))
  }

  try {
    OrderBook(
      selling = asset(o \ "base"),
      buying = asset(o \ "counter"),
      bids = orders(o \ "bids"),
      asks = orders(o \ "asks")
    )
  } catch {
    // Re-throw with the pretty-printed document attached to aid diagnosis.
    case t: Throwable => throw new RuntimeException(pretty(render(o)), t)
  }
}) 
Example 55
Source File: Transacted.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.op

import java.time.ZonedDateTime

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk.model.response.ResponseParser


/** An operation as it appears inside a transaction: the operation itself plus
 *  the id, enclosing transaction hash, and creation time reported by the server. */
case class Transacted[+O <: Operation](id: Long,
                                      txnHash: String,
                                      createdAt: ZonedDateTime,
                                      operation: O)

/** Parses a Horizon operation record into a [[Transacted]] operation. */
object TransactedOperationDeserializer extends ResponseParser[Transacted[Operation]]({ o: JObject =>
  implicit val formats = DefaultFormats + OperationDeserializer

  // Horizon encodes the numeric id as a JSON string.
  val id = (o \ "id").extract[String].toLong
  val txnHash = (o \ "transaction_hash").extract[String]
  val createdAt = ZonedDateTime.parse((o \ "created_at").extract[String])
  val operation = o.extract[Operation]

  Transacted(id, txnHash, createdAt, operation)
}) 
Example 56
Source File: LedgerResponse.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import java.time.ZonedDateTime

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk.model.{Amount, NativeAmount}

/** A ledger as reported by the server, with transaction counts split into
 *  successful and failed, and all monetary figures as native amounts. */
case class LedgerResponse(id: String, hash: String, previousHash: Option[String], sequence: Long, successTransactionCount: Int,
                          failureTransactionCount: Int, operationCount: Int, closedAt: ZonedDateTime,
                          totalCoins: NativeAmount, feePool: NativeAmount, baseFee: NativeAmount, baseReserve: NativeAmount,
                          maxTxSetSize: Int) {

  // Total number of transactions in the ledger, successful or not.
  def transactionCount: Int = successTransactionCount + failureTransactionCount

}

/** Parses a Horizon ledger document into a [[LedgerResponse]]. */
object LedgerRespDeserializer extends ResponseParser[LedgerResponse]({ o: JObject =>
  implicit val formats = DefaultFormats

  def str(key: String) = (o \ key).extract[String]

  // "base_fee" (plain long) takes precedence; fall back to "base_fee_in_stroops".
  val baseFee = NativeAmount((o \ "base_fee").extractOpt[Long].getOrElse((o \ "base_fee_in_stroops").extract[Long]))

  // "base_reserve" may be a decimal string (converted to base units) or, when
  // absent, the value comes from "base_reserve_in_stroops".
  val baseReserve = {
    val legacy: Option[Long] = (o \ "base_reserve").extractOpt[String].map(_.toDouble).map(Amount.toBaseUnits).map(_.get)
    NativeAmount(legacy.getOrElse((o \ "base_reserve_in_stroops").extract[Long]))
  }

  LedgerResponse(
    id = str("id"),
    hash = str("hash"),
    previousHash = (o \ "prev_hash").extractOpt[String],
    sequence = (o \ "sequence").extract[Long],
    successTransactionCount = (o \ "successful_transaction_count").extract[Int],
    failureTransactionCount = (o \ "failed_transaction_count").extract[Int],
    operationCount = (o \ "operation_count").extract[Int],
    closedAt = ZonedDateTime.parse(str("closed_at")),
    totalCoins = Amount.toBaseUnits(str("total_coins")).map(NativeAmount.apply).get,
    feePool = Amount.toBaseUnits(str("fee_pool")).map(NativeAmount.apply).get,
    baseFee = baseFee,
    baseReserve = baseReserve,
    maxTxSetSize = (o \ "max_tx_set_size").extract[Int]
  )
}) 
Example 57
Source File: OfferResponse.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import java.time.ZonedDateTime

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk._
import stellar.sdk.model._

/** An open offer on the distributed exchange: who is selling what amount of which
 *  asset, in exchange for which asset, at which price. */
case class OfferResponse(id: Long, seller: PublicKeyOps, selling: Amount, buying: Asset, price: Price,
                         lastModifiedLedger: Long, lastModifiedTime: ZonedDateTime) {

  override def toString = {
    s"Offer $id: ${seller.accountId} selling $selling, buying $buying @ rate $price"
  }
}

/** Parses a Horizon offer document into an [[OfferResponse]]. */
object OfferRespDeserializer extends ResponseParser[OfferResponse]({ o: JObject =>
  implicit val formats = DefaultFormats
  // The numeric id arrives as a JSON string.
  val id = (o \ "id").extract[String].toLong

  def account(accountKey: String = "account") = KeyPair.fromAccountId((o \ accountKey).extract[String])

  // Maps the "asset_type" discriminator under the given prefix to the asset model.
  def asset(prefix: String = "", issuerKey: String = "asset_issuer") = {
    def assetCode = (o \ prefix \ "asset_code").extract[String]

    def assetIssuer = KeyPair.fromAccountId((o \ prefix \ issuerKey).extract[String])

    (o \ prefix \ "asset_type").extract[String] match {
      case "native" => NativeAsset
      case "credit_alphanum4" => IssuedAsset4(assetCode, assetIssuer)
      case "credit_alphanum12" => IssuedAsset12(assetCode, assetIssuer)
      case t => throw new RuntimeException(s"Unrecognised asset type '$t'")
    }
  }

  def doubleFromString(key: String) = (o \ key).extract[String].toDouble

  // NOTE(review): the prefix selects only the asset; the quantity always comes from
  // the document's root "amount" field — presumably matching the offer schema, where
  // a single amount accompanies the "selling" asset. Confirm against the server docs.
  def amount(prefix: String = "") = {
    val units = Amount.toBaseUnits(doubleFromString("amount")).get
    asset(prefix) match {
      case nna: NonNativeAsset => IssuedAmount(units, nna)
      case NativeAsset => NativeAmount(units)
    }
  }

  // The price is a rational: numerator n over denominator d.
  def price = {
    val priceObj = o \ "price_r"
    Price(
      (priceObj \ "n").extract[Int],
      (priceObj \ "d").extract[Int]
    )
  }

  def lastModifiedLedger = (o \ "last_modified_ledger").extract[Long]

  def lastModifiedTime = ZonedDateTime.parse((o \ "last_modified_time").extract[String])

  OfferResponse(id, account("seller"), amount("selling"), asset("buying"), price, lastModifiedLedger, lastModifiedTime)
}) 
Example 58
Source File: FederationResponse.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk.model._
import stellar.sdk.util.ByteArrays.{hexToBytes, trimmedByteArray}
import stellar.sdk.{KeyPair, PublicKey}

/** Result of a federation lookup: the human-readable address, the resolved
 *  account, and an optional memo attached to the mapping. */
case class FederationResponse(address: String,
                              account: PublicKey,
                              memo: Memo = NoMemo)

/** Parses a federation-server document into a [[FederationResponse]].
 *  Both address and account may legitimately be null here (see inline comments);
 *  callers are expected to tolerate the direction-specific missing field. */
object FederationResponseDeserialiser extends ResponseParser[FederationResponse]({ o: JObject =>
  implicit val formats = DefaultFormats

//  println(JsonMethods.pretty(JsonMethods.render(o)))

  FederationResponse(
    // reference server erroneously fails to set `stellar_address` for forward lookups
    address = (o \ "stellar_address").extractOpt[String].orNull,
    // reference server erroneously fails to set `account_id` for reverse lookups
    account = (o \ "account_id").extractOpt[String].map(KeyPair.fromAccountId).orNull,
    // memo_type selects how the raw "memo" value is interpreted; anything else → no memo
    memo = (o \ "memo_type").extractOpt[String] match {
      case Some("id") => MemoId((o \ "memo").extract[String].toLong)
      case Some("text") => MemoText((o \ "memo").extract[String])
      case Some("hash") => MemoHash(trimmedByteArray(hexToBytes((o \ "memo").extract[String])))
      case _ => NoMemo
    }
  )
}) 
Example 59
Source File: TransactionPostResponse.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import stellar.sdk.Network
import stellar.sdk.model.ledger.{LedgerEntryChange, LedgerEntryChanges, TransactionLedgerEntries}
import stellar.sdk.model.result._
import stellar.sdk.model.{NativeAmount, SignedTransaction}

// NOTE(review): this excerpt appears truncated — the closing brace of the abstract
// class below (and likely a scaladoc block plus the TransactionApproved case class
// used by the deserializer further down) was dropped during extraction. The code is
// kept as-is; restore the missing pieces from the original source file.
/** Base type for the server's answer to a posted transaction. */
sealed abstract class TransactionPostResponse(envelopeXDR: String, resultXDR: String) {
  val isSuccess: Boolean

  // Decodes the submitted envelope back into a signed transaction for the given network.
  def transaction(implicit network: Network): SignedTransaction = SignedTransaction.decodeXDR(envelopeXDR)

  def feeCharged: NativeAmount

  
/** A rejected transaction: HTTP status, detail text, and the result codes reported
 *  by the server alongside the raw XDR payloads. */
case class TransactionRejected(status: Int, detail: String,
                               resultCode: String, opResultCodes: Seq[String],
                               envelopeXDR: String, resultXDR: String)
  extends TransactionPostResponse(envelopeXDR, resultXDR) {

  override val isSuccess: Boolean = false

  // -- unroll nested XDR deserialised object into this object for convenience
  lazy val result: TransactionNotSuccessful = TransactionResult.decodeXDR(resultXDR).asInstanceOf[TransactionNotSuccessful]

  def feeCharged: NativeAmount = result.feeCharged
}



/** Parses the server's answer to a posted transaction: a "transaction_failed"
 *  error document becomes a TransactionRejected; anything else is treated as an
 *  approval. (TransactionApproved is defined elsewhere in the original file —
 *  its definition is not part of this excerpt.) */
object TransactionPostResponseDeserializer extends ResponseParser[TransactionPostResponse]({
  o: JObject =>
    implicit val formats = DefaultFormats

    (o \ "type").extractOpt[String] match {

      case Some("https://stellar.org/horizon-errors/transaction_failed") =>
        // Failure: result codes and raw XDR live under the "extras" object.
        TransactionRejected(
          status = (o \ "status").extract[Int],
          detail = (o \ "detail").extract[String],
          resultCode = (o \ "extras" \ "result_codes" \ "transaction").extract[String],
          opResultCodes = (o \ "extras" \ "result_codes" \ "operations").extract[Seq[String]],
          resultXDR = (o \ "extras" \ "result_xdr").extract[String],
          envelopeXDR = (o \ "extras" \ "envelope_xdr").extract[String]
        )

      case _ =>
        // Success: fields are at the document root.
        TransactionApproved(
          hash = (o \ "hash").extract[String],
          ledger = (o \ "ledger").extract[Long],
          envelopeXDR = (o \ "envelope_xdr").extract[String],
          resultXDR = (o \ "result_xdr").extract[String],
          resultMetaXDR = (o \ "result_meta_xdr").extract[String]
        )
    }
}) 
Example 60
Source File: AccountResponse.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import java.nio.charset.StandardCharsets.UTF_8

import org.json4s.{DefaultFormats, Formats}
import org.json4s.JsonAST.{JArray, JObject}
import stellar.sdk._
import stellar.sdk.model.Amount.toBaseUnits
import stellar.sdk.model._
import stellar.sdk.util.ByteArrays

/** Full account detail as reported by the server: identity, sequence number,
 *  thresholds and flags, balances, signers, and the raw key/value data entries. */
case class AccountResponse(id: PublicKey,
                           lastSequence: Long,
                           subEntryCount: Int,
                           thresholds: Thresholds,
                           authRequired: Boolean,
                           authRevocable: Boolean,
                           balances: List[Balance],
                           signers: List[Signer],
                           data: Map[String, Array[Byte]]) {

  // The account's next usable sequence number is lastSequence + 1.
  def toAccount: Account = Account(AccountId(id.publicKey), lastSequence + 1)

  // Interprets each raw data value as a UTF-8 string.
  def decodedData: Map[String, String] = data.map { case (k, v) => k -> new String(v, UTF_8) }
}

/** Parses a Horizon account document into an [[AccountResponse]].
 *  NOTE(review): the `val JArray(...)`/`val JObject(...)` destructuring patterns
 *  throw a MatchError if "balances", "signers" or "data" is missing or of an
 *  unexpected shape — presumably acceptable for well-formed server responses. */
object AccountRespDeserializer extends ResponseParser[AccountResponse]({ o: JObject =>
  implicit val formats: Formats = DefaultFormats
  val id = KeyPair.fromAccountId((o \ "id").extract[String])
  // The sequence number arrives as a JSON string.
  val seq = (o \ "sequence").extract[String].toLong
  val subEntryCount = (o \ "subentry_count").extract[Int]
  val lowThreshold = (o \ "thresholds" \ "low_threshold").extract[Int]
  val mediumThreshold = (o \ "thresholds" \ "med_threshold").extract[Int]
  val highThreshold = (o \ "thresholds" \ "high_threshold").extract[Int]
  val authRequired = (o \ "flags" \ "auth_required").extract[Boolean]
  val authRevocable = (o \ "flags" \ "auth_revocable").extract[Boolean]
  val JArray(jsBalances) = o \ "balances"
  val balances = jsBalances.map {
    case balObj: JObject =>
      // Balance amounts are decimal strings, converted to integral base units.
      val units = toBaseUnits((balObj \ "balance").extract[String].toDouble).get
      val amount = (balObj \ "asset_type").extract[String] match {
        case "credit_alphanum4" =>
          Amount(units, IssuedAsset4(
            code = (balObj \ "asset_code").extract[String],
            issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String])
          ))
        case "credit_alphanum12" =>
          Amount(units, IssuedAsset12(
            code = (balObj \ "asset_code").extract[String],
            issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String])
          ))
        case "native" => NativeAmount(units)
        case t => throw new RuntimeException(s"Unrecognised asset type: $t")
      }
      // Optional trustline limit and liability figures, also decimal strings.
      val limit = (balObj \ "limit").extractOpt[String].map(BigDecimal(_)).map(toBaseUnits).map(_.get)
      val buyingLiabilities = toBaseUnits(BigDecimal((balObj \ "buying_liabilities").extract[String])).get
      val sellingLiabilities = toBaseUnits(BigDecimal((balObj \ "selling_liabilities").extract[String])).get
      // Authorisation flags default to false when absent.
      val authorised = (balObj \ "is_authorized").extractOpt[Boolean].getOrElse(false)
      val authorisedToMaintainLiabilities = (balObj \ "is_authorized_to_maintain_liabilities")
        .extractOpt[Boolean].getOrElse(false)

      Balance(amount, limit, buyingLiabilities, sellingLiabilities, authorised, authorisedToMaintainLiabilities)
    case _ => throw new RuntimeException(s"Expected js object at 'balances'")
  }
  val JArray(jsSigners) = o \ "signers"
  val signers = jsSigners.map {
    case signerObj: JObject =>
      val key = StrKey.decodeFromString((signerObj \ "key").extract[String]).asInstanceOf[SignerStrKey]
      val weight = (signerObj \ "weight").extract[Int]
      Signer(key, weight)
    case _ => throw new RuntimeException(s"Expected js object at 'signers'")
  }
  // Data entry values are base64-encoded byte arrays.
  val JObject(dataFields) = o \ "data"
  val data = dataFields.map{ case (k, v) => k -> ByteArrays.base64(v.extract[String]) }.toMap

  AccountResponse(id, seq, subEntryCount, Thresholds(lowThreshold, mediumThreshold, highThreshold),
    authRequired, authRevocable, balances, signers, data)

}) 
Example 61
Source File: NetworkInfo.scala — from project scala-stellar-sdk (Apache License 2.0)
package stellar.sdk.model.response

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject


/** Version and ledger-range metadata reported by the server's root endpoint. */
case class NetworkInfo(horizonVersion: String,
                       coreVersion: String,
                       earliestLedger: Long,
                       latestLedger: Long,
                       passphrase: String,
                       currentProtocolVersion: Int,
                       supportedProtocolVersion: Int)

/** Parses the server's root document into a [[NetworkInfo]]. */
object NetworkInfoDeserializer extends ResponseParser[NetworkInfo]({ o: JObject =>
  implicit val formats = DefaultFormats

  // Pull each mandatory field out of the root object, then assemble the result.
  val horizonVersion = (o \ "horizon_version").extract[String]
  val coreVersion = (o \ "core_version").extract[String]
  val earliestLedger = (o \ "history_elder_ledger").extract[Long]
  val latestLedger = (o \ "history_latest_ledger").extract[Long]
  val passphrase = (o \ "network_passphrase").extract[String]
  val currentProtocolVersion = (o \ "current_protocol_version").extract[Int]
  val supportedProtocolVersion = (o \ "core_supported_protocol_version").extract[Int]

  NetworkInfo(horizonVersion, coreVersion, earliestLedger, latestLedger, passphrase,
    currentProtocolVersion, supportedProtocolVersion)
})