org.json4s.NoTypeHints Scala Examples

The following examples show how to use org.json4s.NoTypeHints. Each example is taken from an open-source project; the source file, project, and license are noted above it.
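A Formats instance built with NoTypeHints serializes values as plain JSON, with no type-hint field (by default named "jsonClass") embedded in the output; it is the usual choice when the concrete type is known at read time. A minimal round-trip sketch using json4s-native, where the Person case class is hypothetical and not taken from any project below:

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{read, write}

// Hypothetical example type; any case class works the same way.
case class Person(name: String, age: Int)

object NoTypeHintsSketch extends App {
  // With NoTypeHints, json4s writes plain JSON with no class-name hint field.
  implicit val formats: Formats = Serialization.formats(NoTypeHints)

  val json = write(Person("Ada", 36))
  println(json)               // {"name":"Ada","age":36}
  println(read[Person](json)) // Person(Ada,36)
}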
Example 1
Source File: ImageProcessor.scala    From donut   with MIT License
package report.donut.gherkin.processors

import report.donut.gherkin.model.Embedding
import org.json4s.jackson.Serialization
import org.json4s.{Formats, NoTypeHints, jackson}

object ImageProcessor {

  var imageMap = scala.collection.mutable.Map[Int, Embedding]()

  def getScreenshotIds(embeddings: List[Embedding]): String = {
    // Index each embedding by the hash of its data, cache it, and return the keys.
    val byHash: Map[Int, Embedding] = embeddings.map(e => e.data.hashCode -> e).toMap
    imageMap ++= byHash
    byHash.keys.toList.mkString(",")
  }

  def allImages: String = {
    implicit def json4sJacksonFormats: Formats = jackson.Serialization.formats(NoTypeHints)
    val b: List[Embedding] = imageMap.map { case (k, v) => new Embedding(v.mime_type, v.data, k) }.toList
    Serialization.writePretty(b)
  }

} 
Example 2
Source File: FeeStatsResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class FeeStatsResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + FeeStatsRespDeserializer

  "a fee stats response document" should {
    "parse to a fee stats response" >> prop { r: FeeStatsResponse =>
      val json =
        s"""
           |{
           |  "last_ledger": "${r.lastLedger}",
           |  "last_ledger_base_fee": "${r.lastLedgerBaseFee.units}",
           |  "ledger_capacity_usage": "${r.ledgerCapacityUsage}",
           |  "fee_charged": {
           |    "max": "${r.chargedFees.max.units}",
           |    "min": "${r.chargedFees.min.units}",
           |    "mode": "${r.chargedFees.mode.units}",
           |    "p10": "${r.chargedFees.percentiles(10).units}",
           |    "p20": "${r.chargedFees.percentiles(20).units}",
           |    "p30": "${r.chargedFees.percentiles(30).units}",
           |    "p40": "${r.chargedFees.percentiles(40).units}",
           |    "p50": "${r.chargedFees.percentiles(50).units}",
           |    "p60": "${r.chargedFees.percentiles(60).units}",
           |    "p70": "${r.chargedFees.percentiles(70).units}",
           |    "p80": "${r.chargedFees.percentiles(80).units}",
           |    "p90": "${r.chargedFees.percentiles(90).units}",
           |    "p95": "${r.chargedFees.percentiles(95).units}",
           |    "p99": "${r.chargedFees.percentiles(99).units}"
           |  },
           |  "max_fee": {
           |    "max": "${r.maxFees.max.units}",
           |    "min": "${r.maxFees.min.units}",
           |    "mode": "${r.maxFees.mode.units}",
           |    "p10": "${r.maxFees.percentiles(10).units}",
           |    "p20": "${r.maxFees.percentiles(20).units}",
           |    "p30": "${r.maxFees.percentiles(30).units}",
           |    "p40": "${r.maxFees.percentiles(40).units}",
           |    "p50": "${r.maxFees.percentiles(50).units}",
           |    "p60": "${r.maxFees.percentiles(60).units}",
           |    "p70": "${r.maxFees.percentiles(70).units}",
           |    "p80": "${r.maxFees.percentiles(80).units}",
           |    "p90": "${r.maxFees.percentiles(90).units}",
           |    "p95": "${r.maxFees.percentiles(95).units}",
           |    "p99": "${r.maxFees.percentiles(99).units}"
           |  }
           |}
         """.stripMargin

      val actual = parse(json).extract[FeeStatsResponse]
      actual mustEqual r
      actual.acceptedFeePercentiles mustEqual actual.chargedFees.percentiles
      actual.minAcceptedFee mustEqual actual.chargedFees.min
      actual.modeAcceptedFee mustEqual actual.chargedFees.mode
    }
  }

} 
Example 3
Source File: LedgerResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import java.time.ZoneId
import java.time.format.DateTimeFormatter

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class LedgerResponseSpec extends Specification with ArbitraryInput {

  val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneId.of("UTC"))

  implicit val formats = Serialization.formats(NoTypeHints) + LedgerRespDeserializer

  "a ledger response document" should {
    "parse to a ledger response" >> prop { lr: LedgerResponse =>

      val json =
        s"""
           |{
           |  "_links": {
           |    "self": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11"
           |    },
           |    "transactions": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/transactions{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "operations": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/operations{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "payments": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/payments{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "effects": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/effects{?cursor,limit,order}",
           |      "templated": true
           |    }
           |  },
           |  "id": "${lr.id}",
           |  "paging_token": "47244640256",
           |  "hash": "${lr.hash}",
           |  ${lr.previousHash.map(h => s""""prev_hash": "$h",""").getOrElse("")}
           |  "sequence": ${lr.sequence},
           |  "successful_transaction_count": ${lr.successTransactionCount},
           |  "failed_transaction_count": ${lr.failureTransactionCount},
           |  "operation_count": ${lr.operationCount},
           |  "closed_at": "${formatter.format(lr.closedAt)}",
           |  "total_coins": "${lr.totalCoins.toDisplayUnits}",
           |  "fee_pool": "${lr.feePool.toDisplayUnits}",
           |  "base_fee_in_stroops": ${lr.baseFee.units},
           |  "base_reserve_in_stroops": ${lr.baseReserve.units},
           |  "max_tx_set_size": ${lr.maxTxSetSize},
           |  "protocol_version": 4
           |}
        """.stripMargin

      parse(json).extract[LedgerResponse] must beLike { case actual: LedgerResponse =>
        actual.copy(closedAt = lr.closedAt) mustEqual lr
        actual.closedAt.toInstant.toEpochMilli mustEqual lr.closedAt.toInstant.toEpochMilli
      }
    }

    "calculate transaction count as sum of failed and successful transactions" >> prop { lr: LedgerResponse =>
      lr.transactionCount mustEqual lr.failureTransactionCount + lr.successTransactionCount
    }
  }

} 
Example 4
Source File: TradeEffectResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Gen
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.{Amount, NonNativeAsset}

class TradeEffectResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer

  "a trade effect document" should {
    "parse to a trade effect" >> prop {
      (id: String, offerId: Long, buyer: KeyPair, bought: Amount, seller: KeyPair, sold: Amount) =>
        val json = doc(id, offerId, buyer, bought, seller, sold)
        parse(json).extract[EffectResponse] mustEqual EffectTrade(id, offerId, buyer, bought, seller, sold)
    }.setGen1(Gen.identifier).setGen2(Gen.posNum[Long])
  }

  def doc(id: String, offerId: Long, buyer: PublicKeyOps, bought: Amount, seller: PublicKeyOps, sold: Amount) = {
    s""" {
        "_links": {
          "operation": {
            "href": "https://horizon-testnet.stellar.org/operations/31161168848490497"
          },
          "succeeds": {
            "href": "https://horizon-testnet.stellar.org/effects?order=desc&cursor=31161168848490497-2"
          },
          "precedes": {
            "href": "https://horizon-testnet.stellar.org/effects?order=asc&cursor=31161168848490497-2"
          }
        },
        "id": "$id",
        "paging_token": "31161168848490497-2",
        "account": "${buyer.accountId}",
        "type": "trade",
        "type_i": 33,
        "seller": "${seller.accountId}",
        "offer_id": $offerId,
        ${amountDocPortion(sold, sold = true)},
        ${amountDocPortion(bought, sold = false)}
      }"""
  }

  def amountDocPortion(amount: Amount, sold: Boolean): String = {
    val bs = if (sold) "sold" else "bought"
    amount.asset match {
      case nn: NonNativeAsset =>
        s""""${bs}_amount": "${amountString(amount)}",
           |"${bs}_asset_type": "${nn.typeString}",
           |"${bs}_asset_code": "${nn.code}",
           |"${bs}_asset_issuer": "${nn.issuer.accountId}"
        """.stripMargin.trim

      case _ =>
        s""""${bs}_amount": "${amountString(amount)}",
           |"${bs}_asset_type": "native"
        """.stripMargin.trim
    }
  }

  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))
} 
Example 5
Source File: SignerEffectResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.scalacheck.Gen
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.Amount

class SignerEffectResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer

  "a signer created effect document" should {
    "parse to a signer created effect" >> prop { (id: String, kp: KeyPair, weight: Short, pubKey: String) =>
      val json = doc(id, kp, "signer_created", weight, "public_key" -> pubKey)
      parse(json).extract[EffectResponse] mustEqual EffectSignerCreated(id, kp.asPublicKey, weight, pubKey)
    }.setGen1(Gen.identifier).setGen4(Gen.identifier)
  }

  "a signer updated effect document" should {
    "parse to a signer updated effect" >> prop { (id: String, kp: KeyPair, weight: Short, pubKey: String) =>
      val json = doc(id, kp, "signer_updated", weight, "public_key" -> pubKey)
      parse(json).extract[EffectResponse] mustEqual EffectSignerUpdated(id, kp.asPublicKey, weight, pubKey)
    }.setGen1(Gen.identifier).setGen4(Gen.identifier)
  }

  "a signer removed effect document" should {
    "parse to a signer removed effect" >> prop { (id: String, kp: KeyPair, pubKey: String) =>
      val json = doc(id, kp, "signer_removed", 0, "public_key" -> pubKey)
      parse(json).extract[EffectResponse] mustEqual EffectSignerRemoved(id, kp.asPublicKey, pubKey)
    }.setGen1(Gen.identifier).setGen3(Gen.identifier)
  }

  def doc(id: String, kp: KeyPair, tpe: String, weight: Short, extra: (String, Any)*) =
    s"""
       |{
       |  "_links": {
       |    "operation": {
       |      "href": "https://horizon-testnet.stellar.org/operations/10157597659144"
       |    },
       |    "succeeds": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144-2"
       |    },
       |    "precedes": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144-2"
       |    }
       |  },
       |  "id": "$id",
       |  "paging_token": "10157597659144-2",
       |  "account": "${kp.accountId}",
       |  "weight": $weight
       |  "type": "$tpe",
       |  "type_i": 10,
       |  ${
      extra.map {
        case (k, v: String) => s""""$k": "$v"""".trim
        case (k, v) => s""""$k": $v""".trim
      }.mkString(", ")
    }
       |}
    """.stripMargin

  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))

} 
Example 6
Source File: OrderBookSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.op.JsonSnippets
import stellar.sdk.model.{Order, OrderBook, OrderBookDeserializer}

class OrderBookSpec extends Specification with ArbitraryInput with JsonSnippets {

  implicit val formats = Serialization.formats(NoTypeHints) + OrderBookDeserializer

  "order book" should {
    "parse from json" >> prop { ob: OrderBook =>
      val doc =
        s"""
           |{
           |  "bids": [${ob.bids.map(order).mkString(",")}],
           |  "asks": [${ob.asks.map(order).mkString(",")}],
           |  "base": {${asset(ob.selling)}}
           |  "counter": {${asset(ob.buying)}}
           |}
        """.stripMargin

      parse(doc).extract[OrderBook] mustEqual ob
    }
  }

  private def order(o: Order) =
    s"""{
       |  "price_r": {
       |    "n": ${o.price.n},
       |    "d": ${o.price.d}
       |  },
       |  "price": "${o.price.asDecimalString}",
       |  "amount": "${o.quantity / math.pow(10, 7)}"
       |}
     """.stripMargin

} 
Example 7
Source File: OfferResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.model.{Amount, Asset, NonNativeAsset}
import stellar.sdk.ArbitraryInput

class OfferResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + OfferRespDeserializer
  private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneId.of("UTC"))

  "an offer response document" should {
    "parse to an offer response" >> prop { or: OfferResponse =>
      val json =
        s"""
           |{
           |  "_links": {
           |    "self": {
           |      "href": "https://horizon-testnet.stellar.org/offers/101542"
           |    },
           |    "offer_maker": {
           |      "href": "https://horizon-testnet.stellar.org/accounts/GCXYKQF35XWATRB6AWDDV2Y322IFU2ACYYN5M2YB44IBWAIITQ4RYPXK"
           |    }
           |  },
           |  "id": ${or.id},
           |  "paging_token": "101542",
           |  "seller": "${or.seller.accountId}",
           |  "selling": {
           |    ${assetJson(or.selling.asset)}
           |  },
           |  "buying": {
           |    ${assetJson(or.buying)}
           |  },
           |  "amount": "${amountString(or.selling)}",
           |  "price_r": {
           |    "n": ${or.price.n},
           |    "d": ${or.price.d}
           |  },
           |  "price": "3.0300000",
           |  "last_modified_ledger": ${or.lastModifiedLedger},
           |  "last_modified_time": "${formatter.format(or.lastModifiedTime)}"
           |}
           |
        """.stripMargin

      parse(json).extract[OfferResponse] mustEqual or
    }
  }

  def assetJson(asset: Asset) = asset match {
    case nn: NonNativeAsset =>
      s"""
         |"asset_type": "${nn.typeString}",
         |"asset_code": "${nn.code}",
         |"asset_issuer": "${nn.issuer.accountId}"
        """.stripMargin.trim

    case _ => """"asset_type": "native""""
  }

  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))


} 
Example 8
Source File: TrustLineEffectResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.scalacheck.Gen
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.{Amount, IssuedAmount, NonNativeAsset}

class TrustLineEffectResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer

  "a trustline created effect document" should {
    "parse to a trustline created effect" >> prop {
      (id: String, accn: KeyPair, asset: NonNativeAsset, limit: Long) =>
        val json = doc(id, "trustline_created", accn, asset, limit)
        parse(json).extract[EffectResponse] mustEqual EffectTrustLineCreated(id, accn.asPublicKey, IssuedAmount(limit, asset))
    }.setGen1(Gen.identifier).setGen4(Gen.posNum[Long])
  }

  "a trustline updated effect document" should {
    "parse to a trustline updated effect" >> prop {
      (id: String, accn: KeyPair, asset: NonNativeAsset, limit: Long) =>
        val json = doc(id, "trustline_updated", accn, asset, limit)
        parse(json).extract[EffectResponse] mustEqual EffectTrustLineUpdated(id, accn.asPublicKey, IssuedAmount(limit, asset))
    }.setGen1(Gen.identifier).setGen4(Gen.posNum[Long])
  }

  "a trustline removed effect document" should {
    "parse to a trustline removed effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) =>
      val json = doc(id, "trustline_removed", accn, asset, 0)
      parse(json).extract[EffectResponse] mustEqual EffectTrustLineRemoved(id, accn.asPublicKey, asset)
    }.setGen1(Gen.identifier)
  }

  def doc(id: String, tpe: String, accn: PublicKeyOps, asset: NonNativeAsset, limit: Long) = {
    s"""
       |{
       |  "_links": {
       |    "operation": {
       |      "href": "https://horizon-testnet.stellar.org/operations/10157597659144"
       |    },
       |    "succeeds": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144-2"
       |    },
       |    "precedes": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144-2"
       |    }
       |  },
       |  "id": "$id",
       |  "paging_token": "10157597659144-2",
       |  "account": "${accn.accountId}",
       |  "type": "$tpe",
       |  "type_i": 20,
       |  "asset_type": "${asset.typeString}",
       |  "asset_code": "${asset.code}",
       |  "asset_issuer": "${asset.issuer.accountId}",
       |  "limit": "${limit / math.pow(10, 7)}"
       |}
    """.stripMargin
  }


  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))

} 
Example 9
Source File: TrustLineAuthEffectResponseSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.response

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.scalacheck.Gen
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.NonNativeAsset

class TrustLineAuthEffectResponseSpec extends Specification with ArbitraryInput {

  implicit val formats: Formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer

  "an authorize trustline effect document" should {
    "parse to an authorize trustline effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) =>
      val json = doc(id, "trustline_authorized", accn, asset, 0.0)
      parse(json).extract[EffectResponse] mustEqual EffectTrustLineAuthorized(id, accn.asPublicKey, asset)
    }.setGen1(Gen.identifier)
  }

  "an authorize to maintain liabilities effect document" should {
    "parse to an authorize to maintain liabilities effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) =>
      val json = doc(id, "trustline_authorized_to_maintain_liabilities", accn, asset, 0.0)
      parse(json).extract[EffectResponse] mustEqual EffectTrustLineAuthorizedToMaintainLiabilities(id, accn.asPublicKey, asset)
    }.setGen1(Gen.identifier)
  }

  "a deauthorize trustline effect document" should {
    "parse to a deauthorize trustline effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) =>
      val json = doc(id, "trustline_deauthorized", accn, asset, 0.0)
      parse(json).extract[EffectResponse] mustEqual EffectTrustLineDeauthorized(id, accn.asPublicKey, asset)
    }.setGen1(Gen.identifier)
  }

  def doc(id: String, tpe: String, accn: PublicKeyOps, asset: NonNativeAsset, limit: Double) = {
    s"""
       |{
       |  "_links": {
       |    "operation": {
       |      "href": "https://horizon-testnet.stellar.org/operations/10157597659144"
       |    },
       |    "succeeds": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144-2"
       |    },
       |    "precedes": {
       |      "href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144-2"
       |    }
       |  },
       |  "id": "$id",
       |  "paging_token": "10157597659144-2",
       |  "account": "${asset.issuer.accountId}",
       |  "type": "$tpe",
       |  "type_i": 23,
       |  "asset_type": "${asset.typeString}",
       |  "asset_code": "${asset.code}",
       |  "trustor": "${accn.accountId}"
       |}
    """.stripMargin
  }
} 
Example 10
Source File: TradeAggregationSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model

import org.json4s.NoTypeHints
import org.json4s.native.{JsonMethods, Serialization}
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class TradeAggregationSpec extends Specification with ArbitraryInput {
  implicit val formats = Serialization.formats(NoTypeHints) + TradeAggregationDeserializer

  "a payment path response document" should {
    "parse to a payment path" >> prop { ta: TradeAggregation =>
      val json =
        s"""
          |{
          |  "timestamp": ${ta.instant.toEpochMilli.toString},
          |  "trade_count": ${ta.tradeCount},
          |  "base_volume": "${ta.baseVolume}",
          |  "counter_volume": "${ta.counterVolume}",
          |  "avg": "${ta.average}",
          |  "high": "${ta.high.asDecimalString}",
          |  "high_r": {
          |    "N": ${ta.high.n},
          |    "D": ${ta.high.d}
          |  },
          |  "low": "${ta.low.asDecimalString}",
          |  "low_r": {
          |    "N": ${ta.low.n},
          |    "D": ${ta.low.d}
          |  },
          |  "open": "${ta.open.asDecimalString}",
          |  "open_r": {
          |    "N": ${ta.open.n},
          |    "D": ${ta.open.d}
          |  },
          |  "close": "${ta.close.asDecimalString}",
          |  "close_r": {
          |    "N": ${ta.close.n},
          |    "D": ${ta.close.d.toString}
          |  }
          |}
        """.stripMargin

      JsonMethods.parse(json).extract[TradeAggregation] mustEqual ta
    }
  }
} 
Example 11
Source File: JsonUtils.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.kafka010

import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.kafka.common.TopicPartition
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


private object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
    val result = new HashMap[String, HashMap[Int, Long]]()
    implicit val ordering = new Ordering[TopicPartition] {
      override def compare(x: TopicPartition, y: TopicPartition): Int = {
        Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))
      }
    }
    val partitions = partitionOffsets.keySet.toSeq.sorted  // sort for more determinism
    partitions.foreach { tp =>
        val off = partitionOffsets(tp)
        val parts = result.getOrElse(tp.topic, new HashMap[Int, Long])
        parts += tp.partition -> off
        result += tp.topic -> parts
    }
    Serialization.write(result)
  }
} 
Example 12
Source File: FileStreamSourceOffset.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import scala.util.control.Exception._

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


case class FileStreamSourceOffset(logOffset: Long) extends Offset {
  override def json: String = {
    Serialization.write(this)(FileStreamSourceOffset.format)
  }
}

object FileStreamSourceOffset {
  implicit val format = Serialization.formats(NoTypeHints)

  def apply(offset: Offset): FileStreamSourceOffset = {
    offset match {
      case f: FileStreamSourceOffset => f
      case SerializedOffset(str) =>
        catching(classOf[NumberFormatException]).opt {
          FileStreamSourceOffset(str.toLong)
        }.getOrElse {
          Serialization.read[FileStreamSourceOffset](str)
        }
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid conversion from offset of ${offset.getClass} to FileStreamSourceOffset")
    }
  }
} 
Example 13
Source File: FileStreamSinkLog.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 14
Source File: StreamMetadata.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String) {
  def json: String = Serialization.write(this)(StreamMetadata.format)
}

object StreamMetadata extends Logging {

  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = FileSystem.get(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
Example 15
Source File: TagProcessor.scala    From donut   with MIT License
package report.donut.gherkin.processors

import org.json4s.jackson.Serialization
import org.json4s.{Formats, NoTypeHints, jackson}
import report.donut.gherkin.model._

case class ReportTag(tag: String,
                     scenarios: List[Scenario],
                     scenariosMetrics: Metrics,
                     tagStatus: String,
                     htmlElements: String = "")

case class TagMetricsForChart(tag: String, scenariosMetrics: Metrics)

object TagProcessor {

  def apply(features: List[Feature]): (List[ReportTag], String) = {

    val allReportTags = createAllReportTags(features)

    (allReportTags, createChart(allReportTags))
  }

  private[processors] def createChart(reportTags: List[ReportTag]): String = {
    implicit def json4sJacksonFormats: Formats = jackson.Serialization.formats(NoTypeHints)

    Serialization.writePretty(reportTags.map(t => new TagMetricsForChart(t.tag, t.scenariosMetrics)))
  }

  private[processors] def createAllReportTags(features: List[Feature]): List[ReportTag] = {
    val scenarios: List[Scenario] = features.flatMap(f => addFeatureTagsToScenarios(f.scenariosExcludeBackgroundAndUnitTests, f.tags))
    groupElementsByTag(scenarios)
      .map { case (tag, scenarioList) => new ReportTag(tag, scenarioList, ScenarioMetrics(scenarioList), tagStatus(scenarioList)) }.toList
      .zipWithIndex.map { case (t, i) => t.copy(htmlElements = HTMLTagsProcessor(t.scenarios, i.toString.trim)) }
      .sortWith((left, right) => left.scenariosMetrics.total > right.scenariosMetrics.total)
  }

  // tagName -> List[Elements], excluding background elements
  private[processors] def groupElementsByTag(scenarios: List[Scenario]): Map[String, List[Scenario]] =
    scenarios.flatMap(s => s.tags.map(tag => (tag, s))).groupBy(_._1).mapValues(value => value.map(_._2))

  // cucumber 1 backwards compatibility - adds the parent (feature) tag to all children (scenarios)
  private[processors] def addFeatureTagsToScenarios(scenarios: List[Scenario], featureTags: List[String]): List[Scenario] =
    scenarios.map(e => e.copy(tags = (e.tags ::: featureTags).distinct))

  // Returns `passed` or `failed`
  private[processors] def tagStatus(scenarios: List[Scenario]): String = {
    val statuses = scenarios.map(s => s.status.statusStr)
    if (statuses.contains(Status.FAILED)) Status.FAILED else Status.PASSED
  }
} 
Example 16
Source File: PaymentOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PaymentOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PaymentOperation]] = Arbitrary(genTransacted(genPaymentOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "payment operation" should {
    "serde via xdr string" >> prop { actual: PaymentOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PaymentOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PaymentOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "payment",
           |  "type_i": 1,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.amount)},
           |  "from": "${op.operation.sourceAccount.get.accountId}",
           |  "to": "${op.operation.destinationAccount.publicKey.accountId}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[PaymentOperation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPaymentOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PaymentOperation]): Transacted[PaymentOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
} 
Example 17
Source File: JsonUtils.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.kafka010

import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.kafka.common.TopicPartition
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


private object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
    val result = new HashMap[String, HashMap[Int, Long]]()
    implicit val ordering = new Ordering[TopicPartition] {
      override def compare(x: TopicPartition, y: TopicPartition): Int = {
        Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))
      }
    }
    val partitions = partitionOffsets.keySet.toSeq.sorted  // sort for more determinism
    partitions.foreach { tp =>
        val off = partitionOffsets(tp)
        val parts = result.getOrElse(tp.topic, new HashMap[Int, Long])
        parts += tp.partition -> off
        result += tp.topic -> parts
    }
    Serialization.write(result)
  }
} 
Example 18
Source File: FileStreamSourceOffset.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import scala.util.control.Exception._

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


case class FileStreamSourceOffset(logOffset: Long) extends Offset {
  override def json: String = {
    Serialization.write(this)(FileStreamSourceOffset.format)
  }
}

object FileStreamSourceOffset {
  implicit val format = Serialization.formats(NoTypeHints)

  def apply(offset: Offset): FileStreamSourceOffset = {
    offset match {
      case f: FileStreamSourceOffset => f
      case SerializedOffset(str) =>
        catching(classOf[NumberFormatException]).opt {
          FileStreamSourceOffset(str.toLong)
        }.getOrElse {
          Serialization.read[FileStreamSourceOffset](str)
        }
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid conversion from offset of ${offset.getClass} to FileStreamSourceOffset")
    }
  }
} 
Example 19
Source File: FileStreamSinkLog.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 20
Source File: StreamMetadata.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String) {
  def json: String = Serialization.write(this)(StreamMetadata.format)
}

object StreamMetadata extends Logging {

  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = FileSystem.get(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
Example 21
Source File: LambdaHTTPApiAnnotation.scala    From quaich   with Apache License 2.0
package codes.bytes.quaich.api.http.macros

import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.postfixOps
import scala.reflect.macros.blackbox
import scala.language.experimental.macros

object LambdaHTTPApi {
  // todo - check for companion object and reject
  def annotation_impl(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
    import c.universe._
    import Flag._

    val p = c.enclosingPosition

    val inputs = annottees.map(_.tree).toList



    val result: Tree = inputs match {
      case (cls @ q"$mods class $name[..$tparams] extends ..$parents { ..$body }") :: Nil if mods.hasFlag(ABSTRACT) ⇒
        c.abort(p, "! The @LambdaHTTPApi annotation is not valid on abstract classes.")
        cls
      // todo - detect and handle companion object!
      case (cls @ q"$mods class $name[..$tparams] extends ..$parents { ..$body }") :: Nil ⇒
        //val baseName = name.decodedName.toString
        //val handlerName = TermName(s"$baseName$$RequestHandler")
        //val handlerName = name.toTermName
        val handlerName = name.asInstanceOf[TypeName].toTermName

        val cls = q"""
        $mods class $name[..$tparams](
            val request: codes.bytes.quaich.api.http.LambdaHTTPRequest,
            val context: codes.bytes.quaich.api.http.LambdaContext
          )
          extends ..$parents
          with codes.bytes.quaich.api.http.HTTPHandler {
            import org.json4s.jackson.JsonMethods._
            import org.json4s.jackson.Serialization
            import org.json4s.jackson.Serialization._
            import org.json4s.{NoTypeHints, _}

            protected implicit val formats = Serialization.formats(NoTypeHints)

            ..$body
          }
        """

        val obj = q"""
        object $handlerName extends codes.bytes.quaich.api.http.HTTPApp {
          def newHandler(
            request: codes.bytes.quaich.api.http.LambdaHTTPRequest,
            context: codes.bytes.quaich.api.http.LambdaContext
          ): codes.bytes.quaich.api.http.HTTPHandler =
            new $name(request, context)


        }
        """

        q"$cls; $obj"

      case Nil ⇒
        c.abort(p, s"Cannot annotate an empty Tree.")
      case _ ⇒
        c.abort(p, s"! The @LambdaHTTPApi Annotation is only valid on non-abstract Classes")
    }

    //c.info(p, "result: " + result, force = true)

    c.Expr[Any](result)

  }

}

@compileTimeOnly("Setup the macro paradise compiler plugin to enable expansion of macro annotations.")
class LambdaHTTPApi extends StaticAnnotation {

  def macroTransform(annottees: Any*): Any = macro LambdaHTTPApi.annotation_impl

}
// vim: set ts=2 sw=2 sts=2 et: 
Example 22
Source File: JsonUtils.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.kafka010

import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.kafka.common.TopicPartition
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


private object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
    val result = new HashMap[String, HashMap[Int, Long]]()
    implicit val ordering = new Ordering[TopicPartition] {
      override def compare(x: TopicPartition, y: TopicPartition): Int = {
        Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))
      }
    }
    val partitions = partitionOffsets.keySet.toSeq.sorted  // sort for more determinism
    partitions.foreach { tp =>
        val off = partitionOffsets(tp)
        val parts = result.getOrElse(tp.topic, new HashMap[Int, Long])
        parts += tp.partition -> off
        result += tp.topic -> parts
    }
    Serialization.write(result)
  }
} 
Example 23
Source File: FileStreamSourceOffset.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import scala.util.control.Exception._

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


case class FileStreamSourceOffset(logOffset: Long) extends Offset {
  override def json: String = {
    Serialization.write(this)(FileStreamSourceOffset.format)
  }
}

object FileStreamSourceOffset {
  implicit val format = Serialization.formats(NoTypeHints)

  def apply(offset: Offset): FileStreamSourceOffset = {
    offset match {
      case f: FileStreamSourceOffset => f
      case SerializedOffset(str) =>
        catching(classOf[NumberFormatException]).opt {
          FileStreamSourceOffset(str.toLong)
        }.getOrElse {
          Serialization.read[FileStreamSourceOffset](str)
        }
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid conversion from offset of ${offset.getClass} to FileStreamSourceOffset")
    }
  }
} 
Example 24
Source File: FileStreamSinkLog.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.net.URI

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: Int,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = 1
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 25
Source File: StreamMetadata.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String) {
  def json: String = Serialization.write(this)(StreamMetadata.format)
}

object StreamMetadata extends Logging {

  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = metadataFile.getFileSystem(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
Example 26
Source File: KinesisPosition.scala    From kinesis-sql   with Apache License 2.0
package org.apache.spark.sql.kinesis

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

trait KinesisPosition extends Serializable {
  val iteratorType: String
  val iteratorPosition: String

  override def toString: String = s"KinesisPosition($iteratorType, $iteratorPosition)"
}

class TrimHorizon() extends KinesisPosition {
  override val iteratorType = "TRIM_HORIZON"
  override val iteratorPosition = ""
}

class Latest() extends KinesisPosition {
  override val iteratorType = "LATEST"
  override val iteratorPosition = ""
}

class AtTimeStamp(timestamp: String) extends KinesisPosition {
  def this(timestamp: Long) {
    this(timestamp.toString)
  }
  override val iteratorType = "AT_TIMESTAMP"
  override val iteratorPosition = timestamp.toString
}

class AfterSequenceNumber(seqNumber: String) extends KinesisPosition {
  override val iteratorType = "AFTER_SEQUENCE_NUMBER"
  override val iteratorPosition = seqNumber
}

class AtSequenceNumber(seqNumber: String) extends KinesisPosition {
  override val iteratorType = "AT_SEQUENCE_NUMBER"
  override val iteratorPosition = seqNumber
}

class ShardEnd() extends KinesisPosition {
  override val iteratorType = "SHARD_END"
  override val iteratorPosition = ""
}

private[kinesis] object KinesisPosition {
  def make(iteratorType: String, iteratorPosition: String): KinesisPosition = iteratorType match {
    case iterType if "TRIM_HORIZON".equalsIgnoreCase(iterType) => new TrimHorizon()
    case iterType if "LATEST".equalsIgnoreCase(iterType) => new Latest()
    case iterType if "AT_TIMESTAMP".equalsIgnoreCase(iterType) => new AtTimeStamp(iteratorPosition)
    case iterType if "AT_SEQUENCE_NUMBER".equalsIgnoreCase(iterType) =>
      new AtSequenceNumber(iteratorPosition)
    case iterType if "AFTER_SEQUENCE_NUMBER".equalsIgnoreCase(iterType) =>
      new AfterSequenceNumber(iteratorPosition)
    case iterType if "SHARD_END".equalsIgnoreCase(iterType) => new ShardEnd()
  }
}


// Minimal declaration; constructor parameters inferred from the call site below.
private[kinesis] class InitialKinesisPosition(
    shardPositions: Map[String, KinesisPosition],
    defaultPosition: KinesisPosition) extends Serializable

private[kinesis] object InitialKinesisPosition {

  def fromCheckpointJson(text: String, defaultPosition: KinesisPosition): InitialKinesisPosition = {
    val kso = KinesisSourceOffset(text)
    val shardOffsets = kso.shardsToOffsets

    new InitialKinesisPosition(
      shardOffsets.shardInfoMap
        .map(si => si._1 -> KinesisPosition.make(si._2.iteratorType, si._2.iteratorPosition)),
      defaultPosition
      )
  }
} 
Example 27
Source File: KinesisSourceOffset.scala    From kinesis-sql   with Apache License 2.0
package org.apache.spark.sql.kinesis

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.spark.sql.execution.streaming.Offset
import org.apache.spark.sql.execution.streaming.SerializedOffset
import org.apache.spark.sql.sources.v2.reader.streaming.{Offset => OffsetV2, PartitionOffset}


 
object KinesisSourceOffset {

  implicit val format = Serialization.formats(NoTypeHints)

  def apply(json: String): KinesisSourceOffset = {
    try {
      val readObj = Serialization.read[ Map[ String, Map[ String, String ] ] ](json)
      val metadata = readObj.get("metadata")
      val shardInfoMap: Map[String, ShardInfo ] = readObj.filter(_._1 != "metadata").map {
        case (shardId, value) => shardId.toString -> new ShardInfo(shardId.toString,
          value.get("iteratorType").get,
          value.get("iteratorPosition").get)
      }.toMap
      KinesisSourceOffset(
        new ShardOffsets(
          metadata.get("batchId").toLong,
          metadata.get("streamName"),
          shardInfoMap))
    } catch {
      case NonFatal(x) => throw new IllegalArgumentException(x)
    }
  }

  def getMap(shardInfos: Array[ShardInfo]): Map[String, ShardInfo] = {
    shardInfos.map {
      s: ShardInfo => (s.shardId -> s)
    }.toMap
  }

} 
Example 28
Source File: JsonUtils.scala    From pulsar-spark   with Apache License 2.0
package org.apache.spark.sql.pulsar

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.pulsar.client.api.MessageId


object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def topics(str: String): Array[String] = {
    Serialization.read[Array[String]](str)
  }

  def topics(topics: Array[String]): String = {
    Serialization.write(topics)
  }

  def topicOffsets(str: String): Map[String, MessageId] = {
    Serialization.read[Map[String, Array[Byte]]](str).map {
      case (topic, msgIdBytes) =>
        (topic, MessageId.fromByteArray(msgIdBytes))
    }
  }

  def topicOffsets(topicOffsets: Map[String, MessageId]): String = {
    Serialization.write(topicOffsets.map {
      case (topic, msgId) =>
        (topic, msgId.toByteArray)
    })
  }

  def topicTimes(topicTimes: Map[String, Long]): String = {
    Serialization.write(topicTimes)
  }

  def topicTimes(str: String): Map[String, Long] = {
    Serialization.read[Map[String, Long]](str)
  }
} 
Example 29
Source File: FunctionCalls.scala    From aardpfark   with Apache License 2.0
package com.ibm.aardpfark.pfa.expression

import com.ibm.aardpfark.pfa.document.{PFAExpressionSerializer, ParamSerializer, SchemaSerializer}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write
import org.json4s.{JDouble, JField, JInt, JObject, JString, JValue, NoTypeHints}


class FunctionCall(name: String, args: Any*) extends PFAExpression {
  import com.ibm.aardpfark.pfa.dsl._
  import org.json4s.JsonDSL._

  override def json: JValue = {
    val jArgs = args.map {
      case n: Double =>
        JDouble(n)
      case i: Int =>
        JInt(i)
      case s: String =>
        JString(s)
      case expr: PFAExpression =>
        expr.json
      case fnDef: FunctionDef =>
        implicit val formats = Serialization.formats(NoTypeHints) +
          new SchemaSerializer +
          new PFAExpressionSerializer +
          new ParamSerializer
        parse(write(fnDef))
    }
    JObject(JField(name, jArgs) :: Nil)
  }
} 
Example 30
Source File: PFADocument.scala    From aardpfark   with Apache License 2.0
package com.ibm.aardpfark.pfa.document

import com.ibm.aardpfark.pfa.dsl._
import com.ibm.aardpfark.pfa.expression.PFAExpression
import com.ibm.aardpfark.pfa.utils.Utils
import org.apache.avro.Schema
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{write, writePretty}
import org.json4s.{FieldSerializer, NoTypeHints}

trait ToPFA {
  def pfa: PFADocument
}

trait HasAction {
  protected def action: PFAExpression
}

trait HasModelCell {
  protected def modelCell: NamedCell[_]
}

case class PFADocument(
  name: Option[String] = None,
  version: Option[Long] = Some(1L),
  doc: Option[String] = Some(s"Auto-generated by Aardpfark at ${Utils.getCurrentDate}"),
  metadata: Map[String, String] = Map(),
  // options,
  input: Schema,
  output: Schema,
  // begin: Seq[String] = Seq(),
  // end: Seq[String] = Seq(),
  // method: String = "map",
  action: Seq[PFAExpression],
  cells: Map[String, Cell[_]] = Map(),
  // pools
  fcns: Map[String, FunctionDef] = Map()
  // randseed
  // zero
  // merge
  ) {

  implicit val formats = Serialization.formats(NoTypeHints) +
    new SchemaSerializer +
    new PFAExpressionSerializer +
    new ParamSerializer +
    new FieldSerializer[Cell[_]] +
    new TreeSerializer

  def toJSON(pretty: Boolean = false) = {
    if (pretty) writePretty(this) else write(this)
  }
} 
Example 31
Source File: FederationServer.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk

import java.net.HttpURLConnection.HTTP_NOT_FOUND

import com.typesafe.scalalogging.LazyLogging
import okhttp3.{Headers, HttpUrl, OkHttpClient, Request}
import org.json4s.native.{JsonMethods, Serialization}
import org.json4s.{Formats, NoTypeHints}
import stellar.sdk.inet.RestException
import stellar.sdk.model.response.{FederationResponse, FederationResponseDeserialiser}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

case class FederationServer(base: HttpUrl) extends LazyLogging {

  implicit val formats: Formats = Serialization.formats(NoTypeHints) + FederationResponseDeserialiser
  private val client = new OkHttpClient()
  private val headers = Headers.of(
    "X-Client-Name", BuildInfo.name,
    "X-Client-Version", BuildInfo.version)

  def byName(name: String)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    fetchFederationResponse(base.newBuilder()
      .addQueryParameter("q", name)
      .addQueryParameter("type", "name")
      .build(),  _.copy(address = name))

  def byAccount(account: PublicKey)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    fetchFederationResponse(base.newBuilder()
      .addQueryParameter("q", account.accountId)
      .addQueryParameter("type", "id")
      .build(), _.copy(account = account))


  private def fetchFederationResponse(url: HttpUrl, fillIn: FederationResponse => FederationResponse)
                                     (implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    Future(client.newCall(new Request.Builder().url(url).headers(headers).build()).execute())
      .map { response =>
        response.code() match {
          case HTTP_NOT_FOUND => None
          case e if e >= 500 => throw RestException(response.body().string())
          case _ =>
            Try(response.body().string())
              .map(JsonMethods.parse(_))
              .map(_.extract[FederationResponse])
              .map(fillIn)
              .map(validate) match {
              case Success(fr) => Some(fr)
              case Failure(t) => throw RestException("Could not parse document as FederationResponse.", t)
            }
        }
      }


  private def validate(fr: FederationResponse): FederationResponse = {
    if (fr.account == null) throw RestException(s"Document did not contain account_id")
    if (fr.address == null) throw RestException(s"Document did not contain stellar_address")
    fr
  }
}

object FederationServer {
  def apply(uriString: String): FederationServer = new FederationServer(HttpUrl.parse(uriString))
} 
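A usage sketch for the server above; the federation URL, the Stellar address, and the blocking Await are illustrative only:

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object FederationLookupDemo extends App {
  val server = FederationServer("https://fed.example.org/federation")
  // Resolves a Stellar address; yields None when the server responds 404.
  val response = Await.result(server.byName("bob*example.org"), 10.seconds)
  println(response)
}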
Example 32
Source File: FileStreamSinkLog.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val compactInterval = sparkSession.sessionState.conf.fileSinkLogCompactInterval
  require(compactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $compactInterval) " +
      "to a positive value.")

  protected override def serializeData(data: SinkFileStatus): String = {
    write(data)
  }

  protected override def deserializeData(encodedString: String): SinkFileStatus = {
    read[SinkFileStatus](encodedString)
  }

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
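compactLogs removes every entry whose path appears in a delete action, so a file that was added and later deleted vanishes from the compacted log entirely. A small illustration of that filtering, using a simplified stand-in for SinkFileStatus (the real class carries more fields):

case class FileEntry(path: String, action: String)

object CompactLogsDemo extends App {
  val logs = Seq(
    FileEntry("/out/part-0", "add"),
    FileEntry("/out/part-1", "add"),
    FileEntry("/out/part-0", "delete"))

  val deletedFiles = logs.filter(_.action == "delete").map(_.path).toSet
  // Both the add and the delete for part-0 are dropped; only part-1 survives.
  println(logs.filterNot(f => deletedFiles.contains(f.path)))
}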
Example 33
Source File: JsonSerializer.scala    From akka-serialization-test   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend.serializer.json

import akka.serialization.Serializer
import com.github.dnvriend.domain.OrderDomain
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.json4s.native.Serialization._
import org.json4s.{ DefaultFormats, Formats, NoTypeHints }

case class EventWrapper(manifest: String, payload: String)

class JsonSerializer extends Serializer {

  implicit val formats: Formats = DefaultFormats + OrderDomain.DirectDebitTypeSerializer

  override def identifier: Int = Int.MaxValue

  override def includeManifest: Boolean = true

  override def toBinary(o: AnyRef): Array[Byte] =
    write(EventWrapper(o.getClass.getName, write(o))).getBytes()

  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = {
    val wrapper: EventWrapper = parse(new String(bytes)).extract[EventWrapper]
    implicit val mf = Manifest.classType(Class.forName(wrapper.manifest))
    read(wrapper.payload)
  }
} 
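A round-trip sketch through the EventWrapper envelope; the Order case class below stands in for the project's OrderDomain events:

case class Order(id: String, amount: Double)

object JsonSerializerDemo extends App {
  val serializer = new JsonSerializer
  val bytes = serializer.toBinary(Order("o-1", 9.99))
  // The manifest argument is ignored; the wrapper records the concrete class name.
  println(serializer.fromBinary(bytes, Some(classOf[Order]))) // Order(o-1,9.99)
}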
Example 34
Source File: ApiHandler.scala    From avoin-voitto   with MIT License 5 votes vote down vote up
package liigavoitto.api

import akka.actor.ActorSystem
import liigavoitto.fetch.{ScoresFetcher, ScoresFromFilesFetcher}
import liigavoitto.journalist.LiigaJournalist
import liigavoitto.scores.ScoresApiClient
import liigavoitto.util.DateTimeNoMillisSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

class ApiHandler(implicit val system: ActorSystem) {
  implicit val formats = Serialization.formats(NoTypeHints) + DateTimeNoMillisSerializer

  val api = new ScoresApiClient()

  def report(matchId: String, lang: String) = {
    val fetcher = new ScoresFetcher(matchId, api)
    val article = LiigaJournalist.createArticle(fetcher.getEnrichedMatchData, lang)
    write(article)
  }

  def localReport(matchId: String, lang: String) = {
    val fetcher = new ScoresFromFilesFetcher(matchId)
    val article = LiigaJournalist.createArticle(fetcher.getEnrichedMatchData, lang)
    write(article)
  }
} 
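A sketch of wiring the handler up; the actor system name and the match id are placeholders:

import akka.actor.ActorSystem

object ReportDemo extends App {
  implicit val system: ActorSystem = ActorSystem("liigavoitto")
  val handler = new ApiHandler
  // Renders a generated match report as JSON for the given match id and language.
  println(handler.localReport("some-match-id", "fi"))
  system.terminate()
}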
Example 35
Source File: Slack.scala    From amadou   with Apache License 2.0 5 votes vote down vote up
package com.mediative.amadou

import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.{ContentType, StringEntity}
import org.apache.http.impl.client.HttpClients
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{write}

object Slack {
  case class PostException(msg: String) extends RuntimeException(msg)

  case class Payload(
      channel: String,
      text: String,
      username: String,
      icon_emoji: String,
      link_names: Boolean)
}

// The class declaration was truncated in this listing; this header is a minimal
// reconstruction inferred from the members used below (url, channel, user, icon,
// a logger, and the implicit formats that Serialization.write requires).
class Slack(val url: String, val channel: String, val user: String, val icon: String)
    extends com.typesafe.scalalogging.LazyLogging {
  import Slack._

  implicit val formats = Serialization.formats(NoTypeHints)

  def post(message: String, icon: String = this.icon): Unit = {
    val payload = Payload(channel, message, user, icon, true)
    logger.info(s"Posting $payload to $url")

    val client        = HttpClients.createDefault()
    val requestEntity = new StringEntity(write(payload), ContentType.APPLICATION_JSON)
    val postMethod    = new HttpPost(url)
    postMethod.setEntity(requestEntity)

    val response = client.execute(postMethod)
    client.close()
    val status = response.getStatusLine
    if (status.getStatusCode != 200)
      throw PostException(
        s"$url replied with status ${status.getStatusCode}: ${status.getReasonPhrase}")
  }
} 
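Given the reconstructed constructor above, posting a message might look like this; the webhook URL and channel are placeholders:

object SlackDemo extends App {
  val slack = new Slack(
    url = "https://hooks.slack.com/services/T000/B000/XXXX",
    channel = "#jobs",
    user = "amadou-bot",
    icon = ":robot_face:")
  slack.post("Nightly job finished") // throws PostException on a non-200 reply
}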
Example 36
Source File: RedisSourceOffset.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.apache.spark.sql.redis.stream

import com.redislabs.provider.redis.util.JsonUtils
import org.apache.spark.sql.execution.streaming.{Offset, SerializedOffset}
import org.json4s.jackson.Serialization
import org.json4s.{Formats, NoTypeHints}


case class RedisSourceOffset(offsets: Map[String, RedisConsumerOffset]) extends Offset {

  override def json(): String = JsonUtils.toJson(this)
}

object RedisSourceOffset {

  private implicit val formats: Formats = Serialization.formats(NoTypeHints)

  def fromOffset(offset: Offset): RedisSourceOffset = {
    offset match {
      case o: RedisSourceOffset => o
      case so: SerializedOffset => fromJson(so.json)
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid conversion from offset of ${offset.getClass} to RedisSourceOffset")
    }
  }

  def fromJson(json: String): RedisSourceOffset = {
    try {
      Serialization.read[RedisSourceOffset](json)
    } catch {
      case e: Throwable =>
        val example = RedisSourceOffset(Map("my-stream" -> RedisConsumerOffset("redis-source", "1543674099961-0")))
        val jsonExample = Serialization.write(example)
        throw new RuntimeException(s"Unable to parse offset json. Example of valid json: $jsonExample", e)
    }
  }
}

case class RedisConsumerOffset(groupName: String, offset: String)

case class RedisSourceOffsetRange(start: Option[String], end: String, config: RedisConsumerConfig) 
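A round-trip sketch: serialize an offset to JSON via json() and parse it back with fromJson:

object RedisOffsetDemo extends App {
  val offset = RedisSourceOffset(
    Map("my-stream" -> RedisConsumerOffset("redis-source", "1543674099961-0")))
  val json = offset.json()
  // Case-class equality confirms the round trip is lossless.
  println(RedisSourceOffset.fromJson(json) == offset) // true
}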
Example 37
Source File: DataSourceUtils.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources

import org.apache.hadoop.fs.Path
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.types._


object DataSourceUtils {

  
  private def verifySchema(format: FileFormat, schema: StructType, isReadPath: Boolean): Unit = {
    schema.foreach { field =>
      if (!format.supportDataType(field.dataType, isReadPath)) {
        throw new AnalysisException(
          s"$format data source does not support ${field.dataType.catalogString} data type.")
      }
    }
  }

  // SPARK-24626: Metadata files and temporary files should not be counted as
  // data files, so they don't participate in tasks like location size
  // calculation.
  private[sql] def isDataPath(path: Path): Boolean = {
    val name = path.getName
    !(name.startsWith("_") || name.startsWith("."))
  }
} 
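isDataPath is private[sql], so a caller must live under the org.apache.spark.sql package; a sketch:

package org.apache.spark.sql.demo // any package under org.apache.spark.sql works

import org.apache.hadoop.fs.Path
import org.apache.spark.sql.execution.datasources.DataSourceUtils

object IsDataPathDemo extends App {
  // Underscore- and dot-prefixed names are treated as metadata, not data.
  Seq("part-00000", "_SUCCESS", ".part-0.crc").foreach { name =>
    println(s"$name -> ${DataSourceUtils.isDataPath(new Path(s"/tmp/out/$name"))}")
  }
}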
Example 38
Source File: FileStreamSourceOffset.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import scala.util.control.Exception._

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


case class FileStreamSourceOffset(logOffset: Long) extends Offset {
  override def json: String = {
    Serialization.write(this)(FileStreamSourceOffset.format)
  }
}

object FileStreamSourceOffset {
  implicit val format = Serialization.formats(NoTypeHints)

  def apply(offset: Offset): FileStreamSourceOffset = {
    offset match {
      case f: FileStreamSourceOffset => f
      case SerializedOffset(str) =>
        catching(classOf[NumberFormatException]).opt {
          FileStreamSourceOffset(str.toLong)
        }.getOrElse {
          Serialization.read[FileStreamSourceOffset](str)
        }
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid conversion from offset of ${offset.getClass} to FileStreamSourceOffset")
    }
  }
} 
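The apply method accepts either a bare number or a JSON document inside a SerializedOffset; both forms below yield FileStreamSourceOffset(5):

import org.apache.spark.sql.execution.streaming.{FileStreamSourceOffset, SerializedOffset}

object FileStreamSourceOffsetDemo extends App {
  println(FileStreamSourceOffset(SerializedOffset("5")))                   // legacy numeric form
  println(FileStreamSourceOffset(SerializedOffset("""{"logOffset":5}"""))) // JSON form
}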
Example 39
Source File: FileStreamSinkLog.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.net.URI

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: Int,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = 1
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 40
Source File: StreamMetadata.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.util.ConcurrentModificationException

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileAlreadyExistsException, FSDataInputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.streaming.CheckpointFileManager.CancellableFSDataOutputStream
import org.apache.spark.sql.streaming.StreamingQuery


// The case class and object declarations were truncated in this listing; they
// are reconstructed here so the write method below compiles (logError comes
// from the Logging trait).
case class StreamMetadata(id: String) {
  def json: String = Serialization.write(this)(StreamMetadata.format)
}

object StreamMetadata extends Logging {
  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: CancellableFSDataOutputStream = null
    try {
      val fileManager = CheckpointFileManager.create(metadataFile.getParent, hadoopConf)
      output = fileManager.createAtomic(metadataFile, overwriteIfPossible = false)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case e: FileAlreadyExistsException =>
        if (output != null) {
          output.cancel()
        }
        throw new ConcurrentModificationException(
          s"Multiple streaming queries are concurrently using $metadataFile", e)
      case e: Throwable =>
        if (output != null) {
          output.cancel()
        }
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    }
  }
} 
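Assuming the reconstructed header above, writing the metadata file for a fresh checkpoint might look like this; the local path is a placeholder:

import java.util.UUID
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

object StreamMetadataDemo extends App {
  // Atomically creates the file, or fails if another query already owns it.
  StreamMetadata.write(
    StreamMetadata(UUID.randomUUID.toString),
    new Path("/tmp/checkpoint/metadata"),
    new Configuration())
}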
Example 41
Source File: CommitLog.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.io.{InputStream, OutputStream}
import java.nio.charset.StandardCharsets._

import scala.io.{Source => IOSource}

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.sql.SparkSession


class CommitLog(sparkSession: SparkSession, path: String)
  extends HDFSMetadataLog[CommitMetadata](sparkSession, path) {

  import CommitLog._

  override protected def deserialize(in: InputStream): CommitMetadata = {
    // called inside a try-finally where the underlying stream is closed in the caller
    val lines = IOSource.fromInputStream(in, UTF_8.name()).getLines()
    if (!lines.hasNext) {
      throw new IllegalStateException("Incomplete log file in the offset commit log")
    }
    parseVersion(lines.next.trim, VERSION)
    val metadataJson = if (lines.hasNext) lines.next else EMPTY_JSON
    CommitMetadata(metadataJson)
  }

  override protected def serialize(metadata: CommitMetadata, out: OutputStream): Unit = {
    // called inside a try-finally where the underlying stream is closed in the caller
    out.write(s"v${VERSION}".getBytes(UTF_8))
    out.write('\n')

    // write metadata
    out.write(metadata.json.getBytes(UTF_8))
  }
}

object CommitLog {
  private val VERSION = 1
  private val EMPTY_JSON = "{}"
}


case class CommitMetadata(nextBatchWatermarkMs: Long = 0) {
  def json: String = Serialization.write(this)(CommitMetadata.format)
}

object CommitMetadata {
  implicit val format = Serialization.formats(NoTypeHints)

  def apply(json: String): CommitMetadata = Serialization.read[CommitMetadata](json)
} 
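The companion's format drives both directions of the serialization; a quick round trip:

object CommitMetadataDemo extends App {
  val metadata = CommitMetadata(nextBatchWatermarkMs = 1000L)
  val json = metadata.json // {"nextBatchWatermarkMs":1000}
  println(CommitMetadata(json) == metadata) // true
}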
Example 42
Source File: StreamStreamDataGenerator.scala    From structured-streaming-application   with Apache License 2.0 5 votes vote down vote up
package knolx.kafka

import java.util.Properties

import akka.actor.ActorSystem
import knolx.Config._
import knolx.KnolXLogger
import knolx.spark.Stock
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.DurationInt
import scala.util.Random


object StreamStreamDataGenerator extends App with KnolXLogger {
  val system = ActorSystem("DataStreamer")
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer)
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

  val producer = new KafkaProducer[String, String](props)

  val companyNames = List("kirloskar", "bajaj", "amul", "dlf", "ebay")
  val orderTypes = List("buy", "sell")
  val numberOfSharesList = List(1, 2, 3, 4, 5, 6, 7, 8, 9)
  val randomCompanyNames = Random.shuffle(companyNames).drop(Random.shuffle((1 to 3).toList).head)

  implicit val formats = Serialization.formats(NoTypeHints)

  info("Streaming companies listed into Kafka...")
  system.scheduler.schedule(0 seconds, 20 seconds) {
    randomCompanyNames.foreach { name =>
      producer.send(new ProducerRecord[String, String](companiesTopic, name))
    }
  }

  info("Streaming stocks data into Kafka...")
  system.scheduler.schedule(0 seconds, 5 seconds) {
    companyNames.foreach { name =>
      val stock = Stock(name, Random.shuffle(numberOfSharesList).head, Random.shuffle(orderTypes).head)
      producer.send(new ProducerRecord[String, String](stocksTopic, write(stock)))
    }
  }
} 
Example 43
Source File: StreamStaticDataGenerator.scala    From structured-streaming-application   with Apache License 2.0 5 votes vote down vote up
package knolx.kafka

import java.util.Properties

import akka.actor.ActorSystem
import knolx.Config.{bootstrapServer, topic}
import knolx.KnolXLogger
import knolx.spark.Stock
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.DurationInt
import scala.util.Random


object StreamStaticDataGenerator extends App with KnolXLogger {
  val system = ActorSystem("DataStreamer")
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer)
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

  val producer = new KafkaProducer[String, String](props)

  val companyNames = List("kirloskar", "bajaj", "amul", "dlf", "ebay")
  val orderTypes = List("buy", "sell")
  val numberOfSharesList = List(1, 2, 3, 4, 5, 6, 7, 8, 9)

  implicit val formats = Serialization.formats(NoTypeHints)
  info("Streaming data into Kafka...")
  system.scheduler.schedule(0 seconds, 5 seconds) {
    companyNames.foreach { name =>
      val stock = Stock(name, Random.shuffle(numberOfSharesList).head, Random.shuffle(orderTypes).head)
      producer.send(new ProducerRecord[String, String](topic, write(stock)))
    }
  }
} 
Example 44
Source File: package.scala    From azure-event-hubs-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

import java.time.Duration

import com.microsoft.azure.eventhubs.{ EventHubClient, EventHubClientOptions, PartitionReceiver }
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


package object eventhubs {

  implicit val formats = Serialization.formats(NoTypeHints)

  val StartOfStream: String = "-1"
  val EndOfStream: String = "@latest"
  val DefaultEventPosition: EventPosition = EventPosition.fromEndOfStream
  val DefaultEndingPosition: EventPosition = EventPosition.fromEndOfStream
  val DefaultMaxRatePerPartition: Rate = 1000
  val DefaultReceiverTimeout: Duration = Duration.ofSeconds(60)
  val DefaultMaxSilentTime: Duration = EventHubClientOptions.SILENT_OFF
  val MinSilentTime: Duration = EventHubClientOptions.SILENT_MINIMUM
  val DefaultOperationTimeout: Duration = Duration.ofSeconds(300)
  val DefaultConsumerGroup: String = EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
  val PrefetchCountMinimum: Int = PartitionReceiver.MINIMUM_PREFETCH_COUNT
  val PrefetchCountMaximum: Int = PartitionReceiver.MAXIMUM_PREFETCH_COUNT
  val DefaultPrefetchCount: Int = PartitionReceiver.DEFAULT_PREFETCH_COUNT
  val DefaultFailOnDataLoss = "true"
  val DefaultUseSimulatedClient = "false"
  val DefaultPartitionPreferredLocationStrategy = "Hash"
  val DefaultUseExclusiveReceiver = "true"
  val StartingSequenceNumber = 0L
  val DefaultThreadPoolSize = 16
  val DefaultEpoch = 0L
  val RetryCount = 10
  val WaitInterval = 5000

  val OffsetAnnotation = "x-opt-offset"
  val EnqueuedTimeAnnotation = "x-opt-enqueued-time"
  val SequenceNumberAnnotation = "x-opt-sequence-number"

  val SparkConnectorVersion = "2.3.16"

  type PartitionId = Int
  val PartitionId: Int.type = Int

  type Rate = Int
  val Rate: Int.type = Int

  type Offset = Long
  val Offset: Long.type = Long

  type EnqueueTime = Long
  val EnqueueTime: Long.type = Long

  type SequenceNumber = Long
  val SequenceNumber: Long.type = Long

  object PartitionPreferredLocationStrategy extends Enumeration {
    type PartitionPreferredLocationStrategy = Value
    val Hash, BalancedHash = Value
  }

  // Allow Strings to be converted to types defined in this library.
  implicit class EventHubsString(val str: String) extends AnyVal {
    def toPartitionId: PartitionId = str.toInt

    def toRate: Rate = str.toInt

    def toOffset: Offset = str.toLong

    def toEnqueueTime: EnqueueTime = str.toLong

    def toSequenceNumber: SequenceNumber = str.toLong
  }
} 
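The EventHubsString implicit class gives configuration strings typed accessors; a sketch:

import org.apache.spark.eventhubs._

object EventHubsStringDemo extends App {
  val partition: PartitionId = "3".toPartitionId    // Int
  val seqNo: SequenceNumber = "42".toSequenceNumber // Long
  val start: EventPosition = DefaultEventPosition   // end of stream
  println((partition, seqNo, start))
}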
Example 45
Source File: JsonUtils.scala    From azure-event-hubs-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.eventhubs

import org.apache.spark.eventhubs.{ NameAndPartition, _ }
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import scala.collection.mutable
import scala.util.control.NonFatal


// The enclosing object declaration was truncated in this listing; the name,
// visibility and implicit formats value are reconstructed so the method
// below compiles.
object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def partitionSeqNos(jsonStr: String): Map[NameAndPartition, SequenceNumber] = {
    try {
      Serialization.read[Map[String, Map[PartitionId, SequenceNumber]]](jsonStr).flatMap {
        case (name, partSeqNos) =>
          partSeqNos.map {
            case (part, seqNo) =>
              NameAndPartition(name, part) -> seqNo
          }
      }
    } catch {
      case NonFatal(_) =>
        throw new IllegalArgumentException(
          s"failed to parse $jsonStr" +
            s"""Expected e.g. {"ehName":{"0":23,"1":-1},"ehNameB":{"0":-2}}""")
    }
  }
} 
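Feeding the documented example JSON through the parser; the hub names are placeholders, and the demo assumes it sits next to the reconstructed JsonUtils above:

object PartitionSeqNosDemo extends App {
  val json = """{"ehName":{"0":23,"1":-1},"ehNameB":{"0":-2}}"""
  // Flattens per-hub partition maps into NameAndPartition -> SequenceNumber.
  println(JsonUtils.partitionSeqNos(json))
}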
Example 46
Source File: JsonUtils.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.kafka010

import java.io.Writer

import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.kafka.common.TopicPartition
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


// The enclosing object declaration was truncated in this listing; it is
// reconstructed with the implicit formats that Serialization.write requires.
object JsonUtils {

  private implicit val formats = Serialization.formats(NoTypeHints)

  def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
    val result = new HashMap[String, HashMap[Int, Long]]()
    partitionOffsets.foreach { case (tp, off) =>
        val parts = result.getOrElse(tp.topic, new HashMap[Int, Long])
        parts += tp.partition -> off
        result += tp.topic -> parts
    }
    Serialization.write(result)
  }
} 
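Going the other way with the Kafka variant; the topic name is a placeholder, and the demo assumes it sits next to the reconstructed JsonUtils above:

import org.apache.kafka.common.TopicPartition

object PartitionOffsetsDemo extends App {
  val offsets = Map(
    new TopicPartition("topicA", 0) -> 23L,
    new TopicPartition("topicA", 1) -> -1L)
  println(JsonUtils.partitionOffsets(offsets)) // e.g. {"topicA":{"0":23,"1":-1}}
}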
Example 47
Source File: PaymentPathSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model

import org.json4s.NoTypeHints
import org.json4s.native.{JsonMethods, Serialization}
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class PaymentPathSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + PaymentPathDeserializer

  "a payment path response document" should {
    "parse to a payment path" >> prop { path: PaymentPath =>

      def amountJson(prefix: String, amount: Amount) =
        s"""
           |"${prefix}amount": "${amount.toDisplayUnits}",
           |${assetJson(prefix, amount.asset)}
         """.stripMargin

      def assetJson(prefix: String, asset: Asset) = {
        asset match {
          case NativeAsset => s""""${prefix}asset_type": "native""""
          case issuedAsset: NonNativeAsset =>
            s"""
               |"${prefix}asset_type": "${issuedAsset.typeString}",
               |"${prefix}asset_code": "${issuedAsset.code}",
               |"${prefix}asset_issuer": "${issuedAsset.issuer.accountId}"
            """.stripMargin
        }
      }

      val json =
        s"""
           |{
           |  ${amountJson("source_", path.source)},
           |  ${amountJson("destination_", path.destination)},
           |  "path": ${path.path.map(j => s"{${assetJson("", j)}}").mkString("[", ",", "]")}
           |}
         """.stripMargin

      JsonMethods.parse(json).extract[PaymentPath] mustEqual path
    }
  }

  "the underlying amount parser" should {
    "not parse unrecognised asset type" >> {
      val doc = """{"foo_asset_type":"bananas"}"""
      AmountParser.parseAsset("foo_", JsonMethods.parse(doc)) must throwA[RuntimeException]
    }
  }

} 
Example 48
Source File: TradeSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput
import stellar.sdk.model.op.JsonSnippets

class TradeSpec extends Specification with ArbitraryInput with JsonSnippets {

  implicit val formats = Serialization.formats(NoTypeHints) + TradeDeserializer

  "trade" should {
    "parse from json" >> prop { trade: Trade =>

      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": ""},
           |    "base": {"href": "https://horizon.stellar.org/accounts/GCI7ILB37OFVHLLSA74UCXZFCTPEBJOZK7YCNBI7DKH7D76U4CRJBL2A"},
           |    "counter": {"href": "https://horizon.stellar.org/accounts/GDRFRGR2FDUFF2RI6PQE5KFSCJHGSEIOGET22R66XSATP3BYHZ46BPLO"},
           |    "operation": {"href": "https://horizon.stellar.org/operations/38583306127675393"}
           |  },
           |  "id": "${trade.id}",
           |  "paging_token": "38583306127675393-2",
           |  "ledger_close_time": "${formatter.format(trade.ledgerCloseTime)}",
           |  "offer_id": "${trade.offerId}",
           |  "base_offer_id": "${trade.baseOfferId}",
           |  "base_account": "${trade.baseAccount.accountId}",
           |  ${amountDocPortion(trade.baseAmount, "base_amount", "base_")}
           |  ${amountDocPortion(trade.counterAmount, "counter_amount", "counter_")}
           |  "counter_account": "${trade.counterAccount.accountId}",
           |  "counter_offer_id": "${trade.counterOfferId}",
           |  "base_is_seller": ${trade.baseIsSeller}
           |}
         """.stripMargin

      parse(doc).extract[Trade] mustEqual trade
    }
  }

} 
Example 49
Source File: AccountMergeOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, KeyPair}

class AccountMergeOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[AccountMergeOperation]] = Arbitrary(genTransacted(genAccountMergeOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "account merge operation" should {
    "serde via xdr string" >> prop { actual: AccountMergeOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: AccountMergeOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[AccountMergeOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account":"${op.operation.sourceAccount.get.accountId}",
           |  "type_i": 8,
           |  "type": "account_merge"
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "account": "${op.operation.sourceAccount.get.accountId}",
           |  "into": "${KeyPair.fromPublicKey(op.operation.destination.hash).accountId}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[AccountMergeOperation]] mustEqual op
    }.setGen(genTransacted(genAccountMergeOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 50
Source File: SetOptionsOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class SetOptionsOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[SetOptionsOperation]] = Arbitrary(genTransacted(genSetOptionsOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "set options operation" should {
    "serde via xdr string" >> prop { actual: SetOptionsOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: SetOptionsOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded must beEquivalentTo(actual)
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[SetOptionsOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${opt("inflation_dest", op.operation.inflationDestination.map(_.accountId))}
           |  ${opt("home_domain", op.operation.homeDomain)}
           |  ${opt("master_key_weight", op.operation.masterKeyWeight)}
           |  ${opt("signer_key", op.operation.signer.map(_.key.encodeToChars.mkString))}
           |  ${opt("signer_weight", op.operation.signer.map(_.weight))}
           |  ${opt("set_flags", op.operation.setFlags.map(_.map(_.i)))}
           |  ${opt("set_flags_s", op.operation.setFlags.map(_.map(_.s)))}
           |  ${opt("clear_flags", op.operation.clearFlags.map(_.map(_.i)))}
           |  ${opt("clear_flags_s", op.operation.clearFlags.map(_.map(_.s)))}
           |  ${opt("low_threshold", op.operation.lowThreshold)}
           |  ${opt("med_threshold", op.operation.mediumThreshold)}
           |  ${opt("high_threshold", op.operation.highThreshold)}
           |  "type": "set_options",
           |  "type_i": 5,
           |}
         """.stripMargin

      parse(doc).extract[Transacted[SetOptionsOperation]] must beEquivalentTo(op)
    }.setGen(genTransacted(genSetOptionsOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
} 
Example 51
Source File: PathPaymentStrictReceiveOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PathPaymentStrictReceiveOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PathPaymentStrictReceiveOperation]] = Arbitrary(genTransacted(genPathPaymentStrictReceiveOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "path payment operation" should {
    "serde via xdr string" >> prop { actual: PathPaymentStrictReceiveOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PathPaymentStrictReceiveOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PathPaymentStrictReceiveOperation] =>
      val doc =
        s"""
           |{
           |  "_links":{
           |    "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"},
           |    "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"},
           |    "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"},
           |    "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"},
           |    "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type":"path_payment",
           |  "type_i":2,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.destinationAmount)}
           |  ${amountDocPortion(op.operation.sendMax, "source_max", "source_")}
           |  "from":"${op.operation.sourceAccount.get.accountId}",
           |  "to":"${op.operation.destinationAccount.publicKey.accountId}",
           |  "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}]
           |}
         """.stripMargin

      parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPathPaymentStrictReceiveOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictReceiveOperation]): Transacted[PathPaymentStrictReceiveOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
} 
Example 52
Source File: CreateAccountOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class CreateAccountOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[CreateAccountOperation]] = Arbitrary(genTransacted(genCreateAccountOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer + OperationDeserializer

  "create account operation" should {
    "serde via xdr string" >> prop { actual: CreateAccountOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: CreateAccountOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "be parsed from json " >> prop { op: Transacted[CreateAccountOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "create_account",
           |  "type_i": 0,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "starting_balance": "${amountString(op.operation.startingBalance)}",
           |  "funder": "${op.operation.sourceAccount.get.accountId}",
           |  "account": "${op.operation.destinationAccount.publicKey.accountId}"
           |}
         """.stripMargin

      parse(doc).extract[Transacted[CreateAccountOperation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genCreateAccountOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[CreateAccountOperation]): Transacted[CreateAccountOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
} 
Example 53
Source File: InflationOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class InflationOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[InflationOperation]] = Arbitrary(genTransacted(genInflationOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "the inflation operation" should {
    "serde via xdr string" >> prop { actual: InflationOperation =>
      Operation.decodeXDR(base64(actual.encode)) mustEqual actual
    }

    "serde via xdr bytes" >> prop { actual: InflationOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[InflationOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "inflation",
           |  "type_i": 9,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[InflationOperation]] mustEqual op
    }.setGen(genTransacted(genInflationOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 54
Source File: BumpSequenceOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class BumpSequenceOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[BumpSequenceOperation]] = Arbitrary(genTransacted(genBumpSequenceOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "bump sequence operation" should {
    "serde via xdr bytes" >> prop { actual: BumpSequenceOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "serde via xdr string" >> prop { actual: BumpSequenceOperation =>
      Operation.decodeXDR(ByteArrays.base64(actual.encode)) mustEqual actual
    }

    "parse from json" >> prop { op: Transacted[BumpSequenceOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "bump_sequence",
           |  "type_i": 11,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "bump_to": ${op.operation.bumpTo}
           |}
         """.stripMargin

      parse(doc).extract[Transacted[BumpSequenceOperation]] mustEqual op
    }.setGen(genTransacted(genBumpSequenceOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 55
Source File: CreatePassiveSellOfferOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class CreatePassiveSellOfferOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[CreatePassiveSellOfferOperation]] = Arbitrary(genTransacted(genCreatePassiveSellOfferOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer + OperationDeserializer

  "create passive offer operation" should {
    "serde via xdr string" >> prop { actual: CreatePassiveSellOfferOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: CreatePassiveSellOfferOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[CreatePassiveSellOfferOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "create_passive_sell_offer",
           |  "type_i": 4,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.selling, assetPrefix = "selling_")},
           |  ${asset(op.operation.buying, "buying_")},
           |  "offer_id": 0,
           |  "price": "1.0",
           |  "price_r": {
           |    "d": ${op.operation.price.d},
           |    "n": ${op.operation.price.n}
           |  }
           |}
         """.stripMargin

      parse(doc).extract[Transacted[CreatePassiveSellOfferOperation]] mustEqual op
    }.setGen(genTransacted(genCreatePassiveSellOfferOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 56
Source File: PathPaymentStrictSendOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PathPaymentStrictSendOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PathPaymentStrictSendOperation]] = Arbitrary(genTransacted(genPathPaymentStrictSendOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "path payment operation" should {
    "serde via xdr string" >> prop { actual: PathPaymentStrictSendOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PathPaymentStrictSendOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PathPaymentStrictSendOperation] =>
      val doc =
        s"""
           |{
           |  "_links":{
           |    "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"},
           |    "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"},
           |    "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"},
           |    "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"},
           |    "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type":"path_payment_strict_send",
           |  "type_i":13,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.sendAmount, assetPrefix = "source_")}
           |  ${amountDocPortion(op.operation.destinationMin, "destination_min")}
           |  "from":"${op.operation.sourceAccount.get.accountId}",
           |  "to":"${op.operation.destinationAccount.publicKey.accountId}",
           |  "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}]
           |}
         """.stripMargin

      parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPathPaymentStrictSendOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictSendOperation]): Transacted[PathPaymentStrictSendOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
} 
Example 57
Source File: AllowTrustOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, KeyPair}

class AllowTrustOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[AllowTrustOperation]] = Arbitrary(genTransacted(genAllowTrustOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "allow trust operation" should {
    "serde via xdr string" >> prop { actual: AllowTrustOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: AllowTrustOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[AllowTrustOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "allow_trust",
           |  "type_i": 7,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "asset_type": "${if (op.operation.assetCode.length <= 4) "credit_alphanum4" else "credit_alphanum12"}",
           |  "asset_code": "${op.operation.assetCode}",
           |  "asset_issuer": "${op.operation.sourceAccount.get.accountId}"
           |  "trustor": "${op.operation.trustor.accountId}",
           |  "trustee": "${op.operation.sourceAccount.get.accountId}",
           |  "authorize": ${op.operation.trustLineFlags.contains(TrustLineAuthorized)}
           |  "authorize_to_maintain_liabilities": ${op.operation.trustLineFlags.contains(TrustLineCanMaintainLiabilities)}
           |}
         """.stripMargin

      val parsed = parse(doc).extract[Transacted[AllowTrustOperation]]
      parsed mustEqual op
      parsed.operation.authorize mustEqual op.operation.authorize
      parsed.operation.authorizeToMaintainLiabilities mustEqual op.operation.authorizeToMaintainLiabilities
    }.setGen(genTransacted(genAllowTrustOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 58
Source File: ChangeTrustOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class ChangeTrustOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[ChangeTrustOperation]] = Arbitrary(genTransacted(genChangeTrustOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "change trust operation" should {
    "serde via xdr string" >> prop { actual: ChangeTrustOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: ChangeTrustOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[ChangeTrustOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "change_trust",
           |  "type_i": 6,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.limit, "limit")},
           |  "trustee": "${op.operation.limit.asset.issuer.accountId}",
           |  "trustor": "${op.operation.sourceAccount.get.accountId}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[ChangeTrustOperation]] mustEqual op
    }.setGen(genTransacted(genChangeTrustOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

} 
Example 59
Source File: ManageDataOperationSpec.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.op

import org.apache.commons.codec.binary.Base64
import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, PublicKey}

class ManageDataOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arbDelete: Arbitrary[Transacted[DeleteDataOperation]] = Arbitrary(genTransacted(genDeleteDataOperation))
  implicit val arbWrite: Arbitrary[Transacted[WriteDataOperation]] = Arbitrary(genTransacted(genWriteDataOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  def doc[O <: ManageDataOperation](op: Transacted[O]) = {
    val dataValue = op.operation match {
      case WriteDataOperation(_, value, _) => Base64.encodeBase64String(value.toArray)
      case _ => ""
    }

    s"""
      |{
      |  "_links": {
      |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
      |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
      |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
      |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
      |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
      |  },
      |  "id": "${op.id}",
      |  "paging_token": "10157597659137",
      |  "source_account": "${op.operation.sourceAccount.get.accountId}",
      |  "type": "manage_data",
      |  "type_i": 1,
      |  "created_at": "${formatter.format(op.createdAt)}",
      |  "transaction_hash": "${op.txnHash}",
      |  "name": "${op.operation.name}",
      |  "value": "$dataValue"
      |}""".stripMargin
  }

  "a write data operation" should {
    "serde via xdr string" >> prop { actual: WriteDataOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: WriteDataOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded must beEquivalentTo(actual)
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[WriteDataOperation] =>
      parse(doc(op)).extract[Transacted[ManageDataOperation]] must beEquivalentTo(op)
    }.setGen(genTransacted(genWriteDataOperation.suchThat(_.sourceAccount.nonEmpty)))

    "encode a string payload as UTF-8 in base64" >> prop { (s: String, source: PublicKey) =>
      val value = new String(s.take(64).getBytes("UTF-8").take(60), "UTF-8")
      WriteDataOperation("name", value).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
      WriteDataOperation("name", value, None).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
      WriteDataOperation("name", value, Some(source)).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
    }.setGen1(Arbitrary.arbString.arbitrary.suchThat(_.nonEmpty))

    "fail if the key is greater than 64 bytes" >> prop { s: String =>
      WriteDataOperation(s, "value") must throwAn[IllegalArgumentException]
    }.setGen(Gen.identifier.suchThat(_.getBytes("UTF-8").length > 64))

    "fail if the value is greater than 64 bytes" >> prop { s: String =>
      WriteDataOperation("name", s) must throwAn[IllegalArgumentException]
    }.setGen(Gen.identifier.suchThat(_.getBytes("UTF-8").length > 64))
  }

  "a delete data operation" should {
    "serde via xdr string" >> prop { actual: DeleteDataOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: DeleteDataOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[DeleteDataOperation] =>
      parse(doc(op)).extract[Transacted[ManageDataOperation]] mustEqual op
    }.setGen(genTransacted(genDeleteDataOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

}