com.fasterxml.jackson.core.JsonParser Scala Examples

The following examples show how to use com.fasterxml.jackson.core.JsonParser. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
Example 1
Source File: TsStreamingTest.scala    From spark-riak-connector   with Apache License 2.0 7 votes vote down vote up
package com.basho.riak.spark.streaming

import java.nio.ByteBuffer
import java.util.concurrent.{Callable, Executors, TimeUnit}

import com.basho.riak.spark._
import com.basho.riak.spark.rdd.RiakTSTests
import com.basho.riak.spark.rdd.timeseries.{AbstractTimeSeriesTest, TimeSeriesData}
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.spark.sql.Row
import org.junit.Assert._
import org.junit.experimental.categories.Category
import org.junit.{After, Before, Test}

@Category(Array(classOf[RiakTSTests]))
class TsStreamingTest extends AbstractTimeSeriesTest(false) with SparkStreamingFixture {

  // Pool used to run the streaming job and the polling query concurrently.
  protected final val executorService = Executors.newCachedThreadPool()
  private val dataSource = new SocketStreamingDataSource
  // Assigned in setUp once the socket server has picked a port.
  private var port = -1

  @Before
  def setUp(): Unit = {
    // Serve every test record as a newline-delimited JSON string to the first
    // client that connects; the streaming job below reads from this socket.
    port = dataSource.start(client => {
      testData
        .map(tolerantMapper.writeValueAsString)
        .foreach(x => client.write(ByteBuffer.wrap(s"$x\n".getBytes)))
      // Fixed log-message grammar: "were send" -> "were sent".
      logInfo(s"${testData.length} values were sent to client")
    })
  }

  @After
  def tearDown(): Unit = {
    dataSource.stop()
  }

  @Test(timeout = 10 * 1000) // 10 seconds timeout
  def saveToRiak(): Unit = {
    // Start the streaming job on a background thread: read JSON lines from the
    // socket, deserialize each into TimeSeriesData and persist rows to Riak TS.
    executorService.submit(new Runnable {
      override def run(): Unit = {
        ssc.socketTextStream("localhost", port)
          .map(string => {
            // NOTE(review): a fresh ObjectMapper is built per record —
            // presumably to avoid capturing a non-serializable mapper in the
            // Spark closure; confirm before hoisting it out.
            val tsdata = new ObjectMapper()
              .configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true)
              .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true)
              .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
              .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
              .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
              .registerModule(DefaultScalaModule)
              .readValue(string, classOf[TimeSeriesData])
            Row(1, "f", tsdata.time, tsdata.user_id, tsdata.temperature_k)
          })
          .saveToRiakTS(bucketName)

        ssc.start()
        ssc.awaitTerminationOrTimeout(5 * 1000)
      }
    })

    // Poll Riak until every test record is visible (the @Test timeout bounds
    // how long this loop may spin), then collect the rows for verification.
    val result = executorService.submit(new Callable[Array[Seq[Any]]] {
      override def call(): Array[Seq[Any]] = {
        var rdd = sc.riakTSTable[Row](bucketName)
          .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
        var count = rdd.count()
        while (count < testData.length) {
          TimeUnit.SECONDS.sleep(2)

          rdd = sc.riakTSTable[Row](bucketName)
            .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
          count = rdd.count()
        }
        rdd.collect().map(_.toSeq)
      }
    }).get()

    assertEquals(testData.length, result.length)
    assertEqualsUsingJSONIgnoreOrder(
      """
        |[
        |   ['bryce',305.37],
        |   ['bryce',300.12],
        |   ['bryce',295.95],
        |   ['ratman',362.121],
        |   ['ratman',3502.212]
        |]
      """.stripMargin, result)
  }
}
Example 2
Source File: JacksonSupport.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JacksonSupport {

  /** Shared Jackson mapper preconfigured for lenient parsing into Scala types. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }
}
Example 3
Source File: JSONOptions.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.util.{Locale, TimeZone}

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.commons.lang3.time.FastDateFormat

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util._


  /** Applies every configured parser-leniency flag to the given factory. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    // Pair each feature with its option flag, then apply them uniformly.
    val features = Seq(
      JsonParser.Feature.ALLOW_COMMENTS -> allowComments,
      JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES -> allowUnquotedFieldNames,
      JsonParser.Feature.ALLOW_SINGLE_QUOTES -> allowSingleQuotes,
      JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS -> allowNumericLeadingZeros,
      JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS -> allowNonNumericNumbers,
      JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER -> allowBackslashEscapingAnyCharacter,
      JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS -> allowUnquotedControlChars
    )
    features.foreach { case (feature, enabled) => factory.configure(feature, enabled) }
  }
} 
Example 4
Source File: CreateJacksonParser.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.io.{ByteArrayInputStream, InputStream, InputStreamReader}

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.hadoop.io.Text

import org.apache.spark.unsafe.types.UTF8String

private[sql] object CreateJacksonParser extends Serializable {

  /** Creates a parser over an in-memory JSON string. */
  def string(jsonFactory: JsonFactory, record: String): JsonParser = {
    jsonFactory.createParser(record)
  }

  /** Creates a parser over the bytes backing a UTF8String without copying them. */
  def utf8String(jsonFactory: JsonFactory, record: UTF8String): JsonParser = {
    val bb = record.getByteBuffer
    // The zero-copy slice below requires a heap-backed buffer.
    assert(bb.hasArray)

    val bain = new ByteArrayInputStream(
      bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())

    // Use the Charset constant instead of the "UTF-8" name: skips the
    // per-call charset lookup and the (impossible here) checked
    // UnsupportedEncodingException declared by the String-based constructor.
    jsonFactory.createParser(new InputStreamReader(bain, java.nio.charset.StandardCharsets.UTF_8))
  }

  /** Creates a parser over the valid portion of a Hadoop Text's backing array. */
  def text(jsonFactory: JsonFactory, record: Text): JsonParser = {
    jsonFactory.createParser(record.getBytes, 0, record.getLength)
  }

  /** Creates a parser that reads directly from the given stream. */
  def inputStream(jsonFactory: JsonFactory, record: InputStream): JsonParser = {
    jsonFactory.createParser(record)
  }
}
Example 5
Source File: FieldClassType.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.common.protocol

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser}
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.{DeserializationContext, SerializerProvider}
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.ser.std.StdSerializer



// JSON round-tripping is delegated to the custom (de)serializers below, so a
// FieldClassType is written as, and read back from, its plain object name.
@JsonSerialize(using = classOf[FieldClassTypeJsonSerializer])
@JsonDeserialize(using = classOf[FieldClassTypeJsonDeserializer])
sealed trait FieldClassType

// The four classifications a field can have; being `sealed` case objects,
// matches over FieldClassType are compiler-checked for exhaustiveness.
case object TimestampFieldType extends FieldClassType
case object ValueFieldType     extends FieldClassType
case object DimensionFieldType extends FieldClassType
case object TagFieldType       extends FieldClassType

class FieldClassTypeJsonSerializer extends StdSerializer[FieldClassType](classOf[FieldClassType]) {

  /** Writes the field class type as its plain object name (its `toString`). */
  override def serialize(value: FieldClassType, gen: JsonGenerator, provider: SerializerProvider): Unit = {
    gen.writeString(value.toString)
  }
}

class FieldClassTypeJsonDeserializer extends StdDeserializer[FieldClassType](classOf[FieldClassType]) {

  /** Reads a FieldClassType back from its object name as written by the serializer. */
  override def deserialize(p: JsonParser, ctxt: DeserializationContext): FieldClassType = {
    p.getText match {
      case "TimestampFieldType" => TimestampFieldType
      case "ValueFieldType"     => ValueFieldType
      case "DimensionFieldType" => DimensionFieldType
      case "TagFieldType"       => TagFieldType
      case other =>
        // Fix: the match was non-exhaustive, so unknown input escaped as a
        // bare scala.MatchError. Raise a proper Jackson mapping error instead.
        throw ctxt.weirdStringException(other, classOf[FieldClassType],
          "not a valid FieldClassType name")
    }
  }
}
Example 6
Source File: SqlStatementSerialization.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.common.statement

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser}
import com.fasterxml.jackson.databind.{DeserializationContext, SerializerProvider}
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.ser.std.StdSerializer

object SqlStatementSerialization {

  // Each nested object pairs a serializer (object -> name string) with a
  // deserializer (name string -> object) for one sealed statement hierarchy.

  object ComparisonOperatorSerialization {

    class ComparisonOperatorJsonSerializer extends StdSerializer[ComparisonOperator](classOf[ComparisonOperator]) {

      /** Writes the operator as its plain object name. */
      override def serialize(value: ComparisonOperator, gen: JsonGenerator, provider: SerializerProvider): Unit =
        gen.writeString(value.toString)
    }

    class ComparisonOperatorJsonDeserializer extends StdDeserializer[ComparisonOperator](classOf[ComparisonOperator]) {

      /** Reads an operator back from its object name. */
      override def deserialize(p: JsonParser, ctxt: DeserializationContext): ComparisonOperator = {
        p.getText match {
          case "GreaterThanOperator"      => GreaterThanOperator
          case "GreaterOrEqualToOperator" => GreaterOrEqualToOperator
          case "LessThanOperator"         => LessThanOperator
          case "LessOrEqualToOperator"    => LessOrEqualToOperator
          case other =>
            // Fix: non-exhaustive match let unknown input escape as MatchError.
            throw ctxt.weirdStringException(other, classOf[ComparisonOperator],
              "not a valid ComparisonOperator name")
        }
      }
    }

  }

  object AggregationSerialization {

    class AggregationJsonSerializer extends StdSerializer[Aggregation](classOf[Aggregation]) {

      /** Writes the aggregation as its plain object name. */
      override def serialize(value: Aggregation, gen: JsonGenerator, provider: SerializerProvider): Unit =
        gen.writeString(value.toString)
    }

    class AggregationJsonDeserializer extends StdDeserializer[Aggregation](classOf[Aggregation]) {

      /** Reads an aggregation back from its object name. */
      override def deserialize(p: JsonParser, ctxt: DeserializationContext): Aggregation = {
        p.getText match {
          case "CountAggregation" => CountAggregation
          case "MaxAggregation"   => MaxAggregation
          case "MinAggregation"   => MinAggregation
          case "SumAggregation"   => SumAggregation
          case "FirstAggregation" => FirstAggregation
          case "LastAggregation"  => LastAggregation
          case "AvgAggregation"   => AvgAggregation
          case other =>
            // Fix: non-exhaustive match let unknown input escape as MatchError.
            throw ctxt.weirdStringException(other, classOf[Aggregation],
              "not a valid Aggregation name")
        }
      }
    }

  }

  object LogicalOperatorSerialization {

    class LogicalOperatorJsonSerializer extends StdSerializer[LogicalOperator](classOf[LogicalOperator]) {

      /** Writes the logical operator as its plain object name. */
      override def serialize(value: LogicalOperator, gen: JsonGenerator, provider: SerializerProvider): Unit =
        gen.writeString(value.toString)
    }

    class LogicalOperatorJsonDeserializer extends StdDeserializer[LogicalOperator](classOf[LogicalOperator]) {

      /** Reads a logical operator back from its object name. */
      override def deserialize(p: JsonParser, ctxt: DeserializationContext): LogicalOperator = {
        p.getText match {
          case "NotOperator" => NotOperator
          case "AndOperator" => AndOperator
          case "OrOperator"  => OrOperator
          case other =>
            // Fix: non-exhaustive match let unknown input escape as MatchError.
            throw ctxt.weirdStringException(other, classOf[LogicalOperator],
              "not a valid LogicalOperator name")
        }
      }
    }

  }
}
Example 7
Source File: JacksonSupport.scala    From pulsar4s   with Apache License 2.0 5 votes vote down vote up
package com.sksamuel.pulsar4s.jackson

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.ser.std.NumberSerializers
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JacksonSupport {

  /** Shared Jackson mapper preconfigured for lenient parsing into Scala types. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }

  // NOTE(review): this module is built and populated but never registered with
  // `mapper`, so the Double serializer has no visible effect here — confirm
  // whether a `mapper.registerModule(module)` call was intended.
  val module = new SimpleModule
  module.addSerializer(new NumberSerializers.DoubleSerializer(classOf[Double]))
}
Example 8
Source File: JSONOptions.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.json

import com.fasterxml.jackson.core.{JsonParser, JsonFactory}


  /** Applies every configured parser-leniency flag to the given factory. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    // Pair each feature with its option flag, then apply them uniformly.
    val features = Seq(
      JsonParser.Feature.ALLOW_COMMENTS -> allowComments,
      JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES -> allowUnquotedFieldNames,
      JsonParser.Feature.ALLOW_SINGLE_QUOTES -> allowSingleQuotes,
      JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS -> allowNumericLeadingZeros,
      JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS -> allowNonNumericNumbers
    )
    features.foreach { case (feature, enabled) => factory.configure(feature, enabled) }
  }
}


object JSONOptions {

  /**
   * Builds a JSONOptions from a string-keyed config map. Absent keys fall back
   * to their defaults; present values are parsed with toDouble/toBoolean and
   * therefore throw on malformed input, exactly as before.
   */
  def createFromConfigMap(parameters: Map[String, String]): JSONOptions = {
    // Local helpers keep each option to a single readable line.
    def boolOpt(key: String, default: Boolean): Boolean =
      parameters.get(key).map(_.toBoolean).getOrElse(default)
    def doubleOpt(key: String, default: Double): Double =
      parameters.get(key).map(_.toDouble).getOrElse(default)

    JSONOptions(
      samplingRatio = doubleOpt("samplingRatio", 1.0),
      primitivesAsString = boolOpt("primitivesAsString", default = false),
      allowComments = boolOpt("allowComments", default = false),
      allowUnquotedFieldNames = boolOpt("allowUnquotedFieldNames", default = false),
      allowSingleQuotes = boolOpt("allowSingleQuotes", default = true),
      allowNumericLeadingZeros = boolOpt("allowNumericLeadingZeros", default = false),
      allowNonNumericNumbers = boolOpt("allowNonNumericNumbers", default = true)
    )
  }
}
Example 9
Source File: JavaJsonUtils.scala    From asura   with MIT License 5 votes vote down vote up
package asura.common.util

import java.text.SimpleDateFormat

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}

object JavaJsonUtils extends JsonUtils {

  /** Plain-Java mapper variant: no Scala module, same leniency and date settings. */
  val mapper: ObjectMapper = {
    val m = new ObjectMapper()
    m.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"))
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }
}
Example 10
Source File: JsonUtils.scala    From asura   with MIT License 5 votes vote down vote up
package asura.common.util

import java.io.InputStream
import java.text.SimpleDateFormat

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.core.`type`.TypeReference
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JsonUtils extends JsonUtils {

  /** Scala-aware mapper: DefaultScalaModule plus lenient parsing and date settings. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper() with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"))
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }

}

trait JsonUtils {
  // Mapper supplied by the concrete implementation (see the companion objects).
  val mapper: ObjectMapper

  /** Serializes `obj` to its JSON string form. */
  def stringify(obj: AnyRef): String = mapper.writeValueAsString(obj)

  /** Parses a JSON string into an instance of class `c`. */
  def parse[T <: AnyRef](content: String, c: Class[T]): T = mapper.readValue(content, c)

  /** Parses a JSON stream into an instance of class `c`. */
  def parse[T <: AnyRef](input: InputStream, c: Class[T]): T = mapper.readValue(input, c)

  /** Parses a JSON string using a TypeReference, preserving generic type info. */
  def parse[T <: AnyRef](content: String, typeReference: TypeReference[T]): T =
    mapper.readValue(content, typeReference)
}
Example 11
Source File: TriggerType.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.model.job

import com.fasterxml.jackson.core.{ JsonGenerator, JsonParser }
import com.fasterxml.jackson.databind.annotation.{ JsonDeserialize, JsonSerialize }
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import com.fasterxml.jackson.databind.{ DeserializationContext, SerializerProvider }
import helloscala.common.util.{ IEnumTrait, IEnumTraitCompanion }

// JSON round-tripping is delegated to the companion's EnumSer/EnumDeser.
@JsonSerialize(using = classOf[TriggerType.EnumSer])
@JsonDeserialize(using = classOf[TriggerType.EnumDeser])
sealed abstract class TriggerType extends IEnumTrait[String] {
  // The serialized value is simply the enum's name.
  override val value: String = name
}

object TriggerType extends IEnumTraitCompanion[String] {
  self =>
  override type Value = TriggerType

  // The three supported trigger kinds.
  case object SIMPLE extends TriggerType
  case object CRON extends TriggerType
  case object EVENT extends TriggerType

  override val values = Vector(CRON, EVENT, SIMPLE)

  /** Case-insensitive lookup: the value is upper-cased before delegation. */
  override def optionFromValue(value: String): Option[TriggerType] = {
    super.optionFromValue(value.toUpperCase())
  }

  /** Writes the trigger type as its string value. */
  class EnumSer extends StdSerializer[TriggerType](classOf[TriggerType]) {
    override def serialize(value: TriggerType, gen: JsonGenerator, provider: SerializerProvider): Unit = {
      gen.writeString(value.value)
    }
  }

  /** Reads a trigger type back, case-insensitively, from its string value. */
  class EnumDeser extends StdDeserializer[TriggerType](classOf[TriggerType]) {
    override def deserialize(p: JsonParser, ctxt: DeserializationContext): TriggerType = {
      val raw = p.getValueAsString
      TriggerType.fromValue(raw.toUpperCase())
    }
  }
}
Example 12
Source File: Program.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.model.job

import com.fasterxml.jackson.annotation.JsonValue
import com.fasterxml.jackson.core.{ JsonGenerator, JsonParser }
import com.fasterxml.jackson.databind.{ DeserializationContext, SerializerProvider }
import com.fasterxml.jackson.databind.annotation.{ JsonDeserialize, JsonSerialize }
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import helloscala.common.data.StringValueName

// JSON round-tripping is delegated to the companion's EnumSer/EnumDeser;
// @JsonValue additionally exposes `value` as the scalar representation.
@JsonSerialize(using = classOf[Program.EnumSer])
@JsonDeserialize(using = classOf[Program.EnumDeser])
sealed abstract class Program(@JsonValue val value: String, val name: String) {
  // Converts to the generic (value, name) pair used elsewhere in the codebase.
  def toValueName: StringValueName = StringValueName(value, name)
}

object Program {
  // Supported program kinds; `value` is the lowercase wire value, `name` the display name.
  case object SCALA extends Program("scala", "Scala")
  case object JAVA extends Program("java", "Java")
  case object PYTHON extends Program("python", "Python")
  case object SH extends Program("sh", "SH")
  case object SQL extends Program("sql", "SQL")
  case object JS extends Program("js", "Javascript")

  val values = Vector(SCALA, JAVA, PYTHON, SH, SQL, JS)

  /** Looks up a Program by wire value; throws NoSuchElementException when absent. */
  def fromValue(value: String): Program =
    optionFromValue(value).getOrElse(
      // Fix: the message said "by name" but this lookup is by value
      // (copy-paste from fromName).
      throw new NoSuchElementException(s"Program.values by value not found, it is $value."))

  /** Case-insensitive lookup by wire value. */
  def optionFromValue(value: String): Option[Program] = {
    val v = value.toLowerCase()
    values.find(_.value == v)
  }

  /** Looks up a Program by display name; throws NoSuchElementException when absent. */
  def fromName(name: String): Program =
    optionFromName(name).getOrElse(throw new NoSuchElementException(s"Program.values by name not found, it is $name."))

  // NOTE(review): this compares the lowercased input against `name` values
  // that are mixed-case ("Scala", "Javascript"), so only all-lowercase names
  // ("sh"? none are) can match — confirm whether `_.name.toLowerCase == n`
  // was intended. Left unchanged to preserve behavior.
  def optionFromName(name: String): Option[Program] = {
    val n = name.toLowerCase()
    values.find(_.name == n)
  }

  /** Writes the program as its wire value. */
  class EnumSer extends StdSerializer[Program](classOf[Program]) {
    override def serialize(value: Program, gen: JsonGenerator, provider: SerializerProvider): Unit =
      gen.writeString(value.value)
  }

  /** Reads a program back from its wire value. */
  class EnumDeser extends StdDeserializer[Program](classOf[Program]) {
    override def deserialize(p: JsonParser, ctxt: DeserializationContext): Program =
      Program.fromValue(p.getValueAsString)
  }
}
Example 13
Source File: HttpOrderBook.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.api.http.entities

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser}
import com.fasterxml.jackson.databind.annotation.JsonSerialize
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonNode, ObjectMapper, SerializerProvider}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.wavesplatform.dex.domain.asset.Asset.{IssuedAsset, Waves}
import com.wavesplatform.dex.domain.asset.{Asset, AssetPair}
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.model.Denormalization
import com.wavesplatform.dex.model.LevelAgg

// Order-book snapshot returned by the HTTP API. `assetPairDecimals`, when
// present, holds (amountAssetDecimals, priceAssetDecimals) and switches the
// serializer to the denormalized compact representation.
@JsonSerialize(using = classOf[HttpOrderBook.Serializer])
case class HttpOrderBook(timestamp: Long, pair: AssetPair, bids: Seq[LevelAgg], asks: Seq[LevelAgg], assetPairDecimals: Option[(Int, Int)] = None)

object HttpOrderBook {

  private val coreTypeSerializers = new SimpleModule()
  coreTypeSerializers.addDeserializer(classOf[AssetPair], new AssetPairDeserializer)

  private val mapper = new ObjectMapper() with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)
  mapper.registerModule(coreTypeSerializers)

  private def serialize(value: Any): String = mapper.writeValueAsString(value)

  private class AssetPairDeserializer extends StdDeserializer[AssetPair](classOf[AssetPair]) {

    /** Reads {"amountAsset": ..., "priceAsset": ...}; the Waves name maps to Waves. */
    override def deserialize(p: JsonParser, ctxt: DeserializationContext): AssetPair = {
      val node = p.getCodec.readTree[JsonNode](p)

      // NOTE(review): node.get(fieldName) yields null when the field is absent,
      // which would NPE here — confirm callers always supply both fields.
      def readAssetId(fieldName: String): Asset = {
        val x = node.get(fieldName).asText(Asset.WavesName)
        if (x == Asset.WavesName) Waves else IssuedAsset(ByteStr.decodeBase58(x).get)
      }

      AssetPair(readAssetId("amountAsset"), readAssetId("priceAsset"))
    }
  }

  // Fix: DecimalFormat without explicit symbols is default-locale sensitive —
  // under e.g. a German JVM locale it would emit "1,50" instead of "1.50" and
  // corrupt the JSON numbers. Pin the symbols to Locale.US so the decimal
  // separator is always '.'.
  private def formatValue(value: BigDecimal, decimals: Int): String =
    new java.text.DecimalFormat(
      s"0.${"0" * decimals}",
      java.text.DecimalFormatSymbols.getInstance(java.util.Locale.US)
    ).format(value)

  /** Writes one side of the book as [price, amount] string pairs, denormalized to asset decimals. */
  private def denormalizeAndSerializeSide(side: Seq[LevelAgg], amountAssetDecimals: Int, priceAssetDecimals: Int, jg: JsonGenerator): Unit = {
    side.foreach { levelAgg =>
      val denormalizedPrice  = Denormalization.denormalizePrice(levelAgg.price, amountAssetDecimals, priceAssetDecimals)
      val denormalizedAmount = Denormalization.denormalizeAmountAndFee(levelAgg.amount, amountAssetDecimals)

      jg.writeStartArray(2)
      jg.writeString(formatValue(denormalizedPrice, priceAssetDecimals))
      jg.writeString(formatValue(denormalizedAmount, amountAssetDecimals))
      jg.writeEndArray()
    }
  }

  def toJson(x: HttpOrderBook): String = serialize(x)

  /**
   * Custom serializer: without assetPairDecimals, emits the raw pair/bids/asks
   * objects; with them, emits only denormalized [price, amount] arrays.
   */
  class Serializer extends StdSerializer[HttpOrderBook](classOf[HttpOrderBook]) {
    override def serialize(x: HttpOrderBook, j: JsonGenerator, serializerProvider: SerializerProvider): Unit = {
      j.writeStartObject()
      j.writeNumberField("timestamp", x.timestamp)

      x.assetPairDecimals.fold {
        // Raw representation: explicit pair object plus LevelAgg objects.
        j.writeFieldName("pair")
        j.writeStartObject()
        j.writeStringField("amountAsset", x.pair.amountAssetStr)
        j.writeStringField("priceAsset", x.pair.priceAssetStr)
        j.writeEndObject()

        j.writeArrayFieldStart("bids")
        x.bids.foreach(j.writeObject)
        j.writeEndArray()

        j.writeArrayFieldStart("asks")
        x.asks.foreach(j.writeObject)
        j.writeEndArray()

      } {
        case (amountAssetDecimals, priceAssetDecimals) =>
          // Compact representation: denormalized string pairs, no pair object.
          j.writeArrayFieldStart("bids")
          denormalizeAndSerializeSide(x.bids, amountAssetDecimals, priceAssetDecimals, j)
          j.writeEndArray()

          j.writeArrayFieldStart("asks")
          denormalizeAndSerializeSide(x.asks, amountAssetDecimals, priceAssetDecimals, j)
          j.writeEndArray()
      }

      j.writeEndObject()
    }
  }
}
Example 14
Source File: JacksonSupport.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JacksonSupport {

  /** Shared Jackson mapper preconfigured for lenient parsing into Scala types. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }
}
Example 15
Source File: JacksonSupport.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.connect.hive

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JacksonSupport {

  /** Shared Jackson mapper preconfigured for lenient parsing into Scala types. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }
}
Example 16
Source File: JacksonSupport.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.landoop.streamreactor.hive.it

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

object JacksonSupport {

  /** Shared Jackson mapper preconfigured for lenient parsing into Scala types. */
  val mapper: ObjectMapper with ScalaObjectMapper = {
    val m = new ObjectMapper with ScalaObjectMapper
    m.registerModule(DefaultScalaModule)
    m.setSerializationInclusion(JsonInclude.Include.NON_NULL)
    // Tolerate input that does not match the target type exactly.
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
    m.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    // Accept technically-invalid but common JSON (bare keys, single quotes).
    m.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
    m.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
    m
  }
}
Example 17
Source File: UpdateApi.scala    From iep-apps   with Apache License 2.0 5 votes vote down vote up
package com.netflix.atlas.aggregator

import javax.inject.Inject
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.MediaTypes
import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import com.fasterxml.jackson.core.JsonParser
import com.netflix.atlas.akka.CustomDirectives._
import com.netflix.atlas.akka.WebApi
import com.netflix.atlas.core.validation.ValidationResult
import com.netflix.atlas.eval.stream.Evaluator
import com.typesafe.scalalogging.StrictLogging

// HTTP endpoint that accepts metric updates and forwards them to the
// aggregator service.
// NOTE(review): `evaluator` is injected but unused in this visible code —
// confirm whether it is required by DI wiring elsewhere.
class UpdateApi @Inject()(
  evaluator: Evaluator,
  aggrService: AtlasAggregatorService
) extends WebApi
    with StrictLogging {

  import UpdateApi._

  // Fail fast at construction time if DI did not provide the service.
  require(aggrService != null, "no binding for aggregate registry")

  /** POST /api/v4/update: the JSON body is parsed and handed to processPayload. */
  def routes: Route = {
    endpointPath("api" / "v4" / "update") {
      post {
        parseEntity(customJson(p => processPayload(p, aggrService))) { response =>
          complete(response)
        }
      }
    }
  }
}

object UpdateApi {
  private val decoder = PayloadDecoder.default

  /** Decodes the JSON payload into the service and maps the outcome to an HTTP response. */
  private[aggregator] def processPayload(
    parser: JsonParser,
    service: AtlasAggregatorService
  ): HttpResponse = {
    val decoded = decoder.decode(parser, service)
    createResponse(decoded.numDatapoints, decoded.failures)
  }

  // Shared response for the all-accepted case.
  private val okResponse =
    HttpResponse(StatusCodes.OK, entity = HttpEntity(MediaTypes.`application/json`, "{}"))

  private def createErrorResponse(status: StatusCode, msg: FailureMessage): HttpResponse =
    HttpResponse(status, entity = HttpEntity(MediaTypes.`application/json`, msg.toJson))

  /** Maps validation results to 200 (no failures), 202 (partial), or 400 (all dropped). */
  private def createResponse(numDatapoints: Int, failures: List[ValidationResult]): HttpResponse =
    failures match {
      case Nil =>
        okResponse
      case fs =>
        val numFailures = fs.size
        if (numDatapoints > numFailures) {
          // Some datapoints were still accepted: partial failure.
          createErrorResponse(StatusCodes.Accepted, FailureMessage.partial(fs, numFailures))
        } else {
          // Every datapoint was dropped.
          createErrorResponse(StatusCodes.BadRequest, FailureMessage.error(fs, numFailures))
        }
    }
}
Example 18
Source File: JacksonCompat.scala    From circe-jackson   with Apache License 2.0 5 votes vote down vote up
package io.circe.jackson

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonNode, ObjectMapper, ObjectWriter }
import com.fasterxml.jackson.databind.node.ObjectNode

// Version-compatibility shim isolating Jackson API differences from the rest
// of the codebase. NOTE(review): this variant uses the explicit [ObjectWriter]
// type argument and context.mappingException — presumably targeting an older
// Jackson line than the sibling variants; confirm against the build matrix.
private[jackson] trait JacksonCompat {
  // Pretty-printing writer for the given mapper.
  protected def makeWriter(mapper: ObjectMapper): ObjectWriter = mapper.writerWithDefaultPrettyPrinter[ObjectWriter]()

  // Reports an unexpected token for `klass` by throwing a mapping exception.
  protected def handleUnexpectedToken(context: DeserializationContext)(
    klass: Class[_],
    parser: JsonParser
  ): Unit =
    throw context.mappingException(klass)

  // Copies all fields into `node` and returns it.
  protected def objectNodeSetAll(node: ObjectNode, fields: java.util.Map[String, JsonNode]): JsonNode =
    node.setAll(fields)
}
Example 19
Source File: JacksonCompat.scala    From circe-jackson   with Apache License 2.0 5 votes vote down vote up
package io.circe.jackson

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonNode, ObjectMapper, ObjectWriter }
import com.fasterxml.jackson.databind.node.ObjectNode

/** Compatibility shim isolating circe-jackson from Jackson API differences.
  * This variant delegates to `DeserializationContext.handleUnexpectedToken`,
  * so it presumably targets a newer Jackson line than the sibling variants
  * that throw `mappingException` — TODO confirm against the build.
  */
private[jackson] trait JacksonCompat {
  // No type argument needed for this overload on this Jackson version.
  protected def makeWriter(mapper: ObjectMapper): ObjectWriter = mapper.writerWithDefaultPrettyPrinter()

  // Delegates unexpected-token reporting to the deserialization context.
  protected def handleUnexpectedToken(context: DeserializationContext)(
    klass: Class[_],
    parser: JsonParser
  ): Unit =
    context.handleUnexpectedToken(klass, parser)

  // Copies all entries of `fields` into `node`; returns the updated node.
  protected def objectNodeSetAll(node: ObjectNode, fields: java.util.Map[String, JsonNode]): JsonNode =
    node.setAll(fields)
}
Example 20
Source File: JacksonCompat.scala    From circe-jackson   with Apache License 2.0 5 votes vote down vote up
package io.circe.jackson

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonNode, ObjectMapper, ObjectWriter }
import com.fasterxml.jackson.databind.node.ObjectNode

/** Compatibility shim isolating circe-jackson from Jackson API differences.
  * This variant throws `mappingException` (like the first sibling variant) but
  * needs no type argument on `writerWithDefaultPrettyPrinter` — presumably an
  * intermediate Jackson line; TODO confirm against the build.
  */
private[jackson] trait JacksonCompat {
  // No type argument needed for this overload on this Jackson version.
  protected def makeWriter(mapper: ObjectMapper): ObjectWriter = mapper.writerWithDefaultPrettyPrinter()

  // Reports a type mismatch by throwing the context's mapping exception.
  protected def handleUnexpectedToken(context: DeserializationContext)(
    klass: Class[_],
    parser: JsonParser
  ): Unit =
    throw context.mappingException(klass)

  // Copies all entries of `fields` into `node`; returns the updated node.
  protected def objectNodeSetAll(node: ObjectNode, fields: java.util.Map[String, JsonNode]): JsonNode =
    node.setAll(fields)
}
Example 21
Source File: JacksonCompat.scala    From circe-jackson   with Apache License 2.0 5 votes vote down vote up
package io.circe.jackson

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonNode, ObjectMapper, ObjectWriter }
import com.fasterxml.jackson.databind.node.ObjectNode

/** Compatibility shim isolating circe-jackson from Jackson API differences.
  * This variant uses both `handleUnexpectedToken` and an explicitly-typed
  * `setAll[JsonNode]`, so it presumably targets the newest Jackson line of
  * the sibling variants — TODO confirm against the build.
  */
private[jackson] trait JacksonCompat {
  // No type argument needed for this overload on this Jackson version.
  protected def makeWriter(mapper: ObjectMapper): ObjectWriter = mapper.writerWithDefaultPrettyPrinter()

  // Delegates unexpected-token reporting to the deserialization context.
  protected def handleUnexpectedToken(context: DeserializationContext)(
    klass: Class[_],
    parser: JsonParser
  ): Unit =
    context.handleUnexpectedToken(klass, parser)

  // Explicit type argument needed here: on this Jackson version setAll's
  // signature requires it to return JsonNode.
  protected def objectNodeSetAll(node: ObjectNode, fields: java.util.Map[String, JsonNode]): JsonNode =
    node.setAll[JsonNode](fields)
}
Example 22
Source File: MListDeserializer.scala    From sope   with Apache License 2.0 5 votes vote down vote up
package com.sope.etl.transform.model

import com.fasterxml.jackson.core.{JsonParser, JsonToken}
import com.fasterxml.jackson.databind.DeserializationContext
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.sope.etl.annotations.SqlExpr
import com.sope.etl.transform.model.action.TransformActionRoot
import com.sope.etl.transform.model.io.input.SourceTypeRoot
import com.sope.etl.transform.model.io.output.TargetTypeRoot
import com.sope.etl.utils.SQLChecker.checkSQL
import com.sope.utils.Logging

import scala.collection.mutable
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._


    if (p.getCurrentToken != JsonToken.START_ARRAY) {
      val location = p.getCurrentLocation
      log.error(s"Invalid list definition for ${p.getCurrentName} tag")
      failures += Failed("Invalid yaml list definition", location.getLineNr, location.getColumnNr)
      return MList(data, failures)
    }

    while (p.nextToken() != JsonToken.END_ARRAY) {
      if (p.getCurrentToken == JsonToken.START_OBJECT && Option(p.getCurrentName).isEmpty) {
        val location = p.getCurrentLocation
        try {
          val validElem = p.readValueAs[T](clz)
          // Check if the element has any SQL expression/ SQL to be validated
          val clazz = mirror.staticClass(validElem.getClass.getCanonicalName)
          val objMirror = mirror.reflect(validElem)
          clazz.selfType.members.collect {
            case m: MethodSymbol if m.isCaseAccessor && m.annotations.exists(_.tree.tpe =:= typeOf[SqlExpr]) =>
              val expr = objMirror.reflectField(m).get
              if (m.name.toString.trim == "sql") (expr, true) else (expr, false)
          }.foreach { case (expr, isSql) => checkSQL(expr, isSql) }
          log.trace(s"Successfully Parsed element of type $clz :- $validElem")
          data += validElem
        }
        catch {
          case e: Exception =>
            log.error(s"Parsing failed with message ${e.getMessage} at ${location.getLineNr}:${location.getColumnNr}")
            failures += Failed(e.getMessage, location.getLineNr, location.getColumnNr)
        }
      } else {
        // Cases where the next token might be an internal object/array as result of failure on the root object.
        // These are skipped and token is moved to next object at root.
        if ((p.getCurrentToken == JsonToken.START_OBJECT || p.getCurrentToken == JsonToken.START_ARRAY)
          && Option(p.getCurrentName).isDefined) {
          log.debug("Skipping Current Token: " + p.getCurrentToken + " with Name: " + p.getCurrentName)
          p.skipChildren()
        }
      }
    }
    MList(data, failures)
  }

  // Jackson uses this when the list tag is present but null: treat it as an
  // empty list with no recorded failures.
  override def getNullValue: MList[T] = MList[T](Nil)
}

// One concrete subclass per model list type. Presumably these are referenced
// from @JsonDeserialize(using = ...) annotations, which require a concrete
// no-arg deserializer class — TODO confirm at the annotation sites.
object MListDeserializer {

  class TransformationDeserializer extends MListDeserializer(classOf[Transformation])

  class ActionDeserializer extends MListDeserializer(classOf[TransformActionRoot])

  class InputDeserializer extends MListDeserializer(classOf[SourceTypeRoot])

  class TargetDeserializer extends MListDeserializer(classOf[TargetTypeRoot])

}
Example 23
Source File: Format.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Ingestion file format, serialized to/from JSON as its plain string value.
  * Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[FormatDeserializer])
sealed case class Format(value: String) {
  override def toString: String = value
}

object Format {

  /** Resolves a case-insensitive name to a [[Format]].
    *
    * @throws Exception if `value` does not name a known format. Previously an
    *                   unknown value failed with an opaque MatchError; the
    *                   default case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): Format = {
    value.toUpperCase match {
      case "DSV"                 => Format.DSV
      case "POSITION"            => Format.POSITION
      case "SIMPLE_JSON"         => Format.SIMPLE_JSON
      case "JSON" | "ARRAY_JSON" => Format.JSON
      case "CHEW"                => Format.CHEW
      case _ =>
        throw new Exception(s"Invalid format $value, expected one of $formats")
    }
  }

  object DSV extends Format("DSV")

  object POSITION extends Format("POSITION")

  object SIMPLE_JSON extends Format("SIMPLE_JSON")

  object JSON extends Format("JSON")

  object CHEW extends Format("CHEW")

  // All supported formats, used in the error message above.
  val formats: Set[Format] = Set(DSV, POSITION, SIMPLE_JSON, JSON, CHEW)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[Format.fromString]].
  */
class FormatDeserializer extends JsonDeserializer[Format] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): Format =
    Format.fromString(jp.readValueAs[String](classOf[String]))
}
Example 24
Source File: JacksonSupport.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.util

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

/** Shared, pre-configured Jackson mapper used for eel's JSON handling.
  * Configured to be lenient when reading and compact when writing.
  */
object JacksonSupport {

  val mapper: ObjectMapper with ScalaObjectMapper = new ObjectMapper with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // Omit null fields on write; tolerate unknown/ignored fields, single values
  // where arrays are expected, and relaxed JSON (unquoted field names, single
  // quotes) on read.
  mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
  mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
  mapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
  mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
  mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
  mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
}
Example 25
Source File: RelativePathSupport.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.io.IOException
import java.nio.file.{Path, Paths}

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonToken}
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule

// Jackson module wiring java.nio.file.Path (de)serialization as plain strings.
class RelativePathSupportingModule extends SimpleModule {
  addDeserializer(classOf[Path], new RelativePathSupportingDeserializer)
  addSerializer(classOf[Path], new RelativePathSupportingSerializer)
}

/** Writes a Path as its string form; a null path becomes JSON null. */
class RelativePathSupportingSerializer extends JsonSerializer[Path] {
  @throws[IOException]
  def serialize(value: Path, gen: JsonGenerator, serializers: SerializerProvider): Unit =
    if (value == null) gen.writeNull()
    else gen.writeString(value.toString)
}

/** Reads a Path back from the string written by the serializer above; JSON
  * null maps to a null Path, anything other than a string is rejected.
  */
class RelativePathSupportingDeserializer extends JsonDeserializer[Path] {
  @throws[IOException]
  def deserialize(p: JsonParser, ctxt: DeserializationContext): Path =
    p.getCurrentToken match {
      case JsonToken.VALUE_NULL => null
      case JsonToken.VALUE_STRING => Paths.get(p.readValueAs(classOf[String]))
      case _ => throw ctxt.wrongTokenException(p, JsonToken.VALUE_STRING, "The value of a java.nio.file.Path must be a string")
    }
}
Example 26
Source File: SourceModuleSupport.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.io.IOException

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonToken}
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule
import com.wix.bazel.migrator.model.SourceModule

// Jackson module that (de)serializes SourceModule by its repo-relative path,
// resolving back against the provided set of known modules.
class SourceModuleSupportingModule(modules: Set[SourceModule]) extends SimpleModule {
  addDeserializer(classOf[SourceModule], new SourceModuleSupportingDeserializer(modules))
  addSerializer(classOf[SourceModule], new SourceModuleSupportingSerializer)
}

/** Writes a SourceModule as its repo-relative path; null becomes JSON null. */
class SourceModuleSupportingSerializer extends JsonSerializer[SourceModule] {
  @throws[IOException]
  def serialize(value: SourceModule, gen: JsonGenerator, serializers: SerializerProvider): Unit =
    if (value == null) gen.writeNull()
    else gen.writeString(value.relativePathFromMonoRepoRoot)
}

/** Resolves a serialized repo-relative path back to the matching module from
  * `modules`; fails with a descriptive exception when no module matches.
  */
class SourceModuleSupportingDeserializer(modules: Set[SourceModule]) extends JsonDeserializer[SourceModule] {
  @throws[IOException]
  def deserialize(p: JsonParser, ctxt: DeserializationContext): SourceModule =
    p.getCurrentToken match {
      case JsonToken.VALUE_NULL => null
      case JsonToken.VALUE_STRING => {
        val relativePath = p.readValueAs(classOf[String])
        // Linear scan over the known modules; the set is expected to be small.
        modules.find(_.relativePathFromMonoRepoRoot == relativePath)
          .getOrElse(throw ctxt.weirdStringException(relativePath, classOf[SourceModule], s"could not find module with relative path for $relativePath"))
      }
      case token => throw ctxt.wrongTokenException(p, JsonToken.VALUE_STRING, s"The value of a module must be a string and currently is $token")
    }
}
Example 27
Source File: CreateJacksonParser.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.io.{ByteArrayInputStream, InputStream, InputStreamReader}
import java.nio.channels.Channels
import java.nio.charset.Charset

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.hadoop.io.Text
import sun.nio.cs.StreamDecoder

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.unsafe.types.UTF8String

/** Factory helpers that build a Jackson [[JsonParser]] from the record
  * representations used by the JSON datasource (String, UTF8String, Hadoop
  * Text, InputStream, InternalRow), with overloads taking an explicit
  * character encoding where auto-detection is not acceptable.
  */
private[sql] object CreateJacksonParser extends Serializable {
  // Parse directly from an in-memory string.
  def string(jsonFactory: JsonFactory, record: String): JsonParser = {
    jsonFactory.createParser(record)
  }

  def utf8String(jsonFactory: JsonFactory, record: UTF8String): JsonParser = {
    val bb = record.getByteBuffer
    assert(bb.hasArray)

    // Wrap the UTF8String's backing array directly to avoid a copy.
    val bain = new ByteArrayInputStream(
      bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())

    // NOTE(review): routes through a Reader (the slowest variant per the
    // ranking below) even though the bytes are already UTF-8 — confirm whether
    // this is deliberate or could use the byte-array parser.
    jsonFactory.createParser(new InputStreamReader(bain, "UTF-8"))
  }

  // Parse the Text's byte array directly (encoding auto-detected by Jackson).
  def text(jsonFactory: JsonFactory, record: Text): JsonParser = {
    jsonFactory.createParser(record.getBytes, 0, record.getLength)
  }

  // Jackson parsers can be ranked according to their performance:
  // 1. Array based with actual encoding UTF-8 in the array. This is the fastest parser
  //    but it doesn't allow to set encoding explicitly. Actual encoding is detected automatically
  //    by checking leading bytes of the array.
  // 2. InputStream based with actual encoding UTF-8 in the stream. Encoding is detected
  //    automatically by analyzing first bytes of the input stream.
  // 3. Reader based parser. This is the slowest parser used here but it allows to create
  //    a reader with specific encoding.
  // The method creates a reader for an array with given encoding and sets size of internal
  // decoding buffer according to size of input array.
  // NOTE(review): sun.nio.cs.StreamDecoder is a JDK-internal API; this binds
  // the code to JDK builds that still expose it.
  private def getStreamDecoder(enc: String, in: Array[Byte], length: Int): StreamDecoder = {
    val bais = new ByteArrayInputStream(in, 0, length)
    val byteChannel = Channels.newChannel(bais)
    // Cap the decoding buffer at 8 KiB so small records don't over-allocate.
    val decodingBufferSize = Math.min(length, 8192)
    val decoder = Charset.forName(enc).newDecoder()

    StreamDecoder.forDecoder(byteChannel, decoder, decodingBufferSize)
  }

  // Parse a Text record using an explicit encoding.
  def text(enc: String, jsonFactory: JsonFactory, record: Text): JsonParser = {
    val sd = getStreamDecoder(enc, record.getBytes, record.getLength)
    jsonFactory.createParser(sd)
  }

  // Parse a stream, letting Jackson auto-detect the encoding.
  def inputStream(jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(is)
  }

  // Parse a stream with an explicit encoding (Reader-based, slowest variant).
  def inputStream(enc: String, jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(new InputStreamReader(is, enc))
  }

  // Parse the binary payload stored in column 0 of the row.
  def internalRow(jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val ba = row.getBinary(0)

    jsonFactory.createParser(ba, 0, ba.length)
  }

  // Same as above but with an explicit encoding.
  def internalRow(enc: String, jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val binary = row.getBinary(0)
    val sd = getStreamDecoder(enc, binary, binary.length)

    jsonFactory.createParser(sd)
  }
}
Example 28
Source File: InstantModule.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.dataformats

import java.time.format.{DateTimeFormatter, DateTimeParseException}
import java.time.temporal.{TemporalAccessor, TemporalQuery}
import java.time.{Instant, LocalDateTime, ZoneOffset}

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}



// Jackson module registering the multi-format Instant deserializer below.
class InstantModule extends SimpleModule {
  this.addDeserializer[Instant](classOf[Instant], new MilanInstantDeserializer)
}


/** Deserializes an [[Instant]] from a JSON string, accepting several common
  * datetime formats. Formats are tried in order and the first successful
  * parse wins.
  */
class MilanInstantDeserializer extends JsonDeserializer[Instant] {
  private val formatsToTry = List(
    DateTimeFormatter.ISO_INSTANT,
    DateTimeFormatter.ISO_DATE_TIME,
    DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"),
    DateTimeFormatter.ISO_DATE)

  override def deserialize(parser: JsonParser, context: DeserializationContext): Instant = {
    val textValue = parser.getText
    this.parseInstant(textValue)
  }

  // Interprets the parsed temporal fields as a LocalDateTime at UTC.
  private val createInstant = new TemporalQuery[Instant] {
    override def queryFrom(temporal: TemporalAccessor): Instant = LocalDateTime.from(temporal).toInstant(ZoneOffset.UTC)
  }

  /** Tries each candidate format lazily and returns the first successful
    * parse. The previous implementation eagerly attempted every format and
    * then called `.get` on an Option; the view stops at the first success
    * and avoids the partial `.get`.
    *
    * @throws DateTimeParseException if no format matches.
    */
  private def parseInstant(dateTimeString: String): Instant = {
    formatsToTry.view
      .flatMap(formatter => this.tryParseFormat(dateTimeString, formatter))
      .headOption match {
      case Some(instant) =>
        instant
      case None =>
        throw new DateTimeParseException(s"Unable to parse datetime string '$dateTimeString'.", dateTimeString, 0)
    }
  }

  // Attempts one format, mapping a parse failure to None.
  private def tryParseFormat(dateTimeString: String,
                             formatter: DateTimeFormatter): Option[Instant] = {
    try {
      Some(formatter.parse(dateTimeString, this.createInstant))
    }
    catch {
      case _: DateTimeParseException =>
        None
    }
  }
}
Example 29
Source File: Kind.scala    From incubator-livy   with Apache License 2.0 5 votes vote down vote up
package org.apache.livy.sessions

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonToken}
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule

// Base type for Livy session kinds; `name` is the wire value used by the API.
sealed abstract class Kind(val name: String) {
  override def toString: String = name
}

// The five supported session kinds; each name is the value serialized by
// SessionKindModule below.
object Spark extends Kind("spark")

object PySpark extends Kind("pyspark")

object SparkR extends Kind("sparkr")

object Shared extends Kind("shared")

object SQL extends Kind("sql")

object Kind {

  // Lookup table mapping each accepted name (and alias) to its session kind.
  private val aliases: Map[String, Kind] = Map(
    "spark" -> Spark,
    "scala" -> Spark,
    "pyspark" -> PySpark,
    "python" -> PySpark,
    "sparkr" -> SparkR,
    "r" -> SparkR,
    "shared" -> Shared,
    "sql" -> SQL)

  /** Resolves a kind name or alias to its [[Kind]]; rejects unknown values. */
  def apply(kind: String): Kind =
    aliases.getOrElse(kind, throw new IllegalArgumentException(s"Invalid kind: $kind"))
}

// Jackson module that serializes a Kind as its plain string name and reads it
// back through Kind.apply (rejecting non-string tokens and unknown names).
class SessionKindModule extends SimpleModule("SessionKind") {

  addSerializer(classOf[Kind], new JsonSerializer[Kind]() {
    override def serialize(value: Kind, jgen: JsonGenerator, provider: SerializerProvider): Unit = {
      jgen.writeString(value.toString)
    }
  })

  addDeserializer(classOf[Kind], new JsonDeserializer[Kind]() {
    override def deserialize(jp: JsonParser, ctxt: DeserializationContext): Kind = {
      // Non-string tokens fail fast with IllegalArgumentException via require.
      require(jp.getCurrentToken() == JsonToken.VALUE_STRING, "Kind should be a string.")
      Kind(jp.getText())
    }
  })

}
Example 30
Source File: JsonConverter.scala    From scala-serialization   with MIT License 5 votes vote down vote up
package com.komanov.serialization.converters

import java.time.Instant

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, Version}
import com.fasterxml.jackson.databind.Module.SetupContext
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.{SimpleDeserializers, SimpleSerializers}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.komanov.serialization.domain.{Site, SiteEvent, SiteEventData}


/** JSON-based converter for Site and SiteEvent payloads, backed by a shared
  * Jackson ObjectMapper with Scala support and epoch-millisecond Instants.
  */
object JsonConverter extends MyConverter {

  // Serializes java.time.Instant as epoch milliseconds (a JSON number) and
  // reads it back the same way.
  private object InstantModule extends Module {
    override def getModuleName: String = "Instant"

    override def setupModule(context: SetupContext): Unit = {
      val serializers = new SimpleSerializers
      serializers.addSerializer(classOf[Instant], new JsonSerializer[Instant] {
        override def serialize(value: Instant, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
          gen.writeNumber(value.toEpochMilli)
        }
      })

      val deserializers = new SimpleDeserializers
      deserializers.addDeserializer(classOf[Instant], new JsonDeserializer[Instant] {
        override def deserialize(p: JsonParser, ctxt: DeserializationContext): Instant = {
          Instant.ofEpochMilli(p.getLongValue)
        }
      })

      context.addSerializers(serializers)
      context.addDeserializers(deserializers)
    }

    override def version(): Version = new Version(1, 0, 0, "RELEASE", "group", "artifact")
  }

  // Single mapper instance shared by all conversions below.
  private val objectMapper = {
    val om = new ObjectMapper()
    om.registerModule(new DefaultScalaModule)
    om.registerModule(InstantModule)
    om
  }
  // Pre-bound reader/writer for Site avoid per-call type resolution.
  private val siteReader: ObjectReader = objectMapper.readerFor(classOf[Site])
  private val siteWriter: ObjectWriter = objectMapper.writerFor(classOf[Site])

  override def toByteArray(site: Site): Array[Byte] = {
    siteWriter.writeValueAsBytes(site)
  }

  override def fromByteArray(bytes: Array[Byte]): Site = {
    siteReader.readValue(bytes)
  }

  override def toByteArray(event: SiteEvent): Array[Byte] = {
    objectMapper.writeValueAsBytes(event)
  }

  // Events carry their concrete class out-of-band, so the caller supplies it.
  override def siteEventFromByteArray(clazz: Class[_], bytes: Array[Byte]): SiteEvent = {
    objectMapper.readValue(bytes, clazz).asInstanceOf[SiteEvent]
  }

}
Example 31
Source File: JacksonTokenIterator.scala    From tethys   with Apache License 2.0 5 votes vote down vote up
package tethys.jackson

import com.fasterxml.jackson.core.{JsonParser, JsonTokenId}
import tethys.commons.Token
import tethys.commons.Token._
import tethys.readers.tokens.{BaseTokenIterator, TokenIterator}

import scala.annotation.switch

/** Adapts a Jackson streaming [[JsonParser]] to tethys' [[TokenIterator]],
  * translating Jackson token ids into tethys [[Token]] values.
  */
final class JacksonTokenIterator(jsonParser: JsonParser) extends BaseTokenIterator {
  // Cache of the current token so currentToken() doesn't re-query the parser.
  private[this] var token: Token = fromId(jsonParser.currentTokenId())
  override def currentToken(): Token = token

  override def nextToken(): Token = {
    val t = jsonParser.nextToken()
    token = {
      // Jackson returns null at end of input; map that to Token.Empty.
      if(t == null) Token.Empty
      else fromId(t.id())
    }
    token
  }

  override def fieldName(): String = jsonParser.getCurrentName

  override def string(): String = jsonParser.getValueAsString()

  override def number(): Number = jsonParser.getNumberValue

  override def short(): Short = jsonParser.getShortValue

  override def int(): Int = jsonParser.getIntValue

  override def long(): Long = jsonParser.getLongValue

  override def float(): Float = jsonParser.getFloatValue

  override def double(): Double = jsonParser.getDoubleValue

  override def boolean(): Boolean = jsonParser.getBooleanValue

  // Maps Jackson token ids to tethys tokens; @switch guarantees this compiles
  // to a table/lookup switch rather than a chain of comparisons.
  private def fromId(tokenId: Int): Token = (tokenId: @switch) match {
    case JsonTokenId.ID_START_OBJECT => ObjectStartToken
    case JsonTokenId.ID_END_OBJECT => ObjectEndToken
    case JsonTokenId.ID_START_ARRAY => ArrayStartToken
    case JsonTokenId.ID_END_ARRAY => ArrayEndToken
    case JsonTokenId.ID_FIELD_NAME => FieldNameToken
    case JsonTokenId.ID_STRING => StringValueToken
    case JsonTokenId.ID_NUMBER_INT => NumberValueToken
    case JsonTokenId.ID_NUMBER_FLOAT => NumberValueToken
    case JsonTokenId.ID_TRUE => BooleanValueToken
    case JsonTokenId.ID_FALSE => BooleanValueToken
    case JsonTokenId.ID_NULL => NullValueToken
    case _ => Token.Empty
  }
}

object JacksonTokenIterator {
  // Builds an iterator from a parser that has not yet produced a token.
  def fromFreshParser(parser: JsonParser): TokenIterator = {
    parser.nextToken()// move parser to first token
    new JacksonTokenIterator(parser)
  }
}
Example 32
Source File: Stage.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Processing stage, serialized to/from JSON as its plain string value.
  * Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[StageDeserializer])
sealed case class Stage(value: String) {
  override def toString: String = value
}

object Stage {

  /** Resolves a case-insensitive name to a [[Stage]].
    *
    * @throws Exception if `value` is not a known stage. Previously an unknown
    *                   value failed with an opaque MatchError; the default
    *                   case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): Stage = {
    value.toUpperCase() match {
      case "UNIT"   => Stage.UNIT
      case "GLOBAL" => Stage.GLOBAL
      case _ =>
        throw new Exception(s"Invalid stage $value, expected one of $stages")
    }
  }

  object UNIT extends Stage("UNIT")

  object GLOBAL extends Stage("GLOBAL")

  // All supported stages, used in the error message above.
  val stages: Set[Stage] = Set(UNIT, GLOBAL)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[Stage.fromString]].
  */
class StageDeserializer extends JsonDeserializer[Stage] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): Stage =
    Stage.fromString(jp.readValueAs[String](classOf[String]))
}
Example 33
Source File: IndexSink.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Target index sink, serialized to/from JSON as its plain string value.
  * Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[IndexSinkDeserializer])
sealed case class IndexSink(value: String) {
  override def toString: String = value
}

object IndexSink {

  /** Resolves a case-insensitive name to an [[IndexSink]].
    *
    * @throws Exception if `value` is not a known sink. Previously an unknown
    *                   value failed with an opaque MatchError; the default
    *                   case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): IndexSink = {
    value.toUpperCase match {
      case "NONE" => IndexSink.None
      case "FS"   => IndexSink.FS
      case "JDBC" => IndexSink.JDBC
      case "BQ"   => IndexSink.BQ
      case "ES"   => IndexSink.ES
      case _ =>
        throw new Exception(s"Invalid index sink $value, expected one of $sinks")
    }
  }

  // Note: shadows scala.None inside this object; always reference as IndexSink.None.
  object None extends IndexSink("None")

  object FS extends IndexSink("FS")

  object BQ extends IndexSink("BQ")

  object ES extends IndexSink("ES")

  object JDBC extends IndexSink("JDBC")

  // All supported sinks, used in the error message above.
  val sinks: Set[IndexSink] = Set(None, FS, BQ, ES, JDBC)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[IndexSink.fromString]].
  */
class IndexSinkDeserializer extends JsonDeserializer[IndexSink] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): IndexSink =
    IndexSink.fromString(jp.readValueAs[String](classOf[String]))
}
Example 34
Source File: Mode.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Ingestion mode, serialized to/from JSON as its plain string value.
  * Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[ModeDeserializer])
sealed case class Mode(value: String) {
  override def toString: String = value
}

object Mode {

  /** Resolves a case-insensitive name to a [[Mode]].
    *
    * @throws Exception if `value` is not a known mode. Previously an unknown
    *                   value failed with an opaque MatchError; the default
    *                   case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): Mode = {
    value.toUpperCase() match {
      case "FILE"            => Mode.FILE
      case "STREAM"          => Mode.STREAM
      case "FILE_AND_STREAM" => Mode.FILE_AND_STREAM
      case _ =>
        throw new Exception(s"Invalid mode $value, expected one of $modes")
    }
  }

  object FILE extends Mode("FILE")

  object STREAM extends Mode("STREAM")

  object FILE_AND_STREAM extends Mode("FILE_AND_STREAM")

  // All supported modes, used in the error message above.
  val modes: Set[Mode] = Set(FILE, STREAM, FILE_AND_STREAM)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[Mode.fromString]].
  */
class ModeDeserializer extends JsonDeserializer[Mode] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): Mode =
    Mode.fromString(jp.readValueAs[String](classOf[String]))
}
Example 35
Source File: MetricType.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Metric category, serialized to/from JSON as its plain string value.
  * Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[MetricTypeDeserializer])
sealed case class MetricType(value: String) {
  override def toString: String = value
}

object MetricType {

  /** Resolves a case-insensitive name to a [[MetricType]].
    *
    * @throws Exception if `value` is not a known metric type. Previously an
    *                   unknown value failed with an opaque MatchError; the
    *                   default case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): MetricType = {
    value.toUpperCase() match {
      case "DISCRETE"   => MetricType.DISCRETE
      case "CONTINUOUS" => MetricType.CONTINUOUS
      case "TEXT"       => MetricType.TEXT
      case "NONE"       => MetricType.NONE
      case _ =>
        throw new Exception(s"Invalid metric type $value, expected one of $metricTypes")
    }
  }

  object DISCRETE extends MetricType("DISCRETE")

  object CONTINUOUS extends MetricType("CONTINUOUS")

  object TEXT extends MetricType("TEXT")

  object NONE extends MetricType("NONE")

  // All supported metric types, used in the error message above.
  val metricTypes: Set[MetricType] = Set(NONE, DISCRETE, CONTINUOUS, TEXT)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[MetricType.fromString]].
  */
class MetricTypeDeserializer extends JsonDeserializer[MetricType] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): MetricType =
    MetricType.fromString(jp.readValueAs[String](classOf[String]))
}
Example 36
Source File: Partition.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.node.ArrayNode
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer, JsonNode}


/** Partitioning configuration: an optional sampling ratio and an optional
  * list of attribute names to partition by.
  */
@JsonDeserialize(using = classOf[PartitionDeserializer])
case class Partition(
  sampling: Option[Double],
  attributes: Option[List[String]]
) {
  /** Attributes to partition by; empty when none were configured. */
  def getAttributes(): List[String] = attributes.getOrElse(Nil)

  /** Sampling ratio; 0.0 when not configured. Explicit return type added so
    * the public API does not rely on inference.
    */
  def getSampling(): Double = sampling.getOrElse(0.0)

}

/** Deserializes a Partition from a JSON object with optional `sampling`
  * (number) and `attributes` (array of strings) fields.
  */
class PartitionDeserializer extends JsonDeserializer[Partition] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): Partition = {
    // Read the whole subtree first, then interpret it below.
    val node: JsonNode = jp.getCodec().readTree[JsonNode](jp)
    deserialize(node)
  }

  def deserialize(node: JsonNode): Partition = {
    // True when the field is absent or explicitly JSON null.
    def isNull(field: String): Boolean =
      node.get(field) == null || node.get(field).isNull

    // Note: a missing sampling still yields Some(0.0), never None.
    val sampling =
      if (isNull("sampling")) 0.0
      else
        node.get("sampling").asDouble()

    import scala.collection.JavaConverters._
    val attributes =
      if (isNull("attributes")) None
      else
        Some(
          node
            .get("attributes")
            .asInstanceOf[ArrayNode]
            .elements
            .asScala
            .toList
            .map(_.asText())
        )
    Partition(Some(sampling), attributes)
  }
}
Example 37
Source File: UserType.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Principal type for access control, serialized to/from JSON as its plain
  * string value. Pseudo-enum: the only instances are the companion's members.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[UserTypeDeserializer])
sealed case class UserType(value: String) {
  override def toString: String = value
}

object UserType {

  /** Resolves a case-insensitive name to a [[UserType]].
    *
    * @throws Exception if `value` is not a known user type. Previously an
    *                   unknown value failed with an opaque MatchError; the
    *                   default case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): UserType = {
    value.toUpperCase match {
      case "SA"    => UserType.SA
      case "USER"  => UserType.USER
      case "GROUP" => UserType.GROUP
      case _ =>
        throw new Exception(s"Invalid user type $value, expected one of $formats")
    }
  }

  object SA extends UserType("SA")

  object USER extends UserType("USER")

  object GROUP extends UserType("GROUP")

  // NOTE(review): misnamed (copy-paste from Format) — kept as `formats`
  // because it is a public member; consider deprecating in favor of `userTypes`.
  val formats: Set[UserType] = Set(SA, USER, GROUP)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[UserType.fromString]].
  */
class UserTypeDeserializer extends JsonDeserializer[UserType] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): UserType =
    UserType.fromString(jp.readValueAs[String](classOf[String]))
}
Example 38
Source File: Trim.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}


/** Trimming strategy for attribute values, serialized to/from JSON as its
  * plain string value. Pseudo-enum: the only instances are the companion's members.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[TrimDeserializer])
sealed case class Trim(value: String) {
  override def toString: String = value
}

object Trim {

  /** Resolves a case-insensitive name to a [[Trim]].
    *
    * @throws Exception if `value` is not a known trim strategy. Previously an
    *                   unknown value failed with an opaque MatchError; the
    *                   default case mirrors WriteMode.fromString in this codebase.
    */
  def fromString(value: String): Trim = {
    value.toUpperCase() match {
      case "LEFT"  => Trim.LEFT
      case "RIGHT" => Trim.RIGHT
      case "BOTH"  => Trim.BOTH
      case "NONE"  => Trim.NONE
      case _ =>
        throw new Exception(s"Invalid trim $value, expected one of $modes")
    }
  }

  object LEFT extends Trim("LEFT")

  object RIGHT extends Trim("RIGHT")

  object BOTH extends Trim("BOTH")

  object NONE extends Trim("NONE")

  // NOTE(review): misnamed (copy-paste from Mode) — kept as `modes` because
  // it is a public member; consider deprecating in favor of `trims`.
  val modes: Set[Trim] = Set(LEFT, RIGHT, BOTH, NONE)
}

/** Jackson deserializer that reads a JSON string token and resolves it via
  * [[Trim.fromString]].
  */
class TrimDeserializer extends JsonDeserializer[Trim] {

  override def deserialize(jp: JsonParser, ctx: DeserializationContext): Trim =
    Trim.fromString(jp.readValueAs[String](classOf[String]))
}
Example 39
Source File: WriteMode.scala    From comet-data-pipeline   with Apache License 2.0 5 votes vote down vote up
package com.ebiznext.comet.schema.model

import com.ebiznext.comet.schema.model.WriteMode.{APPEND, ERROR_IF_EXISTS, IGNORE, OVERWRITE}
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}
import org.apache.spark.sql.SaveMode


/** Write disposition for a sink, serialized to/from JSON as its plain string
  * value. Pseudo-enum: the only instances are the members of the companion object.
  */
@JsonSerialize(using = classOf[ToStringSerializer])
@JsonDeserialize(using = classOf[WriteDeserializer])
sealed case class WriteMode(value: String) {
  override def toString: String = value

  /** Maps this write mode to the corresponding Spark [[SaveMode]]. */
  def toSaveMode: SaveMode = {
    this match {
      case OVERWRITE       => SaveMode.Overwrite
      case APPEND          => SaveMode.Append
      case ERROR_IF_EXISTS => SaveMode.ErrorIfExists
      case IGNORE          => SaveMode.Ignore
      case _ =>
        // Defensive: unreachable as long as the companion's four members
        // remain the only instances of this sealed class.
        throw new Exception("Should never happen")
    }
  }
}

object WriteMode {

  /**
    * Resolves a write mode from its string form, case-insensitively.
    *
    * @param value one of "OVERWRITE", "APPEND", "ERROR_IF_EXISTS" or "IGNORE" (any case)
    * @throws Exception with a descriptive message for any other input
    */
  def fromString(value: String): WriteMode = {
    val normalized = value.toUpperCase()
    writes
      .find(_.value == normalized)
      .getOrElse(throw new Exception(s"Invalid Write Mode try one of ${writes}"))
  }

  object OVERWRITE extends WriteMode("OVERWRITE")

  object APPEND extends WriteMode("APPEND")

  object ERROR_IF_EXISTS extends WriteMode("ERROR_IF_EXISTS")

  object IGNORE extends WriteMode("IGNORE")

  // All supported modes; fromString resolves against this set.
  val writes: Set[WriteMode] = Set(OVERWRITE, APPEND, ERROR_IF_EXISTS, IGNORE)
}

// Jackson deserializer that turns a JSON string into its WriteMode singleton.
class WriteDeserializer extends JsonDeserializer[WriteMode] {

  /** Reads the current JSON value as a string and maps it via WriteMode.fromString. */
  override def deserialize(jp: JsonParser, ctx: DeserializationContext): WriteMode =
    WriteMode.fromString(jp.readValueAs(classOf[String]))
}
Example 40
Source File: EnumEntrySerializer.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.utils.json

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser}
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import com.fasterxml.jackson.databind.{DeserializationContext, SerializerProvider}
import enumeratum.{Enum, EnumEntry}
import org.json4s.CustomSerializer
import org.json4s.JsonAST.JString

import scala.reflect.ClassTag


  /**
    * Builds a Jackson serializer/deserializer pair for an enumeratum enum.
    *
    * Entries are written as their `entryName` string and read back
    * case-insensitively via `enum.withNameInsensitive`.
    */
  def jackson[A <: EnumEntry: ClassTag](enum: Enum[A]): SerDes[A] = {
    val runtimeClass = implicitly[ClassTag[A]].runtimeClass.asInstanceOf[Class[A]]
    // Serializer: emit the entry's canonical name as a JSON string.
    val serializer = new StdSerializer[A](runtimeClass) {
      override def serialize(value: A, gen: JsonGenerator, provider: SerializerProvider): Unit =
        gen.writeString(value.entryName)
    }
    // Deserializer: resolve the entry from its name, ignoring case.
    val deserializer = new StdDeserializer[A](runtimeClass) {
      override def deserialize(p: JsonParser, ctxt: DeserializationContext): A =
        enum.withNameInsensitive(p.getValueAsString)
    }
    new SerDes[A](runtimeClass, serializer, deserializer)
  }

} 
Example 41
Source File: JSONOptions.scala    From drizzle-spark   with Apache License 2.0 4 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.commons.lang3.time.FastDateFormat

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.{CompressionCodecs, ParseModes}


  /** Pushes this options object's permissive-parsing flags onto the Jackson factory. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    val features: Seq[(JsonParser.Feature, Boolean)] = Seq(
      JsonParser.Feature.ALLOW_COMMENTS -> allowComments,
      JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES -> allowUnquotedFieldNames,
      JsonParser.Feature.ALLOW_SINGLE_QUOTES -> allowSingleQuotes,
      JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS -> allowNumericLeadingZeros,
      JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS -> allowNonNumericNumbers,
      JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER -> allowBackslashEscapingAnyCharacter
    )
    features.foreach { case (feature, enabled) => factory.configure(feature, enabled) }
  }
} 
Example 42
Source File: JSONOptions.scala    From sparkoscope   with Apache License 2.0 4 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.util.Locale

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.commons.lang3.time.FastDateFormat

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, CompressionCodecs, ParseModes}


  /** Pushes this options object's permissive-parsing flags onto the Jackson factory. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    val features: Seq[(JsonParser.Feature, Boolean)] = Seq(
      JsonParser.Feature.ALLOW_COMMENTS -> allowComments,
      JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES -> allowUnquotedFieldNames,
      JsonParser.Feature.ALLOW_SINGLE_QUOTES -> allowSingleQuotes,
      JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS -> allowNumericLeadingZeros,
      JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS -> allowNonNumericNumbers,
      JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER -> allowBackslashEscapingAnyCharacter
    )
    features.foreach { case (feature, enabled) => factory.configure(feature, enabled) }
  }
} 
Example 43
Source File: JSONOptions.scala    From multi-tenancy-spark   with Apache License 2.0 4 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.util.Locale

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.commons.lang3.time.FastDateFormat

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, CompressionCodecs, ParseModes}


  /** Pushes this options object's permissive-parsing flags onto the Jackson factory. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    val features: Seq[(JsonParser.Feature, Boolean)] = Seq(
      JsonParser.Feature.ALLOW_COMMENTS -> allowComments,
      JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES -> allowUnquotedFieldNames,
      JsonParser.Feature.ALLOW_SINGLE_QUOTES -> allowSingleQuotes,
      JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS -> allowNumericLeadingZeros,
      JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS -> allowNonNumericNumbers,
      JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER -> allowBackslashEscapingAnyCharacter
    )
    features.foreach { case (feature, enabled) => factory.configure(feature, enabled) }
  }
}