org.json4s.jackson.JsonMethods.render Scala Examples

The following examples show how to use org.json4s.jackson.JsonMethods.render, drawn from several open-source projects. The source file, project, and license are noted above each example.
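Before the project examples, a quick orientation: render normalizes a JValue (or anything org.json4s.JsonDSL can implicitly convert to one) into a renderable tree, which compact or pretty then serializes to a string. A minimal, self-contained sketch, with illustrative names that do not come from any project below:

import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, pretty, render}

object RenderBasics extends App {
  // Build a JValue with JsonDSL's ~ operator.
  val json = ("name" -> "demo") ~ ("count" -> 42)

  println(compact(render(json))) // {"name":"demo","count":42}
  println(pretty(render(json)))  // same JSON, pretty-printed
}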
Example 1
Source File: UnifiedSparkListener.scala (from spark-monitoring, MIT License)
package org.apache.spark.listeners

import java.time.Instant

import org.apache.spark.{SparkConf, SparkException, SparkInformation}
import org.apache.spark.internal.Logging
import org.apache.spark.listeners.sink.SparkListenerSink
import org.apache.spark.scheduler._
import org.apache.spark.sql.streaming.StreamingQueryListener
import org.apache.spark.util.JsonProtocol
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.util.control.NonFatal


class UnifiedSparkListener(override val conf: SparkConf)
  extends UnifiedSparkListenerHandler
    with Logging
    with SparkListenerHandlers
    with StreamingListenerHandlers
    with StreamingQueryListenerHandlers {

  private val listenerSink = this.createSink(this.conf)

  override def onOtherEvent(event: SparkListenerEvent): Unit = {
    // All Spark events that are not specific to SparkListener go through
    // this method. The typed ListenerBus implementations intercept these and
    // forward them to their "local" listeners. We handle everything here so
    // that only one listener is needed, and that listener can be registered
    // via extraListeners, so no code change is required to add listener support.
    event match {
      // We will use the ClassTag for the private wrapper class to match
      case this.streamingListenerEventClassTag(e) =>
        this.onStreamingListenerEvent(e)
      case streamingQueryListenerEvent: StreamingQueryListener.Event =>
        this.onStreamingQueryListenerEvent(streamingQueryListenerEvent)
      case sparkListenerEvent: SparkListenerEvent => if (sparkListenerEvent.logEvent) {
        logSparkListenerEvent(sparkListenerEvent)
      }
    }
  }

  private def createSink(conf: SparkConf): SparkListenerSink = {
    val sink = conf.getOption("spark.unifiedListener.sink") match {
      case Some(listenerSinkClassName) => listenerSinkClassName
      case None => throw new SparkException("spark.unifiedListener.sink setting is required")
    }
    logInfo(s"Creating listener sink: ${sink}")
    org.apache.spark.util.Utils.loadExtensions(
      classOf[SparkListenerSink],
      Seq(sink),
      conf).head
  }

  protected def logSparkListenerEvent(
    event: SparkListenerEvent,
    getTimestamp: () => Instant = () => Instant.now()): Unit = {
    val json = try {
      // Add a well-known time field.
      Some(
        JsonProtocol.sparkEventToJson(event)
          .merge(render(
            SparkInformation.get() + ("SparkEventTime" -> getTimestamp().toString)
          ))
      )
    } catch {
      case NonFatal(e) =>
        logError(s"Error serializing SparkListenerEvent to JSON: $event", e)
        None
    }

    sendToSink(json)
  }

  private[spark] def sendToSink(json: Option[JValue]): Unit = {
    try {
      json match {
        case Some(j) =>
          logDebug(s"Sending event to listener sink: ${compact(j)}")
          this.listenerSink.logEvent(json)
        case None =>
          logWarning("JSON value was None; nothing to send to the sink")
      }
    } catch {
      case NonFatal(e) =>
        logError(s"Error sending to listener sink: $e")
    }
  }
} 
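A detail worth noting in logSparkListenerEvent above: render receives a plain Map[String, String], which works because the org.json4s.JsonDSL._ import supplies an implicit conversion (map2jvalue) from the map to a JValue. A standalone sketch of that mechanism, with made-up map contents:

import java.time.Instant

import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

object MapRenderSketch extends App {
  val info = Map("ApplicationId" -> "app-123") + ("SparkEventTime" -> Instant.now().toString)
  // JsonDSL turns the Map into a JObject before render sees it.
  println(compact(render(info))) // {"ApplicationId":"app-123","SparkEventTime":"..."}
}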
Example 2
Source File: ArrayParam.scala (from mmlspark, MIT License)
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package org.apache.spark.ml.param

import org.apache.spark.annotation.DeveloperApi
import org.json4s.{DefaultFormats, _}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.JavaConverters._


    def w(value: java.util.List[_]): ParamPair[Array[_]] = w(value.asScala.toArray)

    override def jsonEncode(value: Array[_]): String = {
      import org.json4s.JsonDSL._
      value match {
        case intArr: Array[Int] => compact(render(intArr.toSeq))
        case dbArr: Array[Double] => compact(render(dbArr.toSeq))
        case strArr: Array[String] => compact(render(strArr.toSeq))
        case blArr: Array[Boolean] => compact(render(blArr.toSeq))
        case intArr: Array[Integer] => compact(render(intArr.map(_.toLong).toSeq))
        case _ =>
          throw new IllegalArgumentException("Internal type not json serializable")
      }
    }

    override def jsonDecode(json: String): Array[_] = {
      implicit val formats: DefaultFormats.type = DefaultFormats
      parse(json).extract[Seq[_]].toArray
    }
  } 
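A quick, hypothetical round trip through the param above (the owning Params instance `stage` is assumed, not taken from the snippet; decoded element types follow json4s' DefaultFormats defaults):

// Hypothetical usage; `stage` is an assumed Params owner.
val arrayParam = new ArrayParam(stage, "thresholds", "per-class thresholds")
val encoded = arrayParam.jsonEncode(Array(0.1, 0.5, 0.9)) // "[0.1,0.5,0.9]"
val decoded = arrayParam.jsonDecode(encoded)              // the same values back,
                                                          // typed per DefaultFormats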
Example 3
Source File: OpPipelineStageWriter.scala (from TransmogrifAI, BSD 3-Clause "New" or "Revised" License)
package com.salesforce.op.stages

import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.util.MLWriter
import org.apache.spark.ml.{Estimator, SparkDefaultParamsReadWrite}
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.util.{Failure, Success}


// Restored class wrapper (elided in the scraped snippet); saveImpl is a simplified
// sketch, see the original TransmogrifAI source for the full implementation.
class OpPipelineStageWriter(val stage: OpPipelineStageBase) extends MLWriter {

  override protected def saveImpl(path: String): Unit = {
    sc.parallelize(Seq(writeToJsonString(path)), 1)
      .saveAsTextFile(new Path(path, "metadata").toString)
  }

  def writeToJsonString(path: String): String = compact(writeToJson(path))

  def writeToJson(path: String): JObject = {
    stage match {
      case _: Estimator[_] => return JObject() // no need to serialize estimators
      case s: SparkWrapperParams[_] =>
        // Set save path for all Spark wrapped stages of type [[SparkWrapperParams]] so they can save
        s.setStageSavePath(path)
      case _ =>
    }
    // We produce stage metadata for all the Spark params
    val metadata = SparkDefaultParamsReadWrite.getMetadataToSave(stage)

    // Write out the stage using the specified writer instance
    val writer = readerWriterFor[OpPipelineStageBase](stage.getClass.asInstanceOf[Class[OpPipelineStageBase]])
    val stageJson: JValue = writer.write(stage) match {
      case Failure(err) => throw new RuntimeException(s"Failed to write out stage '${stage.uid}'", err)
      case Success(json) => json
    }

    // Join the metadata with the stage ctor args
    val j = metadata.merge(JObject(FieldNames.CtorArgs.entryName -> stageJson))
    render(j).asInstanceOf[JObject]
  }

} 
Example 4
Source File: SparkStageParam.scala (from TransmogrifAI, BSD 3-Clause "New" or "Revised" License)
package com.salesforce.op.stages

import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.param.{Param, ParamPair, Params}
import org.apache.spark.ml.util.{Identifiable, MLReader, MLWritable}
import org.apache.spark.util.SparkUtils
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import org.json4s.{DefaultFormats, Formats, JString}

class SparkStageParam[S <: PipelineStage with Params]
(
  parent: String,
  name: String,
  doc: String,
  isValid: Option[S] => Boolean
) extends Param[Option[S]](parent, name, doc, isValid) {

  import SparkStageParam._

  // Restored (elided in the scraped snippet): path where the wrapped stage was saved.
  var savePath: Option[String] = None

  override def jsonDecode(jsonStr: String): Option[S] = {
    val json = parse(jsonStr)
    val uid = (json \ "uid").extractOpt[String]
    val path = (json \ "path").extractOpt[String]

    path -> uid match {
      case (None, _) | (_, None) | (_, Some(NoUID)) =>
        savePath = None
        None
      case (Some(p), Some(stageUid)) =>
        savePath = Option(p)
        val stagePath = new Path(p, stageUid).toString
        val className = (json \ "className").extract[String]
        val cls = SparkUtils.classForName(className)
        val stage = cls.getMethod("read").invoke(null).asInstanceOf[MLReader[PipelineStage]].load(stagePath)
        Option(stage).map(_.asInstanceOf[S])
    }
  }
}

object SparkStageParam {
  implicit val formats: Formats = DefaultFormats
  val NoClass = ""
  val NoUID = ""

  def updateParamsMetadataWithPath(jValue: JValue, path: String): JValue = jValue match {
    case JObject(pairs) => JObject(
      pairs.map {
        case (SparkWrapperParams.SparkStageParamName, j) =>
          SparkWrapperParams.SparkStageParamName -> j.merge(JObject("path" -> JString(path)))
        case param => param
      }
    )
    case j => throw new IllegalArgumentException(s"Cannot recognize JSON Spark params metadata: $j")
  }

} 
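The snippet above shows only jsonDecode, yet compact and render are imported: they are used by the corresponding jsonEncode, which was elided. A simplified sketch of that encode side, with the field layout inferred from jsonDecode above (the original source contains more handling than this):

  // Sketch only: encodes the stage pointer that jsonDecode above reads back.
  override def jsonEncode(sparkStage: Option[S]): String = {
    val json = sparkStage match {
      case Some(stage) =>
        ("className" -> stage.getClass.getName) ~
          ("uid" -> stage.uid) ~
          ("path" -> savePath.getOrElse(""))
      case None =>
        ("className" -> NoClass) ~ ("uid" -> NoUID)
    }
    compact(render(json))
  }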
Example 5
Source File: TransientFeatureArrayParam.scala (from TransmogrifAI, BSD 3-Clause "New" or "Revised" License)
package com.salesforce.op.stages

import com.salesforce.op.features._
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JArray, JValue}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.util.{Failure, Success}



// Restored class declaration (elided in the scraped snippet).
class TransientFeatureArrayParam
(
  parent: String,
  name: String,
  doc: String,
  isValid: Array[TransientFeature] => Boolean
) extends Param[Array[TransientFeature]](parent, name, doc, isValid) {

  implicit val formats: DefaultFormats.type = DefaultFormats

  override def w(value: Array[TransientFeature]): ParamPair[Array[TransientFeature]] = super.w(value)

  override def jsonEncode(value: Array[TransientFeature]): String = {
    compact(render(JArray(value.map(_.toJson).toList)))
  }

  override def jsonDecode(json: String): Array[TransientFeature] = {
    parse(json).extract[Array[JValue]].map(obj => {
      TransientFeature(obj) match {
        case Failure(e) => throw new RuntimeException("Failed to parse TransientFeature", e)
        case Success(v) => v
      }
    })
  }
} 
Example 6
Source File: OpPipelineStageReaderWriterTest.scala (from TransmogrifAI, BSD 3-Clause "New" or "Revised" License)
package com.salesforce.op.stages

import com.salesforce.op.features._
import com.salesforce.op.features.types._
import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.test.PassengerSparkFixtureTest
import com.salesforce.op.utils.reflection.ReflectionUtils
import com.salesforce.op.utils.spark.RichDataset._
import org.apache.spark.ml.{Model, Transformer}
import org.apache.spark.sql.types.{DataType, Metadata, MetadataBuilder}
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods.{compact, parse, pretty, render}
import org.json4s.{JArray, JObject}
import org.scalatest.FlatSpec
import org.slf4j.LoggerFactory


// TODO: consider adding a read/write test for a spark wrapped stage as well
private[stages] abstract class OpPipelineStageReaderWriterTest
  extends FlatSpec with PassengerSparkFixtureTest {

  val meta = new MetadataBuilder().putString("foo", "bar").build()
  val expectedFeaturesLength = 1
  def stage: OpPipelineStageBase with Transformer
  val expected: Array[Real]
  val hasOutputName = true

  private val log = LoggerFactory.getLogger(this.getClass)
  private lazy val savePath = tempDir + "/" + this.getClass.getSimpleName + "-" + System.currentTimeMillis()
  private lazy val writer = new OpPipelineStageWriter(stage)
  private lazy val stageJsonString: String = writer.writeToJsonString(savePath)
  private lazy val stageJson: JValue = parse(stageJsonString)
  private lazy val isModel = stage.isInstanceOf[Model[_]]
  private val FN = FieldNames

  Spec(this.getClass) should "write stage uid" in {
    log.info(pretty(stageJson))
    (stageJson \ FN.Uid.entryName).extract[String] shouldBe stage.uid
  }
  it should "write class name" in {
    (stageJson \ FN.Class.entryName).extract[String] shouldBe stage.getClass.getName
  }
  it should "write params map" in {
    val params = extractParams(stageJson).extract[Map[String, Any]]
    if (hasOutputName) {
      params should have size 4
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema", "outputFeatureName")
    } else {
      params should have size 3
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema")
    }
  }
  it should "write outputMetadata" in {
    val params = extractParams(stageJson)
    val metadataStr = compact(render(extractParams(stageJson) \ "outputMetadata"))
    val metadata = Metadata.fromJson(metadataStr)
    metadata shouldBe stage.getMetadata()
  }
  it should "write inputSchema" in {
    val schemaStr = compact(render(extractParams(stageJson) \ "inputSchema"))
    val schema = DataType.fromJson(schemaStr)
    schema shouldBe stage.getInputSchema()
  }
  it should "write input features" in {
    val jArray = (extractParams(stageJson) \ "inputFeatures").extract[JArray]
    jArray.values should have length expectedFeaturesLength
    val obj = jArray(0).extract[JObject]
    obj.values.keys shouldBe Set("name", "isResponse", "isRaw", "uid", "typeName", "stages", "originFeatures")
  }
  it should "write model ctor args" in {
    if (stage.isInstanceOf[Model[_]]) {
      val ctorArgs = (stageJson \ FN.CtorArgs.entryName).extract[JObject]
      val (_, args) = ReflectionUtils.bestCtorWithArgs(stage)
      ctorArgs.values.keys shouldBe args.map(_._1).toSet
    }
  }
  it should "load stage correctly" in {
    val reader = new OpPipelineStageReader(stage)
    val stageLoaded = reader.loadFromJsonString(stageJsonString, path = savePath)
    stageLoaded shouldBe a[OpPipelineStageBase]
    stageLoaded shouldBe a[Transformer]
    stageLoaded.getOutput() shouldBe a[FeatureLike[_]]
    val _ = stage.asInstanceOf[Transformer].transform(passengersDataSet)
    val transformed = stageLoaded.asInstanceOf[Transformer].transform(passengersDataSet)
    transformed.collect(stageLoaded.getOutput().asInstanceOf[FeatureLike[Real]]) shouldBe expected
    stageLoaded.uid shouldBe stage.uid
    stageLoaded.operationName shouldBe stage.operationName
    stageLoaded.getInputFeatures() shouldBe stage.getInputFeatures()
    stageLoaded.getInputSchema() shouldBe stage.getInputSchema()
  }

  private def extractParams(stageJson: JValue): JValue = {
    val defaultParamsMap = stageJson \ FN.DefaultParamMap.entryName
    val paramsMap = stageJson \ FN.ParamMap.entryName
    defaultParamsMap.merge(paramsMap)
  }

} 
Example 7
Source File: JObjectParam.scala (from sona, Apache License 2.0)
package com.tencent.angel.sona.ml.param
import com.tencent.angel.sona.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.{compact, parse, render}

class JObjectParam(parent: String, name: String, doc: String, isValid: JObject => Boolean)
  extends Param[JObject](parent, name, doc, isValid) {
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, (value: JObject) => value != null)

  def this(parent: Identifiable, name: String, doc: String, isValid: JObject => Boolean) =
    this(parent.uid, name, doc, isValid)

  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  override def w(value: JObject): ParamPair[JObject] = super.w(value)

  override def jsonEncode(value: JObject): String = {
    compact(render(value))
  }

  override def jsonDecode(json: String): JObject = {
    implicit val formats: DefaultFormats = DefaultFormats
    parse(json).asInstanceOf[JObject]
  }
} 
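A brief, hypothetical round trip with the param above (the parent name and payload are made up):

import org.json4s.JsonAST.{JInt, JObject}

val param = new JObjectParam("owner_uid", "config", "a JSON configuration object")
val encoded = param.jsonEncode(JObject("retries" -> JInt(3))) // {"retries":3}
val decoded = param.jsonDecode(encoded)                       // the same JObject back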
Example 8
Source File: LinearRegressionActor.scala (from coral, Apache License 2.0)
package io.coral.actors.transform

import akka.actor.{ActorLogging, Props}
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{render, pretty}
import io.coral.actors.{SimpleEmitTrigger, CoralActor}

object LinearRegressionActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			intercept <- (json \ "params" \ "intercept").extractOpt[Double]
			weights <- (json \ "params" \ "weights").extractOpt[Map[String, Double]]
		} yield {
			val outcome = (json \ "params" \ "outcome").extractOpt[String]
			(intercept, weights, outcome)
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[LinearRegressionActor], json))
	}
}

class LinearRegressionActor(json: JObject)
	extends CoralActor(json)
	with ActorLogging
	with SimpleEmitTrigger {
	val (intercept, weights, outcome) = LinearRegressionActor.getParams(json).get

	override def simpleEmitTrigger(json: JObject): Option[JValue] = {
		val inputVector = weights.keys.map(key => (json \ key).extractOpt[Double]).toVector

		if (inputVector.exists(_.isEmpty)) {
			None
		} else {
			val result = intercept + (inputVector.flatten zip weights.values).map(x => x._1 * x._2).sum
			val name = outcome.getOrElse("score")
			Some(render(name -> result) merge json)
		}
	}
} 
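The Some(render(name -> result) merge json) pattern above works because JsonDSL converts the (String, Double) pair to a JObject, which json4s then merges field by field with the incoming event. A standalone illustration with made-up values:

import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}

object MergeSketch extends App {
  val input = parse("""{"salary": 2000.0}""")
  // render turns the pair into {"score":123.45}; merge combines the two objects.
  val scored = render("score" -> 123.45) merge input
  println(compact(scored)) // {"score":123.45,"salary":2000.0}
}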
Example 9
Source File: ZscoreActor.scala (from coral, Apache License 2.0)
package io.coral.actors.transform

import akka.actor.Props
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.render
import io.coral.actors.{CollectDef, CoralActor}
import scala.concurrent.Future

object ZscoreActor {
	implicit val formats = org.json4s.DefaultFormats

	def collectAliases = List("count", "avg", "std")

	def getParams(json: JValue) = {
		for {
			field <- (json \ "params" \ "field").extractOpt[String]
			score <- (json \ "params" \ "score").extractOpt[Double]
			if (CollectDef.validCollectDef(json.asInstanceOf[JObject],
				collectAliases).isRight)
		} yield {
			(field, score)
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[ZscoreActor], json))
	}
}

class ZscoreActor(json: JObject) extends CoralActor(json) {
	val (field, score) = ZscoreActor.getParams(jsonDef).get

	override def trigger = { json =>
		val value = (json \ field).extract[Double]

		for {
			count <- collect[Int]("count")
			avg <- collect[Double]("avg")
			std <- collect[Double]("std")
			outlier <- isOutlier(count, avg, std, value)
		} yield {
			determineOutput(json, outlier)
		}
	}

	private def isOutlier(count: Long, avg: Double, std: Double, value: Double): Future[Boolean] = {
		Future {
			val th = avg + score * std
			// arbitrary count threshold to make sure there
			// is enough data to perform calculation on
			(value > th) && (count > 20)
		}
	}

	private def determineOutput(json: JObject, outlier: Boolean): Option[JValue] = {
		if (outlier) {
			// Tag the event as an outlier and merge the flag back into the input JSON.
			val result = ("outlier" -> outlier)
			Some(render(result) merge json)
		} else {
			Some(JNothing)
		}
	}
} 
Example 10
Source File: StatsActor.scala (from coral, Apache License 2.0)
package io.coral.actors.transform

import akka.actor.{ActorLogging, Props}
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods.render
import io.coral.actors.{SimpleTimer, NoEmitTrigger, CoralActor}
import io.coral.lib.SummaryStatistics

import scala.language.implicitConversions

object StatsActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			field <- (json \ "params" \ "field").extractOpt[String]
		} yield {
			field
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[StatsActor], json))
	}
}

class StatsActor(json: JObject)
	extends CoralActor(json)
	with ActorLogging
	with NoEmitTrigger
	with SimpleTimer {
	implicit def double2jvalue(x: Double): JValue = if (x.isNaN) JNull else JDouble(x)

	val field = StatsActor.getParams(json).get
	val statistics = SummaryStatistics.mutable

	override def simpleTimer = {
		statistics.reset()
		Some(JNothing)
	}

	override def state = Map(
		("count", render(statistics.count)),
		("avg", render(statistics.average)),
		("sd", render(statistics.populationSd)),
		("min", render(statistics.min)),
		("max", render(statistics.max))
	)

	override def noEmitTrigger(json: JObject) = {
		for {
			value <- (json \ field).extractOpt[Double]
		} yield {
			statistics.append(value)
		}
	}
} 
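The double2jvalue implicit above is what lets state pass plain Doubles to render, and it maps NaN to JNull because NaN has no legal JSON encoding. A tiny standalone check of that behavior:

import org.json4s._
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.language.implicitConversions

object NanRenderSketch extends App {
  implicit def double2jvalue(x: Double): JValue = if (x.isNaN) JNull else JDouble(x)

  println(compact(render(Double.NaN))) // prints: null
  println(compact(render(1.5)))        // prints: 1.5
}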
Example 11
Source File: ThresholdActor.scala (from coral, Apache License 2.0)
package io.coral.actors.transform

import akka.actor.{ActorLogging, Props}
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.render
import io.coral.actors.{SimpleEmitTrigger, CoralActor}

object ThresholdActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			key <- (json \ "params" \ "key").extractOpt[String]
			threshold <- (json \ "params" \ "threshold").extractOpt[Double]
		} yield (key, threshold)
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[ThresholdActor], json))
	}
}

class ThresholdActor(json: JObject) extends CoralActor(json) with ActorLogging with SimpleEmitTrigger {
	val (key, threshold) = ThresholdActor.getParams(json).get

	override def simpleEmitTrigger(json: JObject): Option[JValue] = {
		for {
			value <- (json \ key).extractOpt[Double]
		} yield {
			if (value >= threshold) json else JNothing
		}
	}
}