com.google.gson.JsonObject Scala Examples

The following examples show how to use com.google.gson.JsonObject. Each example is drawn from an open-source project; the source file and originating project are noted in the heading above it.
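Before the project examples, here is a minimal, self-contained sketch of the basic JsonObject API: building an object, nesting one inside another, and parsing one back from text. The object and field names are illustrative only.

import com.google.gson.{JsonObject, JsonParser}

object JsonObjectBasics extends App {
  // Build a JsonObject field by field.
  val address = new JsonObject()
  address.addProperty("city", "Seattle")

  val person = new JsonObject()
  person.addProperty("name", "Ada")
  person.addProperty("age", 36)
  person.add("address", address) // nest one object inside another

  println(person.toString) // {"name":"Ada","age":36,"address":{"city":"Seattle"}}

  // Parse a JSON string back into a JsonObject and read a nested member.
  val parsed = new JsonParser().parse(person.toString).getAsJsonObject
  println(parsed.get("address").getAsJsonObject.get("city").getAsString) // Seattle
}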
Example 1
Source File: ExportBackGroundService.scala    From Linkis    with Apache License 2.0
package com.webank.wedatasphere.linkis.entrance.background

import java.util

import com.google.gson.internal.LinkedTreeMap
import com.google.gson.{JsonObject, JsonParser}
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.server._
import com.webank.wedatasphere.linkis.server.socket.controller.ServerEvent


class ExportBackGroundService extends AbstractBackGroundService with Logging {
  override val serviceType: String = "export"

  override def operation(serverEvent: ServerEvent): ServerEvent = {
    val params = serverEvent.getData.map { case (k, v) => k -> v.asInstanceOf[Any] }
    // "executionCode" arrives as a nested map: section -> (key -> value).
    val ec = params("executionCode").asInstanceOf[LinkedTreeMap[String, LinkedTreeMap[String, String]]]
    // Turn an escaped "\t" delimiter into a real tab character before it is used.
    if (ec.get("destination") != null && ec.get("destination").get("fieldDelimiter") != null) {
      info(s"---${ec.get("destination").get("fieldDelimiter")}---")
      ec.get("destination").get("fieldDelimiter") match {
        case "\\t" => ec.get("destination").put("fieldDelimiter", "\t")
        case _ => info("---other fieldDelimiter---")
      }
    }
    val executionCode = BDPJettyServerHelper.gson.toJson(params("executionCode"))
    // TODO: the header may be removed
    var newExecutionCode = ""
    val jsonParser = new JsonParser()
    val jsonCode = jsonParser.parse(executionCode).getAsJsonObject
    val destination = "val destination = \"\"\"" + jsonCode.get("destination").toString + "\"\"\"\n"
    val dataInfo = jsonCode.get("dataInfo").toString
    var newDataInfo = "val dataInfo = \"\"\""
    val length = dataInfo.length
    if (length > 6000) {
      newDataInfo += dataInfo.substring(0, 6000) + "\"\"\"" + "+" + "\"\"\"" + dataInfo.substring(6000,length) + "\"\"\"\n"
    } else {
      newDataInfo += dataInfo + "\"\"\"\n"
    }
    newExecutionCode += destination
    newExecutionCode += newDataInfo
    newExecutionCode += "com.webank.wedatasphere.linkis.engine.imexport.ExportData.exportData(spark,dataInfo,destination)"
    params.put("executionCode", newExecutionCode)
    print(newExecutionCode)
    // ServerEvent carries a java.util.Map payload, so copy the Scala map back.
    val map = new util.HashMap[String, Object]()
    params.foreach(f => map.put(f._1, f._2.asInstanceOf[Object]))
    serverEvent.setData(map)
    serverEvent
  }
} 
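The Gson-specific step above is parsing the serialized "executionCode" map and extracting nested members as raw JSON text. A stripped-down sketch of just that step, with a made-up payload shaped like the one the service receives:

import com.google.gson.{JsonObject, JsonParser}

object ParseExecutionCode extends App {
  // A made-up payload shaped like the "executionCode" map above.
  val executionCode =
    """{"destination":{"fieldDelimiter":"\t","path":"/tmp/out"},"dataInfo":{"rows":2}}"""

  val jsonCode: JsonObject = new JsonParser().parse(executionCode).getAsJsonObject
  val destination = jsonCode.get("destination").toString // raw JSON text of the member
  val dataInfo = jsonCode.get("dataInfo").toString

  println(destination) // {"fieldDelimiter":"\t","path":"/tmp/out"}
  println(dataInfo)    // {"rows":2}
}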
Example 2
Source File: LoadBackGroundService.scala    From Linkis    with Apache License 2.0
package com.webank.wedatasphere.linkis.entrance.background

import java.util

import com.google.gson.{JsonObject, JsonParser}
import com.webank.wedatasphere.linkis.server._
import com.webank.wedatasphere.linkis.server.socket.controller.ServerEvent


class LoadBackGroundService extends AbstractBackGroundService {

  override val serviceType: String = "load"

  override def operation(serverEvent: ServerEvent): ServerEvent = {
    val params = serverEvent.getData.map { case (k, v) => k -> v.asInstanceOf[Any] }
    val executionCode = BDPJettyServerHelper.gson.toJson(params("executionCode"))
    // TODO: the header may be removed
    var newExecutionCode = ""
    val jsonParser = new JsonParser()
    val jsonCode = jsonParser.parse(executionCode).getAsJsonObject
    // Embed the JSON members as triple-quoted Scala string literals in the generated code.
    val source = "val source = \"\"\"" + jsonCode.get("source").toString + "\"\"\"\n"
    val destination = jsonCode.get("destination").toString
    var newDestination = "val destination = \"\"\""
    val length = destination.length
    // Same two-literal split as in Example 1 for very long payloads.
    if (length > 6000) {
      newDestination += destination.substring(0, 6000) + "\"\"\"" + "+" + "\"\"\"" + destination.substring(6000, length) + "\"\"\"\n"
    } else {
      newDestination += destination + "\"\"\"\n"
    }
    newExecutionCode += source
    newExecutionCode += newDestination
    newExecutionCode += "com.webank.wedatasphere.linkis.engine.imexport.LoadData.loadDataToTable(spark,source,destination)"
    params.put("executionCode", newExecutionCode)
    print(newExecutionCode)
    val map = new util.HashMap[String, Object]()
    params.foreach(f => map.put(f._1, f._2.asInstanceOf[Object]))
    serverEvent.setData(map)
    serverEvent
  }
} 
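Examples 1 and 2 duplicate the same literal-splitting logic: a JSON payload is embedded in generated Scala code as a triple-quoted string, split in two at 6,000 characters, presumably to stay under the JVM's 64 KB limit on string constants. A sketch of a helper that would factor this out; the object and method names are hypothetical:

object CodeGenUtil {
  private val MaxChunk = 6000

  // Render `value` as a triple-quoted Scala string literal bound to `name`,
  // mirroring the two-literal split the examples above use for long payloads.
  def asTripleQuotedLiteral(name: String, value: String): String = {
    val q = "\"\"\""
    val quoted =
      if (value.length > MaxChunk)
        q + value.substring(0, MaxChunk) + q + "+" + q + value.substring(MaxChunk) + q
      else
        q + value + q
    s"val $name = $quoted\n"
  }
}

With such a helper, both services would reduce to two calls each, e.g. asTripleQuotedLiteral("dataInfo", jsonCode.get("dataInfo").toString).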
Example 3
Source File: ConstraintSuggestion.scala    From deequ    with Apache License 2.0
package com.amazon.deequ.suggestions

import com.amazon.deequ.VerificationResult
import com.amazon.deequ.constraints.Constraint
import com.amazon.deequ.profiles.ColumnProfile
import com.amazon.deequ.suggestions.rules.ConstraintRule
import com.google.gson.{GsonBuilder, JsonArray, JsonObject}

case class ConstraintSuggestion(
    constraint: Constraint,
    columnName: String,
    currentValue: String,
    description: String,
    suggestingRule: ConstraintRule[ColumnProfile],
    codeForConstraint: String
)

object ConstraintSuggestions {

  private[this] val CONSTRAINT_SUGGESTIONS_FIELD = "constraint_suggestions"

  private[suggestions] def toJson(constraintSuggestions: Seq[ConstraintSuggestion]): String = {

    val json = new JsonObject()

    val constraintsJson = new JsonArray()

    constraintSuggestions.foreach { constraintSuggestion =>

      val constraintJson = new JsonObject()
      addSharedProperties(constraintJson, constraintSuggestion)

      constraintsJson.add(constraintJson)
    }

    json.add(CONSTRAINT_SUGGESTIONS_FIELD, constraintsJson)

    val gson = new GsonBuilder()
      .setPrettyPrinting()
      .create()

    gson.toJson(json)
  }

  private[suggestions] def evaluationResultsToJson(
      constraintSuggestions: Seq[ConstraintSuggestion],
      result: VerificationResult)
    : String = {

    val constraintResults = result.checkResults
      .map { case (_, checkResult) => checkResult }
      .headOption.map { checkResult =>
        checkResult.constraintResults
      }
      .getOrElse(Seq.empty)

    val json = new JsonObject()

    val constraintEvaluations = new JsonArray()

    val constraintResultsOnTestSet = constraintResults.map { checkResult =>
      checkResult.status.toString
    }

    // zipAll pads with "Unknown" when a suggestion has no matching evaluation result.
    constraintSuggestions.zipAll(constraintResultsOnTestSet, null, "Unknown")
      .foreach { case (constraintSuggestion, constraintResult) =>

        val constraintEvaluation = new JsonObject()
        addSharedProperties(constraintEvaluation, constraintSuggestion)

        constraintEvaluation.addProperty("constraint_result_on_test_set",
          constraintResult)

        constraintEvaluations.add(constraintEvaluation)
      }

    json.add(CONSTRAINT_SUGGESTIONS_FIELD, constraintEvaluations)

    val gson = new GsonBuilder()
      .setPrettyPrinting()
      .create()

    gson.toJson(json)
  }

  private[this] def addSharedProperties(
      jsonObject: JsonObject,
      constraintSuggestion: ConstraintSuggestion)
    : Unit = {

    jsonObject.addProperty("constraint_name", constraintSuggestion.constraint.toString)
    jsonObject.addProperty("column_name", constraintSuggestion.columnName)
    jsonObject.addProperty("current_value", constraintSuggestion.currentValue)
    jsonObject.addProperty("description", constraintSuggestion.description)
    jsonObject.addProperty("suggesting_rule", constraintSuggestion.suggestingRule.toString)
    jsonObject.addProperty("rule_description", constraintSuggestion.suggestingRule.ruleDescription)
    jsonObject.addProperty("code_for_constraint", constraintSuggestion.codeForConstraint)
  }
} 
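The serialization pattern in this example, a top-level object holding a JsonArray of flat objects and pretty-printed through GsonBuilder, works the same way outside deequ. A minimal standalone sketch with made-up field values:

import com.google.gson.{GsonBuilder, JsonArray, JsonObject}

object SuggestionJsonSketch extends App {
  val suggestion = new JsonObject()
  suggestion.addProperty("constraint_name", "CompletenessConstraint(Completeness(name))")
  suggestion.addProperty("column_name", "name")
  suggestion.addProperty("code_for_constraint", ".isComplete(\"name\")")

  val suggestions = new JsonArray()
  suggestions.add(suggestion)

  val root = new JsonObject()
  root.add("constraint_suggestions", suggestions)

  // Pretty-print, as ConstraintSuggestions.toJson does.
  println(new GsonBuilder().setPrettyPrinting().create().toJson(root))
}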
Example 4
Source File: BigQueryReader.scala    From sope    with Apache License 2.0
package com.sope.spark.utils.google

import com.google.cloud.hadoop.io.bigquery.{BigQueryConfiguration, GsonBigQueryInputFormat}
import com.google.gson.JsonObject
import com.sope.utils.Logging
import org.apache.hadoop.io.LongWritable
import org.apache.spark.sql.{DataFrame, SQLContext}


// NOTE: the class declaration was truncated in this listing; the wrapper below is a
// reconstruction (the constructor parameters are assumed) so that the example compiles.
class BigQueryReader(sqlContext: SQLContext, projectId: String, fullyQualifiedInputTableId: String) extends Logging {

  private val sc = sqlContext.sparkContext
  private val conf = sc.hadoopConfiguration

  // Point the BigQuery input format at the source table.
  conf.set(BigQueryConfiguration.PROJECT_ID_KEY, projectId)
  BigQueryConfiguration.configureBigQueryInput(conf, fullyQualifiedInputTableId)

  def load(): DataFrame = {
    import sqlContext.implicits._
    // Load data from BigQuery.
    val tableData = sc.newAPIHadoopRDD(
      conf,
      classOf[GsonBigQueryInputFormat],
      classOf[LongWritable],
      classOf[JsonObject])
      .map(_._2.toString)
    sqlContext.read.json(tableData.toDS)
  }
}
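A sketch of how this reader might be wired up, assuming the reconstructed constructor above; the project and table identifiers are placeholders:

import org.apache.spark.sql.{SQLContext, SparkSession}

object BigQueryReaderUsage {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("bigquery-reader-example")
      .getOrCreate()
    val sqlContext: SQLContext = spark.sqlContext

    // Placeholder identifiers; substitute a real project and table.
    val reader = new BigQueryReader(sqlContext, "my-project", "my-project:my_dataset.my_table")
    reader.load().show()
  }
}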