org.json4s.jackson.Serialization.write Scala Examples

The following examples show how to use org.json4s.jackson.Serialization.write. Each example is taken from an open-source project; the project and source file are noted in the header above it.
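At its core, write serializes a value to a JSON string and requires an implicit org.json4s.Formats in scope. A minimal sketch of that basic pattern, using a hypothetical Person case class:

import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization.write

object MinimalWriteExample extends App {
  case class Person(name: String, age: Int) // hypothetical example type

  implicit val formats: DefaultFormats.type = DefaultFormats
  println(write(Person("Ada", 36))) // prints {"name":"Ada","age":36}
}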
Example 1
Source File: JsoneyString.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.sdk.properties

import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write

case class JsoneyString(string : String) {
  override def toString : String = string
  def toSeq : Seq[String] = {
    // transforms a string of the form "[\"prop1\",\"prop2\"]" into a Seq
    string.drop(1).dropRight(1).replaceAll("\"","").split(",").toSeq
  }
}

class JsoneyStringSerializer extends CustomSerializer[JsoneyString](format => (
  {
    case obj : JObject => {
      new JsoneyString(write(obj)(implicitly(DefaultFormats + new JsoneyStringSerializer)))
    }
    case obj : org.json4s.JsonAST.JNull.type => {
      new JsoneyString(null)
    }
    case obj : JArray => {
      new JsoneyString(write(obj)(implicitly(DefaultFormats + new JsoneyStringSerializer)))
    }
    case s: JString =>
      new JsoneyString(s.s)
    case i : JInt =>
      new JsoneyString(i.num.toString())
    case b : JBool =>
      new JsoneyString(b.value.toString())
  },
  {
    case x: JsoneyString =>
      if(x.string == null) {
        new JString("")
      } else if(x.string.contains("[") && x.string.contains("{")) {
        parse(x.string)
      } else if(x.string.equals("true") || x.string.equals("false")) {
        new JBool(x.string.toBoolean)
      } else {
        new JString(x.string)
      }
  }
  ))
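A short usage sketch for this serializer; the JsoneyStringTest example below exercises it more thoroughly:

implicit val formats = DefaultFormats + new JsoneyStringSerializer()
val js = parse("""{ "foo": "bar" }""").extract[JsoneyString]
// js.toString == """{"foo":"bar"}"""
val json = write(js) // the JSON re-escaped as a JSON string literal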
Example 2
Source File: JsonBodyForError.scala    From seahorse   with Apache License 2.0
package ai.deepsense.seahorse.scheduling.model

import org.json4s.DefaultFormats

object JsonBodyForError {

  import org.json4s.jackson.Serialization.write
  implicit val formats = DefaultFormats

  def apply(errorCode: Int, message: String): String = {
    val error = Error(
      code = errorCode,
      message = message
    )
    write(error)
  }

} 
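Error is a model case class from the same package and is not shown in the snippet; judging from the call site, a hypothetical reconstruction would be:

case class Error(code: Int, message: String)

With DefaultFormats in scope, write(Error(404, "Not found")) then yields {"code":404,"message":"Not found"}.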
Example 3
Source File: JsonBodyForError.scala    From seahorse   with Apache License 2.0
package ai.deepsense.seahorse.datasource.model

import org.json4s.DefaultFormats

object JsonBodyForError {

  import org.json4s.jackson.Serialization.write
  implicit val formats = DefaultFormats

  def apply(errorCode: Int, message: String): String = {
    val error = Error(
      code = errorCode,
      message = message
    )
    write(error)
  }

} 
Example 4
Source File: FileStreamSinkLog.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
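The implicit Serialization.formats(NoTypeHints) line sets up json4s for serializing SinkFileStatus entries as JSON lines (Example 10, an older variant, shows explicit serializeData/deserializeData overrides built on write and read). A self-contained sketch of that round-trip pattern, using a stand-in case class:

import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

object RoundTripSketch extends App {
  case class Entry(path: String, action: String) // stand-in, not the real SinkFileStatus

  implicit val formats = Serialization.formats(NoTypeHints)
  val json = write(Entry("/data/part-0", "add")) // {"path":"/data/part-0","action":"add"}
  val back = read[Entry](json)                   // Entry(/data/part-0,add)
}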
Example 5
Source File: MockDataReceiver.scala    From toketi-kafka-connect-iothub   with MIT License
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source.testhelpers

import java.text.SimpleDateFormat
import java.time.{Duration, Instant}

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import com.microsoft.azure.iot.kafka.connect.source.{DataReceiver, IotMessage, JsonSerialization}
import org.json4s.jackson.Serialization.write

import scala.collection.mutable
import scala.util.Random

class MockDataReceiver(val connectionString: String, val receiverConsumerGroup: String, val partition: String,
    var offset: Option[String], val startTime: Option[Instant], val receiveTimeout: Duration
    ) extends DataReceiver with JsonSerialization {

  private val random: Random = new Random

  override def receiveData(batchSize: Int): Iterable[IotMessage] = {
    val list = scala.collection.mutable.ListBuffer.empty[IotMessage]
    for (i <- 0 until batchSize) {
      list += generateIotMessage(i)
    }
    list
  }

  def generateIotMessage(index: Int): IotMessage = {
    val temp = 70 + random.nextInt(10) + random.nextDouble()
    val deviceTemp = DeviceTemperature(temp, "F")
    val deviceTempStr = write(deviceTemp)

    val systemProperties = mutable.Map[String, Object](
      "iothub-connection-device-id" → s"device$index",
      AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME → index.toLong.asInstanceOf[Object],
      AmqpConstants.AMQP_PROPERTY_CORRELATION_ID → random.nextString(10),
      AmqpConstants.OFFSET_ANNOTATION_NAME → random.nextString(10),
      AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME → new SimpleDateFormat("MM/dd/yyyy").parse("12/01/2016"))

    val messageProperties = mutable.Map[String, Object](
      "timestamp" → Instant.now().toString,
      "contentType" → "temperature"
    )

    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)
    iotMessage
  }

  override def close(): Unit = {}
} 
Example 6
Source File: AnnotatorParam.scala    From spark-nlp   with Apache License 2.0
package com.johnsnowlabs.nlp.annotators.param

import java.util.{Date, TimeZone}

import org.apache.spark.ml.param.Param
import org.apache.spark.ml.util.Identifiable
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write


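  // Note: this excerpt omits the enclosing Param subclass declaration,
  // which binds the type parameters A and B used by jsonEncode/jsonDecode below.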
  object SerializableFormat extends Formats with Serializable {
    class SerializableDateFormat extends DateFormat {
      def timezone: TimeZone = throw new Exception("SerializableFormat does not implement dateformat")
      override def format(d: Date): String = throw new Exception("SerializableFormat does not implement dateformat")
      override def parse(s: String): Option[Date] = throw new Exception("SerializableFormat does not implement dateformat")
    }
    override def dateFormat: DateFormat = new SerializableDateFormat
  }

  implicit val formats = SerializableFormat

  override def jsonEncode(value: A): String = write(value.serialize)

  override def jsonDecode(json: String): A = parse(json).extract[B].deserialize
} 
Example 7
Source File: ApiService.scala    From ProductWebUI   with Apache License 2.0
package services

import i18n.I18NLoc
import shared.Api
import shared.dtos.ServerModel
import org.json4s.jackson.Serialization.write


class ApiService extends Api {
  override def postServers(servers: Seq[ServerModel]): String = {
    servers.foreach(println)
    "server post success"
  }

  override def getLang(lang: String): String = {
    try {
      scala.io.Source.fromFile(s"server/src/main/scala/i18n/${lang}.json", "utf-8").mkString
    } catch {
      case e: Exception =>
        println(e.getMessage)
        ""
    }
  }
} 
Example 8
Source File: FileStreamSinkLog.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 9
Source File: JsoneyStringTest.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.sdk.properties

import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write
import org.json4s.{DefaultFormats, _}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class JsoneyStringTest extends WordSpecLike with Matchers {

  "A JsoneyString" should {
    "have toString equivalent to its internal string" in {
      assertResult("foo")(new JsoneyString("foo").toString)
    }

    "be deserialized if its JSON" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse( """{ "foo": "bar" }""").extract[JsoneyString]
      assertResult(new JsoneyString( """{"foo":"bar"}"""))(result)
    }

    "be deserialized if it's a String" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("\"foo\"").extract[JsoneyString]
      assertResult(new JsoneyString("foo"))(result)
    }

    "be deserialized if it's an Int" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("1").extract[JsoneyString]
      assertResult(new JsoneyString("1"))(result)
    }

    "be serialized as JSON" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()

      var result = write(new JsoneyString("foo"))
      assertResult("\"foo\"")(result)

      result = write(new JsoneyString("{\"foo\":\"bar\"}"))
      assertResult("\"{\\\"foo\\\":\\\"bar\\\"}\"")(result)
    }

    "be deserialized if it's an JBool" in {
      implicit val json4sJacksonFormats = DefaultFormats + new JsoneyStringSerializer()
      val result = parse("true").extract[JsoneyString]
      assertResult(new JsoneyString("true"))(result)
    }

    "have toSeq equivalent to its internal string" in {
      assertResult(Seq("o"))(new JsoneyString("foo").toSeq)
    }
  }
} 
Example 10
Source File: FileStreamSinkLog.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import org.apache.hadoop.fs.{FileStatus, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf


class FileStreamSinkLog(
    metadataLogVersion: String,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  private implicit val formats = Serialization.formats(NoTypeHints)

  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  protected override val compactInterval = sparkSession.sessionState.conf.fileSinkLogCompactInterval
  require(compactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $compactInterval) " +
      "to a positive value.")

  protected override def serializeData(data: SinkFileStatus): String = {
    write(data)
  }

  protected override def deserializeData(encodedString: String): SinkFileStatus = {
    read[SinkFileStatus](encodedString)
  }

  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    val deletedFiles = logs.filter(_.action == FileStreamSinkLog.DELETE_ACTION).map(_.path).toSet
    if (deletedFiles.isEmpty) {
      logs
    } else {
      logs.filter(f => !deletedFiles.contains(f.path))
    }
  }
}

object FileStreamSinkLog {
  val VERSION = "v1"
  val DELETE_ACTION = "delete"
  val ADD_ACTION = "add"
} 
Example 11
Source File: package.scala    From CMAK   with Apache License 2.0
package kafka.manager

import java.nio.charset.StandardCharsets
import java.text.NumberFormat


package object utils {
  import org.json4s._
  import org.json4s.jackson.JsonMethods._
  import org.json4s.jackson.Serialization.{read, write}
  implicit val formats = DefaultFormats
  private[this] val numberFormat = NumberFormat.getInstance()
  
  implicit class LongFormatted(val x: Long) {
    def formattedAsDecimal = numberFormat.format(x)  
  }

  implicit def serializeString(data: String) : Array[Byte] = {
    data.getBytes(StandardCharsets.UTF_8)
  }

  implicit def deserializeString(data: Array[Byte]) : String  = {
    new String(data, StandardCharsets.UTF_8)
  }

  def toJson(map: Map[String, Any]): String = {
    write(map)
  }
  
  def toJson(s: String) : String = {
    "\"" + s + "\""
  }

  def fromJson[T : Manifest](s: String) : T = {
    read(s)
  }

  def parseJson(s: String) : JValue = {
    parse(s)
  }

  @throws[UtilException]
  def checkCondition(cond: Boolean, error: UtilError) : Unit = {
    if(!cond) {
      throw new UtilException(error)
    }
  }

  @throws[UtilException]
  def throwError [T] (error: UtilError) : T = {
    throw new UtilException(error)
  }
} 
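A brief usage sketch for the JSON helpers above (fromJson needs a Manifest for the target type, which its context bound provides):

import kafka.manager.utils._

val json = toJson(Map("broker" -> 1, "host" -> "localhost")) // {"broker":1,"host":"localhost"}
val m = fromJson[Map[String, String]]("""{"a":"b"}""")       // Map(a -> b)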
Example 12
Source File: StreamStaticDataGenerator.scala    From structured-streaming-application   with Apache License 2.0
package knolx.kafka

import java.util.Properties

import akka.actor.ActorSystem
import knolx.Config.{bootstrapServer, topic}
import knolx.KnolXLogger
import knolx.spark.Stock
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.DurationInt
import scala.util.Random


object StreamStaticDataGenerator extends App with KnolXLogger {
  val system = ActorSystem("DataStreamer")
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer)
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

  val producer = new KafkaProducer[String, String](props)

  val companyNames = List("kirloskar", "bajaj", "amul", "dlf", "ebay")
  val orderTypes = List("buy", "sell")
  val numberOfSharesList = List(1, 2, 3, 4, 5, 6, 7, 8, 9)

  implicit val formats = Serialization.formats(NoTypeHints)
  info("Streaming data into Kafka...")
  system.scheduler.schedule(0 seconds, 5 seconds) {
    companyNames.foreach { name =>
      val stock = Stock(name, Random.shuffle(numberOfSharesList).head, Random.shuffle(orderTypes).head)
      producer.send(new ProducerRecord[String, String](topic, write(stock)))
    }
  }
} 
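Stock is a domain case class defined in knolx.spark and not shown here; a shape consistent with the call site would be (hypothetical field names):

case class Stock(companyName: String, numberOfShares: Int, orderType: String)

so each record value written to Kafka is JSON such as {"companyName":"amul","numberOfShares":3,"orderType":"buy"}.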
Example 13
Source File: StreamStreamDataGenerator.scala    From structured-streaming-application   with Apache License 2.0
package knolx.kafka

import java.util.Properties

import akka.actor.ActorSystem
import knolx.Config._
import knolx.KnolXLogger
import knolx.spark.Stock
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.DurationInt
import scala.util.Random


object StreamStreamDataGenerator extends App with KnolXLogger {
  val system = ActorSystem("DataStreamer")
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer)
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

  val producer = new KafkaProducer[String, String](props)

  val companyNames = List("kirloskar", "bajaj", "amul", "dlf", "ebay")
  val orderTypes = List("buy", "sell")
  val numberOfSharesList = List(1, 2, 3, 4, 5, 6, 7, 8, 9)
  val randomCompanyNames = Random.shuffle(companyNames).drop(Random.shuffle((1 to 3).toList).head)

  implicit val formats = Serialization.formats(NoTypeHints)

  info("Streaming companies listed into Kafka...")
  system.scheduler.schedule(0 seconds, 20 seconds) {
    randomCompanyNames.foreach { name =>
      producer.send(new ProducerRecord[String, String](companiesTopic, name))
    }
  }

  info("Streaming stocks data into Kafka...")
  system.scheduler.schedule(0 seconds, 5 seconds) {
    companyNames.foreach { name =>
      val stock = Stock(name, Random.shuffle(numberOfSharesList).head, Random.shuffle(orderTypes).head)
      producer.send(new ProducerRecord[String, String](stocksTopic, write(stock)))
    }
  }
} 
Example 14
Source File: Slack.scala    From amadou   with Apache License 2.0
package com.mediative.amadou

import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.{ContentType, StringEntity}
import org.apache.http.impl.client.HttpClients
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

object Slack {
  case class PostException(msg: String) extends RuntimeException(msg)

  case class Payload(
      channel: String,
      text: String,
      username: String,
      icon_emoji: String,
      link_names: Boolean)
}


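  // Note: this excerpt omits the enclosing Slack class declaration, which
  // supplies the url, channel, user, icon, and logger referenced below.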
  def post(message: String, icon: String = this.icon): Unit = {
    val payload = Payload(channel, message, user, icon, true)
    logger.info(s"Posting $payload to $url")

    val client        = HttpClients.createDefault()
    val requestEntity = new StringEntity(write(payload), ContentType.APPLICATION_JSON)
    val postMethod    = new HttpPost(url)
    postMethod.setEntity(requestEntity)

    val response = client.execute(postMethod)
    client.close()
    val status = response.getStatusLine
    if (status.getStatusCode != 200)
      throw PostException(
        s"$url replied with status ${status.getStatusCode}: ${status.getReasonPhrase}")
  }
} 
Example 15
Source File: ApiHandler.scala    From avoin-voitto   with MIT License
package liigavoitto.api

import akka.actor.ActorSystem
import liigavoitto.fetch.{ScoresFetcher, ScoresFromFilesFetcher}
import liigavoitto.journalist.LiigaJournalist
import liigavoitto.scores.ScoresApiClient
import liigavoitto.util.DateTimeNoMillisSerializer
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write

class ApiHandler(implicit val system: ActorSystem) {
  implicit val formats = Serialization.formats(NoTypeHints) + DateTimeNoMillisSerializer

  val api = new ScoresApiClient()

  def report(matchId: String, lang: String) = {
    val fetcher = new ScoresFetcher(matchId, api)
    val article = LiigaJournalist.createArticle(fetcher.getEnrichedMatchData, lang)
    write(article)
  }

  def localReport(matchId: String, lang: String) = {
    val fetcher = new ScoresFromFilesFetcher(matchId)
    val article = LiigaJournalist.createArticle(fetcher.getEnrichedMatchData, lang)
    write(article)
  }
} 
Example 16
Source File: JavaCollectionSerializer.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.rpc.transform

import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import org.json4s.{CustomSerializer, JArray, JObject}
import org.json4s.jackson.Serialization.write
import org.json4s.jackson.JsonMethods.parse

// TODO: currently only the simplest implementation; optimize later as needed.

object JavaCollectionSerializer extends CustomSerializer[java.util.List[_]](implicit formats => ( {
  case j: JArray=> BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.List[_]])
}, {
  case list: java.util.List[_] => parse(BDPJettyServerHelper.gson.toJson(list))
}
)
)

object JavaMapSerializer extends CustomSerializer[java.util.Map[_, _]](implicit formats => ( {
  case j: JObject => BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.Map[_, _]])
}, {
  case map: java.util.Map[_, _] => parse(BDPJettyServerHelper.gson.toJson(map))
}
)
) 
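These serializers are intended to be registered into a Formats instance before calling write or extract, e.g. (a sketch):

implicit val formats = org.json4s.DefaultFormats + JavaCollectionSerializer + JavaMapSerializer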
Example 17
Source File: NotifyRMEventPublisher.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.resourcemanager.notify

import java.util.concurrent.ScheduledThreadPoolExecutor

import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.resourcemanager.event.notify._
import org.apache.zookeeper.ZooDefs.Ids
import org.apache.zookeeper._
import org.json4s._
import org.json4s.jackson.Serialization.{read, write}

import scala.collection.JavaConversions._


class NotifyRMEventPublisher(val topic: String, val zk: ZooKeeper) extends TopicPublisher[NotifyRMEvent] with Logging {

  implicit val formats = DefaultFormats + NotifyRMEventSerializer
  val historyRoot = "/dwc_events_history"
  val historyScheduler = new ScheduledThreadPoolExecutor(1)

  try {
    if (zk.exists("/" + topic, false) == null) zk.create("/" + topic, new Array[Byte](0), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT)
    if (zk.exists(historyRoot, false) == null) zk.create(historyRoot, new Array[Byte](0), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT)
    if (zk.exists(historyRoot + "/" + topic, false) == null) zk.create(historyRoot + "/" + topic, new Array[Byte](0), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT)
  } catch {
    case e: KeeperException => error(s"Failed to create topic[$topic]: ", e)
  }


  def publish(event: NotifyRMEvent): Unit = {
    val moduleScope = event.moduleName + "_" + event.eventScope.toString
    val path = "/" + topic + "/" + moduleScope
    val historyPath = historyRoot + path
    if (zk.exists(path, false) == null) {
      zk.create(path, serialize(event), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT)
      zk.create(historyPath, new Array[Byte](0), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT)
    } else {
      // should merge with old event
      val oldEvent = deserialize(zk.getData(path, false, null))
      zk.setData(path, serialize(event.merge(oldEvent)), -1)
    }

    recordHistory(event, historyPath)
  }

  def recordHistory(event: NotifyRMEvent, historyPath: String) = {
    info(Thread.currentThread() + " starting to record event to history asynchronously")
    historyScheduler.submit(new Runnable {
      override def run(): Unit = Utils.tryAndError({
        event match {
          //for special events, don't record
          case moduleUnregisterEvent: ModuleUnregisterEvent =>
            zk.getChildren(historyPath, false).foreach { eventIndex =>
              deserialize(zk.getData(historyPath + "/" + eventIndex, false, null)) match {
                case e: ModuleInstanceEvent if e.moduleInstance.equals(moduleUnregisterEvent.moduleInstance) =>
                  zk.delete(historyPath + "/" + eventIndex, -1)
                case _ =>
              }
            }
          case userReleasedEvent: UserReleasedEvent => deleteByTicketId(userReleasedEvent.ticketId)
          case clearPrdUsedEvent: ClearPrdUsedEvent => deleteByTicketId(clearPrdUsedEvent.ticketId)
          case clearUsedEvent: ClearUsedEvent => deleteByTicketId(clearUsedEvent.ticketId)
          //for normal events, do record
          case ticketIdEvent: TicketIdEvent => zk.create(historyPath + "/" + ticketIdEvent.ticketId, serialize(event), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL)
          case _ => zk.create(historyPath + "/event", serialize(event), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL)
        }
        info(Thread.currentThread() + " finished recording event to history asynchronously")
      })

      def deleteByTicketId(ticketId: String) = zk.getChildren(historyPath, false).foreach { eventIndex =>
        if (eventIndex.startsWith(ticketId)) zk.delete(historyPath + "/" + eventIndex, -1)
      }
    })
  }

  def serialize(event: NotifyRMEvent): Array[Byte] = {
    val serialized = write(event)
    info(Thread.currentThread() + " serialized event, ready to publish: " + serialized)
    serialized.getBytes
  }

  private def deserialize(bytes: Array[Byte]) = read[NotifyRMEvent](new String(bytes))

  def remove(event: NotifyRMEvent) = ZKUtil.deleteRecursive(zk, "/" + topic + "/" + event.moduleName + "_" + event.eventScope.toString)
}

object NotifyRMEventPublisher {
  def apply(topic: String, zk: ZooKeeper): NotifyRMEventPublisher = new NotifyRMEventPublisher(topic, zk)

  def apply(topic: String): NotifyRMEventPublisher = new NotifyRMEventPublisher(topic, ZookeeperUtils.getOrCreateZookeeper())
}