com.fasterxml.jackson.dataformat.yaml.YAMLFactory Scala Examples

The following examples show how to use com.fasterxml.jackson.dataformat.yaml.YAMLFactory. Each example notes the open-source project it comes from, its source file, and its license in the header above the code.
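The common pattern across all of these examples is the same: construct an ObjectMapper backed by a YAMLFactory, register DefaultScalaModule so Scala case classes and collections bind correctly, then call readValue. A minimal, self-contained sketch of that pattern (the ServerConfig case class and the YAML string are illustrative only, not taken from any of the projects below):

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// Hypothetical config shape used only for this sketch.
case class ServerConfig(host: String, port: Int)

object YamlQuickStart {
  def main(args: Array[String]): Unit = {
    // YAML-backed mapper: YAMLFactory swaps Jackson's JSON parser for a YAML one.
    val mapper = new ObjectMapper(new YAMLFactory())
    mapper.registerModule(DefaultScalaModule) // needed for case classes, Options, Scala collections

    val yaml =
      """host: localhost
        |port: 8080
        |""".stripMargin

    val config = mapper.readValue(yaml, classOf[ServerConfig])
    println(config) // ServerConfig(localhost,8080)
  }
}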
Example 1
Source File: AppConfig.scala    From odsc-east-realish-predictions   with Apache License 2.0
package com.twilio.open.odsc.realish.config

import java.io.File

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object AppConfig {
  private val mapper = new ObjectMapper(new YAMLFactory)
  mapper.registerModule(DefaultScalaModule)

  def parse(configPath: String): AppConfig = {
    mapper.readValue(new File(configPath), classOf[AppConfig])
  }

}

@SerialVersionUID(100L)
case class AppConfig(
  sparkAppConfig: SparkAppConfig,
  streamingQueryConfig: StreamingQueryConfig
) extends Serializable

@SerialVersionUID(100L)
case class SparkAppConfig(
  appName: String,
  core: Map[String, String]
) extends Serializable

trait KafkaConsumerConfig {
  val topic: String
  val subscriptionType: String
  val conf: Map[String, String]
}

@SerialVersionUID(100L)
case class ConsumerConfig(
  topic: String,
  subscriptionType: String,
  conf: Map[String, String]
) extends KafkaConsumerConfig with Serializable

@SerialVersionUID(100L)
case class StreamingQueryConfig(
  streamName: String,
  triggerInterval: String,
  triggerEnabled: Boolean,
  windowInterval: String,
  watermarkInterval: String
) extends Serializable 
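A hedged usage sketch for this loader. The YAML layout and the file path below are illustrative assumptions; the keys follow the case class field names above under Jackson's default property naming.

// Hypothetical application.yaml matching AppConfig above (illustrative only):
//
// sparkAppConfig:
//   appName: realish-predictions
//   core:
//     spark.master: "local[*]"
// streamingQueryConfig:
//   streamName: predictions
//   triggerInterval: 30 seconds
//   triggerEnabled: true
//   windowInterval: 5 minutes
//   watermarkInterval: 1 minute

val config: AppConfig = AppConfig.parse("conf/application.yaml") // path is an assumption
println(config.sparkAppConfig.appName)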
Example 2
Source File: YamlProjectOperationInfoParser.scala    From rug   with GNU General Public License v3.0
package com.atomist.project.common.yaml

import java.util.regex.{Pattern, PatternSyntaxException}

import com.atomist.param._
import com.atomist.project.common.template.{InvalidTemplateException, TemplateBasedProjectOperationInfo}
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import org.apache.commons.lang3.builder.ReflectionToStringBuilder

import scala.util.{Failure, Success, Try}


object YamlProjectOperationInfoParser {

  private val mapper = new ObjectMapper(new YAMLFactory()) with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  @throws[InvalidYamlDescriptorException]
  def parse(yaml: String): TemplateBasedProjectOperationInfo = {
    if (yaml == null || "".equals(yaml))
      throw new InvalidYamlDescriptorException("YAML content required in template metadata file")

    Try(mapper.readValue(yaml, classOf[BoundProjectOperationInfo])) match {
      case s: Success[BoundProjectOperationInfo] =>
        val badPatterns = s.value.parameters.flatMap(p => patternError(p))
        if (badPatterns.nonEmpty)
          throw new InvalidYamlDescriptorException(s"Bad regexp patterns: ${badPatterns.mkString(",")}")
        s.value
      case f: Failure[BoundProjectOperationInfo] =>
        throw new InvalidYamlDescriptorException(s"Failed to parse YAML [$yaml]: ${f.exception.getMessage}", f.exception)
    }
  }

  private def patternError(p: Parameter): Option[String] = {
    try {
      Pattern.compile(p.getPattern)
      None
    } catch {
      case pse: PatternSyntaxException => Some(s"${p.getName}: Bad regular expression pattern: ${pse.getMessage}")
    }
  }
}

private class BoundProjectOperationInfo extends TemplateBasedProjectOperationInfo {

  @JsonProperty("name")
  var name: String = _

  @JsonProperty("description")
  var description: String = _

  @JsonProperty("template_name")
  var templateName: String = _

  @JsonProperty("type")
  var _templateType: String = _

  override def templateType: Option[String] =
    if (_templateType == null || "".equals(_templateType)) None
    else Some(_templateType)

  @JsonProperty("parameters")
  private var _params: Seq[Parameter] = Nil

  @JsonProperty("tags")
  private var _tags: Seq[TagHolder] = Nil

  override def parameters: Seq[Parameter] = _params

  override def tags: Seq[Tag] = _tags.map(tw => tw.toTag)

  override def toString = ReflectionToStringBuilder.toString(this)
}

private class TagHolder {

  @JsonProperty
  var name: String = _

  @JsonProperty
  var description: String = _

  def toTag = Tag(name, description)
}

class InvalidYamlDescriptorException(msg: String, ex: Throwable = null) extends InvalidTemplateException(msg, ex) 
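A hedged sketch of invoking the parser. The descriptor below is hypothetical; its keys follow the @JsonProperty annotations on BoundProjectOperationInfo, and an empty or malformed document raises InvalidYamlDescriptorException as the code above shows.

val descriptor =
  """name: my-editor
    |description: Adds a standard README
    |template_name: readme.mustache
    |type: mustache
    |tags:
    |  - name: documentation
    |    description: Documentation editors
    |""".stripMargin

val info = YamlProjectOperationInfoParser.parse(descriptor)
println(info.templateType) // Some(mustache)
println(info)              // ReflectionToStringBuilder dump of the bound fields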
Example 3
Source File: SwaggerAPI.scala    From swagger-check   with MIT License
package de.leanovate.swaggercheck.schema

import java.io.InputStream

import com.fasterxml.jackson.annotation.{JsonCreator, JsonProperty}
import com.fasterxml.jackson.core.JsonFactory
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.{DeserializationFeature, JsonNode, MappingJsonFactory, ObjectMapper}
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import de.leanovate.swaggercheck.schema.jackson.JsonSchemaModule
import de.leanovate.swaggercheck.schema.model.{Definition, Parameter}

import scala.collection.JavaConverters._
import scala.io.Source

@JsonDeserialize(builder = classOf[SwaggerAPIBuilder])
case class SwaggerAPI(
                       basePath: Option[String],
                       paths: Map[String, Map[String, Operation]],
                       definitions: Map[String, Definition]
                     )

object SwaggerAPI {
  val jsonMapper = objectMapper(new MappingJsonFactory())
  val yamlMapper = objectMapper(new YAMLFactory())

  def parse(jsonOrYaml: String): SwaggerAPI = {
    val mapper = if (jsonOrYaml.trim().startsWith("{")) jsonMapper else yamlMapper
    mapper.readValue(jsonOrYaml, classOf[SwaggerAPI])
  }

  def parse(swaggerInput: InputStream): SwaggerAPI = {
    parse(Source.fromInputStream(swaggerInput).mkString)
  }

  def objectMapper(jsonFactory: JsonFactory): ObjectMapper = {
    val mapper = new ObjectMapper(jsonFactory)
    mapper.registerModule(DefaultScalaModule)
    mapper.registerModule(JsonSchemaModule)
    mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
    mapper
  }
}

class SwaggerAPIBuilder @JsonCreator()(
                                        @JsonProperty("basePath") basePath: Option[String],
                                        @JsonProperty("consumes") consumes: Option[Seq[String]],
                                        @JsonProperty("produces") produces: Option[Seq[String]],
                                        @JsonProperty("paths") paths: Option[Map[String, JsonNode]],
                                        @JsonProperty("definitions") definitions: Option[Map[String, Definition]],
                                        @JsonProperty("parameters") globalParameters: Option[Map[String, Parameter]]
                                      ) {
  def build(): SwaggerAPI = {
    val defaultConsumes = consumes.map(_.toSet).getOrElse(Set.empty)
    val defaultProduces = produces.map(_.toSet).getOrElse(Set.empty)
    SwaggerAPI(basePath,
      paths.getOrElse(Map.empty).map {
        case (path, pathDefinition) =>
          val defaultParameters = Option(pathDefinition.get("parameters")).map {
             node =>
               node.iterator().asScala.map {
                 element => SwaggerAPI.jsonMapper.treeToValue(element, classOf[OperationParameter])
               }.toSeq
          }.getOrElse(Seq.empty)

          basePath.map(_ + path).getOrElse(path) -> pathDefinition.fields().asScala.filter(_.getKey != "parameters").map {
            entry =>
              val operation = SwaggerAPI.jsonMapper.treeToValue(entry.getValue, classOf[Operation])
              entry.getKey.toUpperCase -> operation.withDefaults(defaultParameters, defaultConsumes, defaultProduces).resolveGlobalParameters(globalParameters.getOrElse(Map()))
          }.toMap
      },
      definitions.getOrElse(Map.empty))
  }
} 
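Because parse trims the input and checks whether it starts with "{", the same entry point accepts either JSON or YAML and routes to the matching mapper. A hedged usage sketch; the resource path is an assumption, not part of the project's sources.

// Hypothetical resource path on the classpath.
val api = SwaggerAPI.parse(getClass.getResourceAsStream("/swagger.yaml"))
println(api.basePath)
println(api.paths.keySet)

// The String overload sniffs the first non-blank character, so a JSON spec goes
// through the same entry point and lands on the JSON-backed mapper:
// SwaggerAPI.parse("""{"swagger": "2.0", "paths": {}}""")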
Example 4
Source File: AppConfig.scala    From spark-summit-2018   with GNU General Public License v3.0
package com.twilio.open.streaming.trend.discovery.config

import java.io.File

import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

sealed trait Configuration extends Product with Serializable

@SerialVersionUID(101L)
case class AppConfiguration(
  @JsonProperty appName: String,
  @JsonProperty triggerInterval: String, // "30 seconds", "1 minute"
  @JsonProperty outputMode: String,
  @JsonProperty checkpointPath: String,
  @JsonProperty("windowInterval") windowIntervalMinutes: Long,
  @JsonProperty("watermarkInterval") watermarkIntervalMinutes: Long,
  @JsonProperty("core") sparkCoreConfig: Map[String, String],
  @JsonProperty callEventsTopic: KafkaReaderOrWriterConfig)
  extends Configuration with Serializable

trait KafkaConfig {
  val topic: String
  val subscriptionType: String
  val conf: Map[String, String]
}

case class KafkaReaderOrWriterConfig(
  override val topic: String,
  override val subscriptionType: String,
  override val conf: Map[String, String]) extends KafkaConfig with Serializable

object AppConfig {

  private val mapper = new ObjectMapper(new YAMLFactory)
  mapper.registerModule(DefaultScalaModule)

  @volatile
  private var config: AppConfiguration = _

  def apply(resourcePath: String): AppConfiguration = {
    if (config == null)
      synchronized {
        if (config == null)
          config = parse(resourcePath)
      }
    config
  }

  private def parse(resourcePath: String): AppConfiguration = {
    mapper.readValue(new File(resourcePath), classOf[AppConfiguration])
  }

} 
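AppConfig here is a lazily initialized, double-checked singleton, so repeated calls return the same cached AppConfiguration. A hedged usage sketch (the path is illustrative):

val conf: AppConfiguration = AppConfig("conf/app.yaml") // first call parses the file
val again = AppConfig("conf/app.yaml")                  // later calls return the cached instance
println(conf.appName)
println(conf.windowIntervalMinutes)

Note that the cache keys on initialization rather than on the path: once config is set, subsequent calls return it regardless of the argument.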
Example 5
Source File: CirceYaml.scala    From bazel-deps   with MIT License
package com.github.johnynek.bazel_deps

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import io.circe.jackson.CirceJsonModule
import io.circe.{Decoder, Json, ParsingFailure, Parser}
import scala.util.control.NonFatal


object Yaml extends Parser {
  private[this] val mapper = new ObjectMapper(new YAMLFactory()).registerModule(CirceJsonModule)
  private[this] val factory = mapper.getFactory
  override def parse(input: String): Either[ParsingFailure, Json] =
    try {
      Right(mapper.readValue(factory.createParser(input), classOf[Json]))
    } catch {
      case NonFatal(error) => Left(ParsingFailure(error.getMessage, error))
    }
} 
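Yaml extends circe's Parser, so downstream code can treat YAML exactly like JSON: parse to a Json value, or decode straight to a case class once a Decoder is in scope. A minimal sketch, assuming circe-generic is on the classpath for automatic Decoder derivation; the Dep case class is hypothetical.

import io.circe.generic.auto._ // assumption: circe-generic dependency available

case class Dep(group: String, artifact: String, version: String)

val yaml =
  """group: org.scala-lang
    |artifact: scala-library
    |version: 2.13.14
    |""".stripMargin

Yaml.parse(yaml) match {
  case Right(json)   => println(json.noSpaces)
  case Left(failure) => println(s"bad YAML: ${failure.message}")
}

// decode comes from circe's Parser trait, so parsing and decoding compose for free:
println(Yaml.decode[Dep](yaml)) // Right(Dep(org.scala-lang,scala-library,2.13.14))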
Example 6
Source File: WebappTestSupports.scala    From pizza-auth-3   with MIT License
package moe.pizza.auth.webapp

import java.net.{Socket, InetSocketAddress, ServerSocket}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import moe.pizza.auth.config.ConfigFile.ConfigFile

import scala.concurrent.{Future, Await}
import scala.io.Source
import scala.util.Try
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global


object WebappTestSupports {
  val OM = new ObjectMapper(new YAMLFactory())
  OM.registerModule(DefaultScalaModule)

  def readTestConfig(): ConfigFile = {
    val config = Source
      .fromURL(getClass.getResource("/config.yml"))
      .getLines()
      .mkString("\n")
    val conf = OM.readValue[ConfigFile](config, classOf[ConfigFile])
    conf
  }

} 
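This helper simply hands tests a parsed ConfigFile from the classpath. A hedged sketch of calling it inside a test body, assuming /config.yml is present on the test classpath as the helper requires:

val conf: ConfigFile = WebappTestSupports.readTestConfig()
// Which fields are available depends on moe.pizza.auth.config.ConfigFile.ConfigFile;
// the helper only guarantees that /config.yml was found and deserialized.
assert(conf != null)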
Example 7
Source File: AppConfig.scala    From odsc-west-streaming-trends   with GNU General Public License v3.0
package com.twilio.open.streaming.trend.discovery.config

import java.io.File

import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

sealed trait Configuration extends Product with Serializable

@SerialVersionUID(101L)
case class AppConfiguration(
  @JsonProperty appName: String,
  @JsonProperty triggerInterval: String, // "30 seconds", "1 minute"
  @JsonProperty outputMode: String,
  @JsonProperty checkpointPath: String,
  @JsonProperty("windowInterval") windowIntervalMinutes: Long,
  @JsonProperty("watermarkInterval") watermarkIntervalMinutes: Long,
  @JsonProperty("core") sparkCoreConfig: Map[String, String],
  @JsonProperty callEventsTopic: KafkaReaderOrWriterConfig)
  extends Configuration with Serializable

trait KafkaConfig {
  val topic: String
  val subscriptionType: String
  val conf: Map[String, String]
}

case class KafkaReaderOrWriterConfig(
  override val topic: String,
  override val subscriptionType: String,
  override val conf: Map[String, String]) extends KafkaConfig with Serializable

object AppConfig {

  private val mapper = new ObjectMapper(new YAMLFactory)
  mapper.registerModule(DefaultScalaModule)

  @volatile
  private var config: AppConfiguration = _

  def apply(resourcePath: String): AppConfiguration = {
    if (config == null)
      synchronized {
        if (config == null)
          config = parse(resourcePath)
      }
    config
  }

  private def parse(resourcePath: String): AppConfiguration = {
    mapper.readValue(new File(resourcePath), classOf[AppConfiguration])
  }

} 
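The code is identical to Example 4. For reference, here is a hedged sketch of the YAML shape AppConfiguration expects: property names follow the @JsonProperty annotations, so windowInterval and watermarkInterval map to the *Minutes fields and core maps to sparkCoreConfig. All values below are illustrative, and since AppConfig's mapper is private, the sketch builds its own equivalent mapper to read from a string.

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

val yaml =
  """appName: trend-discovery
    |triggerInterval: 30 seconds
    |outputMode: append
    |checkpointPath: /tmp/checkpoints
    |windowInterval: 5
    |watermarkInterval: 2
    |core:
    |  spark.master: "local[*]"
    |callEventsTopic:
    |  topic: call-events
    |  subscriptionType: subscribe
    |  conf:
    |    kafka.bootstrap.servers: "localhost:9092"
    |""".stripMargin

val m = new ObjectMapper(new YAMLFactory)
m.registerModule(DefaultScalaModule)
val conf = m.readValue(yaml, classOf[AppConfiguration])
println(conf.sparkCoreConfig("spark.master")) // local[*]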
Example 8
Source File: DataLoader.scala    From amaterasu   with Apache License 2.0
package org.apache.amaterasu.leader.utilities

import java.io.{File, FileInputStream}
import java.nio.file.{Files, Paths}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.amaterasu.common.configuration.ClusterConfig
import org.apache.amaterasu.common.dataobjects.{ActionData, ExecData, TaskData}
import org.apache.amaterasu.common.execution.dependencies.{Dependencies, PythonDependencies}
import org.apache.amaterasu.common.logging.Logging
import org.apache.amaterasu.common.runtime.Environment
import org.apache.mesos.protobuf.ByteString
import org.yaml.snakeyaml.Yaml

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.io.Source


object DataLoader extends Logging {

  val mapper = new ObjectMapper()
  mapper.registerModule(DefaultScalaModule)

  val ymlMapper = new ObjectMapper(new YAMLFactory())
  ymlMapper.registerModule(DefaultScalaModule)

  def getTaskData(actionData: ActionData, env: String): ByteString = {

    val srcFile = actionData.src
    val src = Source.fromFile(s"repo/src/$srcFile").mkString
    val envValue = Source.fromFile(s"repo/env/$env/job.yml").mkString

    val envData = ymlMapper.readValue(envValue, classOf[Environment])

    val data = mapper.writeValueAsBytes(TaskData(src, envData, actionData.groupId, actionData.typeId, actionData.exports))
    ByteString.copyFrom(data)

  }

  def getExecutorData(env: String, clusterConf: ClusterConfig): ByteString = {

    // loading the job configuration
    val envValue = Source.fromFile(s"repo/env/$env/job.yml").mkString //TODO: change this to YAML
    val envData = ymlMapper.readValue(envValue, classOf[Environment])
    // loading all additional configurations
    val files = new File(s"repo/env/$env/").listFiles().filter(_.isFile).filter(_.getName != "job.yml")
    val config = files.map(yamlToMap).toMap
    // loading the job's dependencies
    var depsData: Dependencies = null
    var pyDepsData: PythonDependencies = null
    if (Files.exists(Paths.get("repo/deps/jars.yml"))) {
      val depsValue = Source.fromFile(s"repo/deps/jars.yml").mkString
      depsData = ymlMapper.readValue(depsValue, classOf[Dependencies])
    }
    if (Files.exists(Paths.get("repo/deps/python.yml"))) {
      val pyDepsValue = Source.fromFile(s"repo/deps/python.yml").mkString
      pyDepsData = ymlMapper.readValue(pyDepsValue, classOf[PythonDependencies])
    }
    val data = mapper.writeValueAsBytes(ExecData(envData, depsData, pyDepsData, config))
    ByteString.copyFrom(data)
  }

  def yamlToMap(file: File): (String, Map[String, Any]) = {

    val yaml = new Yaml()
    val conf = yaml.load(new FileInputStream(file)).asInstanceOf[java.util.Map[String, Any]].asScala.toMap

    (file.getName.replace(".yml",""), conf)
  }

}

class ConfMap[String,  T <: ConfMap[String, T]] extends mutable.ListMap[String, Either[String, T]]
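DataLoader combines two mappers: a YAML-backed one for reading repo configuration and a plain JSON one for serializing the assembled data objects into the ByteString payload. A reduced sketch of that round trip, using a stand-in case class instead of Amaterasu's Environment/ExecData:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// Stand-in for Environment/TaskData, used only for this sketch.
case class Env(name: String, master: String)

object RoundTripSketch {
  def main(args: Array[String]): Unit = {
    val jsonMapper = new ObjectMapper()
    jsonMapper.registerModule(DefaultScalaModule)

    val yamlMapper = new ObjectMapper(new YAMLFactory())
    yamlMapper.registerModule(DefaultScalaModule)

    // 1. Read YAML configuration (in DataLoader this comes from repo/env/<env>/job.yml).
    val env = yamlMapper.readValue("name: test\nmaster: local[*]\n", classOf[Env])

    // 2. Serialize the resulting object as JSON bytes for transport
    //    (DataLoader wraps these bytes in a protobuf ByteString).
    val bytes = jsonMapper.writeValueAsBytes(env)
    println(new String(bytes)) // {"name":"test","master":"local[*]"}
  }
}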