com.fasterxml.jackson.core.JsonParseException Scala Examples

The following examples show how to use com.fasterxml.jackson.core.JsonParseException in Scala. Each example comes from an open-source project; the source file, project, and license are noted above each snippet. A minimal sketch of the common catch pattern follows, before the project examples.
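As orientation, here is a minimal, self-contained sketch of the pattern the examples below share: parse a JSON string with Jackson and handle the JsonParseException thrown on malformed input. The object name JsonParseExceptionSketch and the helper parseOrNone are illustrative and not taken from any of the listed projects.

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}

object JsonParseExceptionSketch {
  private val mapper = new ObjectMapper()

  // Returns None instead of propagating the JsonParseException raised on malformed input.
  def parseOrNone(input: String): Option[JsonNode] =
    try {
      Some(mapper.readTree(input))
    } catch {
      case _: JsonParseException => None
    }

  def main(args: Array[String]): Unit = {
    println(parseOrNone("""{"name":"test"}""")) // Some({"name":"test"})
    println(parseOrNone("""{"name":test"}"""))  // None: the unquoted token triggers a JsonParseException
  }
}

The project examples apply the same idea in different settings: replaying Spark event logs line by line, mapping the exception to an HTTP 400 response in an Akka HTTP route, asserting on it in tests, and falling back to JNothing when decoding Kafka messages.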
Example 1
Source File: ReplayListenerBus.scala    From drizzle-spark    with Apache License 2.0
package org.apache.spark.scheduler

import java.io.{InputStream, IOException}

import scala.io.Source

import com.fasterxml.jackson.core.JsonParseException
import org.json4s.jackson.JsonMethods._

import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.ReplayListenerBus._
import org.apache.spark.util.JsonProtocol


// A SparkListenerBus that can be used to replay events from serialized event data.
private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {

  def replay(
      logData: InputStream,
      sourceName: String,
      maybeTruncated: Boolean = false,
      eventsFilter: ReplayEventsFilter = SELECT_ALL_FILTER): Unit = {

    var currentLine: String = null
    var lineNumber: Int = 0

    try {
      val lineEntries = Source.fromInputStream(logData)
        .getLines()
        .zipWithIndex
        .filter { case (line, _) => eventsFilter(line) }

      while (lineEntries.hasNext) {
        try {
          val entry = lineEntries.next()

          currentLine = entry._1
          lineNumber = entry._2 + 1

          postToAll(JsonProtocol.sparkEventFromJson(parse(currentLine)))
        } catch {
          case jpe: JsonParseException =>
            // We can only ignore an exception from the last line of a file that might be
            // truncated. The last entry may not be the very last line in the event log, but we
            // treat it as such in a best-effort attempt to replay the given input.
            if (!maybeTruncated || lineEntries.hasNext) {
              throw jpe
            } else {
              logWarning(s"Got JsonParseException from log file $sourceName" +
                s" at line $lineNumber, the file might not have finished writing cleanly.")
            }
        }
      }
    } catch {
      case ioe: IOException =>
        throw ioe
      case e: Exception =>
        logError(s"Exception parsing Spark event log: $sourceName", e)
        logError(s"Malformed line #$lineNumber: $currentLine\n")
    }
  }

}


private[spark] object ReplayListenerBus {

  type ReplayEventsFilter = (String) => Boolean

  // utility filter that selects all event logs during replay
  val SELECT_ALL_FILTER: ReplayEventsFilter = { (eventString: String) => true }
} 
Example 2
Source File: S2GraphMutateRoute.scala    From incubator-s2graph    with Apache License 2.0
package org.apache.s2graph.http

import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpResponse, StatusCodes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{ExceptionHandler, Route}
import com.fasterxml.jackson.core.JsonParseException
import org.apache.s2graph.core.rest.RequestParser
import org.apache.s2graph.core.storage.MutateResponse
import org.apache.s2graph.core.{GraphElement, S2Graph}
import org.slf4j.LoggerFactory
import play.api.libs.json.{JsValue, Json}

import scala.concurrent.{ExecutionContext, Future}

trait S2GraphMutateRoute extends PlayJsonSupport {

  val s2graph: S2Graph
  val logger = LoggerFactory.getLogger(this.getClass)

  lazy val parser = new RequestParser(s2graph)

  lazy val exceptionHandler = ExceptionHandler {
    case ex: JsonParseException => complete(StatusCodes.BadRequest -> ex.getMessage)
    case ex: java.lang.IllegalArgumentException => complete(StatusCodes.BadRequest -> ex.getMessage)
  }

  lazy val mutateVertex = path("vertex" / Segments) { params =>
    implicit val ec = s2graph.ec

    val (operation, serviceNameOpt, columnNameOpt) = params match {
      case operation :: serviceName :: columnName :: Nil => (operation, Option(serviceName), Option(columnName))
      case operation :: Nil => (operation, None, None)
      case _ => throw new RuntimeException("invalid params")
    }

    entity(as[JsValue]) { payload =>
      val future = vertexMutate(payload, operation, serviceNameOpt, columnNameOpt).map(Json.toJson(_))

      complete(future)
    }
  }

  lazy val mutateEdge = path("edge" / Segment) { operation =>
    implicit val ec = s2graph.ec

    entity(as[JsValue]) { payload =>
      val future = edgeMutate(payload, operation, withWait = true).map(Json.toJson(_))

      complete(future)
    }
  }

  def vertexMutate(jsValue: JsValue,
                   operation: String,
                   serviceNameOpt: Option[String] = None,
                   columnNameOpt: Option[String] = None,
                   withWait: Boolean = true)(implicit ec: ExecutionContext): Future[Seq[Boolean]] = {
    val vertices = parser.toVertices(jsValue, operation, serviceNameOpt, columnNameOpt)

    val verticesToStore = vertices.filterNot(_.isAsync)

    s2graph.mutateVertices(verticesToStore, withWait).map(_.map(_.isSuccess))
  }

  def edgeMutate(elementsWithTsv: Seq[(GraphElement, String)], withWait: Boolean)(implicit ec: ExecutionContext): Future[Seq[Boolean]] = {
    val elementWithIdxs = elementsWithTsv.zipWithIndex
    val (elementSync, elementAsync) = elementWithIdxs.partition { case ((element, tsv), idx) => !element.isAsync }

    val retToSkip = elementAsync.map(_._2 -> MutateResponse.Success)
    val (elementsToStore, _) = elementSync.map(_._1).unzip
    val elementsIdxToStore = elementSync.map(_._2)

    s2graph.mutateElements(elementsToStore, withWait).map { mutateResponses =>
      elementsIdxToStore.zip(mutateResponses) ++ retToSkip
    }.map(_.sortBy(_._1).map(_._2.isSuccess))
  }

  def edgeMutate(jsValue: JsValue, operation: String, withWait: Boolean)(implicit ec: ExecutionContext): Future[Seq[Boolean]] = {
    val edgesWithTsv = parser.parseJsonFormat(jsValue, operation)
    edgeMutate(edgesWithTsv, withWait)
  }

  // expose routes
  lazy val mutateRoute: Route =
    post {
      concat(
        handleExceptions(exceptionHandler) {
          mutateVertex
        },
        handleExceptions(exceptionHandler) {
          mutateEdge
        }
      )
    }

} 
Example 3
Source File: JsonRecordFactorySpec.scala    From hydra    with Apache License 2.0
package hydra.kafka.producer

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.databind.ObjectMapper
import hydra.core.ingest.HydraRequest
import hydra.core.ingest.RequestParams.{
  HYDRA_KAFKA_TOPIC_PARAM,
  HYDRA_RECORD_KEY_PARAM
}
import hydra.core.protocol.MissingMetadataException
import hydra.core.transport.AckStrategy
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike

import scala.concurrent.ExecutionContext.Implicits.global


class JsonRecordFactorySpec
    extends Matchers
    with AnyFunSpecLike
    with ScalaFutures {

  describe("When using the JsonRecordFactory") {

    it("errors with no topic on the request") {
      val request = HydraRequest("123", """{"name":test"}""")
      val rec = JsonRecordFactory.build(request)
      whenReady(rec.failed)(_ shouldBe an[MissingMetadataException])
    }

    it("handles invalid json") {
      val request = HydraRequest("123", """{"name":test"}""")
        .withMetadata(HYDRA_KAFKA_TOPIC_PARAM -> "test-topic")
      val rec = JsonRecordFactory.build(request)
      whenReady(rec.failed)(_ shouldBe a[JsonParseException])
    }

    it("handles valid json") {
      val request = HydraRequest("123", """{"name":"test"}""")
        .withMetadata(HYDRA_KAFKA_TOPIC_PARAM -> "test-topic")
      val rec = JsonRecordFactory.build(request)
      val node = new ObjectMapper().reader().readTree("""{"name":"test"}""")
      whenReady(rec)(
        _ shouldBe JsonRecord("test-topic", None, node, AckStrategy.NoAck)
      )
    }

    it("builds") {
      val request = HydraRequest("123", """{"name":"test"}""")
        .withMetadata(HYDRA_RECORD_KEY_PARAM -> "{$.name}")
        .withMetadata(HYDRA_KAFKA_TOPIC_PARAM -> "test-topic")
      whenReady(JsonRecordFactory.build(request)) { msg =>
        msg.destination shouldBe "test-topic"
        msg.key shouldBe "test"
        msg.payload shouldBe new ObjectMapper()
          .reader()
          .readTree("""{"name":"test"}""")
      }
    }

    it("throws an error if no topic is in the request") {
      val request = HydraRequest("123", """{"name":"test"}""")
      whenReady(JsonRecordFactory.build(request).failed)(
        _ shouldBe a[MissingMetadataException]
      )
    }
  }
} 
Example 4
Source File: ParseVendorExtensionsTest.scala    From play-swagger    with MIT License
package de.zalando.swagger

import java.io.File

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.dataformat.yaml.snakeyaml.parser.ParserException
import de.zalando.apifirst.Application.ApiCall
import de.zalando.apifirst.Http.{GET, POST, PUT}
import org.scalatest.{FunSpec, MustMatchers}

class ParseVendorExtensionsTest extends FunSpec with MustMatchers with ExpectedResults {

  val ok = new File(resourcesPath + "extensions/extensions.ok.yaml")
  val nok = new File(resourcesPath + "extensions/extensions.nok.yaml")
  val hypermediaOk = new File(resourcesPath + "extensions/hypermedia.ok.yaml")
  val hypermediaNOk1 = new File(resourcesPath + "extensions/hypermedia.nok1.yaml")
  val hypermediaNOk2 = new File(resourcesPath + "extensions/hypermedia.nok2.yaml")
  val errorMapping = new File(resourcesPath + "extensions/error_mapping.yaml")

  describe("The swagger parser") {
    it("should read valid vendor extensions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(ok)
      swagger.info.vendorExtensions contains "x-info-extension" mustBe true
      swagger.paths("/").vendorExtensions contains "x-path-extension" mustBe true
      swagger.paths("/").get.vendorExtensions contains "x-operation-extension" mustBe true
      swagger.paths("/").get.responses("200").vendorExtensions contains "x-response-extension" mustBe true
      swagger.tags.head.vendorExtensions contains "x-tag-extension" mustBe true
      swagger.securityDefinitions("internalApiKey").vendorExtensions contains "x-security-extension" mustBe true
    }
    it("should reject invalid vendor extensions") {
      intercept[JsonParseException] {
        StrictYamlParser.parse(nok)
      }.getClass mustBe classOf[JsonParseException]
    }
    it("should read hypermedia definitions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(hypermediaOk)
      val expected = Map("resource created" ->
        Map("resource updated" -> Map("condition" -> "some rule to show the transition"), "subresource added" -> null),
        "resource updated" -> Map("subresource added" -> Map("condition" -> ""),
          "self" -> Map("condition" -> "non-empty rule")), "resource deleted" -> Map("self" -> null),
        "subresource added" -> Map("resource updated" -> null, "self" -> null, "resource deleted" -> null))
      swagger.transitions.nonEmpty mustBe true
      swagger.transitions mustEqual expected
      swagger.paths("/").get.responses("200").targetState mustEqual Some("resource created")
      swagger.paths("/").get.responses("default").targetState mustEqual None
    }
    it("should reject hypermedia definitions without well-formed definition") {
      val exception = intercept[JsonParseException] {
        StrictYamlParser.parse(hypermediaNOk1)
      }
      exception.getMessage mustEqual "Malformed transition definitions"
    }
    it("should reject hypermedia definitions with incorrect initial state") {
      intercept[ParserException] {
        StrictYamlParser.parse(hypermediaNOk2)
      }.getClass mustBe classOf[ParserException]
    }

    it("should read error mappings and assign right preference to them") {
      val (uri, model) = StrictYamlParser.parse(errorMapping)
      val ast = ModelConverter.fromModel(errorMapping.toURI, model, Option(errorMapping))
      val expectedForPUT = Map(
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "403" -> List(classOf[java.lang.SecurityException]),
        "405" -> List(classOf[java.lang.IllegalStateException]),
        "400" -> List(classOf[java.util.NoSuchElementException])
      )
      val expectedForPOST = Map(
        "403" -> List(classOf[java.lang.SecurityException]),
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "405" -> List(classOf[java.lang.IllegalStateException])
      )
      ast.calls.foreach {
        case ApiCall(POST, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPOST
        case ApiCall(PUT, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPUT
      }
    }
  }
} 
Example 5
Source File: ReplayListenerBus.scala    From iolap    with Apache License 2.0
package org.apache.spark.scheduler

import java.io.{InputStream, IOException}

import scala.io.Source

import com.fasterxml.jackson.core.JsonParseException
import org.json4s.jackson.JsonMethods._

import org.apache.spark.Logging
import org.apache.spark.util.JsonProtocol


// A SparkListenerBus that can be used to replay events from serialized event data.
private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {

  def replay(
      logData: InputStream,
      sourceName: String,
      maybeTruncated: Boolean = false): Unit = {
    var currentLine: String = null
    var lineNumber: Int = 1
    try {
      val lines = Source.fromInputStream(logData).getLines()
      while (lines.hasNext) {
        currentLine = lines.next()
        try {
          postToAll(JsonProtocol.sparkEventFromJson(parse(currentLine)))
        } catch {
          case jpe: JsonParseException =>
            // We can only ignore an exception from the last line of a file that might be truncated
            if (!maybeTruncated || lines.hasNext) {
              throw jpe
            } else {
              logWarning(s"Got JsonParseException from log file $sourceName" +
                s" at line $lineNumber, the file might not have finished writing cleanly.")
            }
        }
        lineNumber += 1
      }
    } catch {
      case ioe: IOException =>
        throw ioe
      case e: Exception =>
        logError(s"Exception parsing Spark event log: $sourceName", e)
        logError(s"Malformed line #$lineNumber: $currentLine\n")
    }
  }

} 
Example 6
Source File: ReplayListenerBus.scala    From spark1.52    with Apache License 2.0
package org.apache.spark.scheduler

import java.io.{InputStream, IOException}

import scala.io.Source

import com.fasterxml.jackson.core.JsonParseException
import org.json4s.jackson.JsonMethods._

import org.apache.spark.Logging
import org.apache.spark.util.JsonProtocol


// A SparkListenerBus that can be used to replay events from serialized event data.
private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {

  def replay(
      logData: InputStream,
      sourceName: String,
      maybeTruncated: Boolean = false): Unit = {
    var currentLine: String = null
    var lineNumber: Int = 1
    try {
      val lines = Source.fromInputStream(logData).getLines()
      while (lines.hasNext) {
        currentLine = lines.next()
        try {
          postToAll(JsonProtocol.sparkEventFromJson(parse(currentLine)))
        } catch {
          case jpe: JsonParseException =>
            // We can only ignore an exception from the last line of a file that might be truncated
            if (!maybeTruncated || lines.hasNext) {
              throw jpe
            } else {
              logWarning(s"Got JsonParseException from log file $sourceName" +
                s" at line $lineNumber, the file might not have finished writing cleanly.")
            }
        }
        lineNumber += 1
      }
    } catch {
      case ioe: IOException =>
        throw ioe
      case e: Exception =>
        logError(s"Exception parsing Spark event log: $sourceName", e)
        logError(s"Malformed line #$lineNumber: $currentLine\n")
    }
  }

} 
Example 7
Source File: ReplayListenerBus.scala    From BigDatalog    with Apache License 2.0
package org.apache.spark.scheduler

import java.io.{InputStream, IOException}

import scala.io.Source

import com.fasterxml.jackson.core.JsonParseException
import org.json4s.jackson.JsonMethods._

import org.apache.spark.Logging
import org.apache.spark.util.JsonProtocol


// A SparkListenerBus that can be used to replay events from serialized event data.
private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {

  def replay(
      logData: InputStream,
      sourceName: String,
      maybeTruncated: Boolean = false): Unit = {
    var currentLine: String = null
    var lineNumber: Int = 1
    try {
      val lines = Source.fromInputStream(logData).getLines()
      while (lines.hasNext) {
        currentLine = lines.next()
        try {
          postToAll(JsonProtocol.sparkEventFromJson(parse(currentLine)))
        } catch {
          case jpe: JsonParseException =>
            // We can only ignore an exception from the last line of a file that might be truncated
            if (!maybeTruncated || lines.hasNext) {
              throw jpe
            } else {
              logWarning(s"Got JsonParseException from log file $sourceName" +
                s" at line $lineNumber, the file might not have finished writing cleanly.")
            }
        }
        lineNumber += 1
      }
    } catch {
      case ioe: IOException =>
        throw ioe
      case e: Exception =>
        logError(s"Exception parsing Spark event log: $sourceName", e)
        logError(s"Malformed line #$lineNumber: $currentLine\n")
    }
  }

} 
Example 8
Source File: ParseVendorExtensionsTest.scala    From api-first-hand    with MIT License
package de.zalando.swagger

import java.io.File

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.dataformat.yaml.snakeyaml.parser.ParserException
import de.zalando.apifirst.Application.ApiCall
import de.zalando.apifirst.Http.{ GET, POST, PUT }
import org.scalatest.{ FunSpec, MustMatchers }

class ParseVendorExtensionsTest extends FunSpec with MustMatchers with ExpectedResults {

  val ok = new File(resourcesPath + "extensions/extensions.ok.yaml")
  val nok = new File(resourcesPath + "extensions/extensions.nok.yaml")
  val hypermediaOk = new File(resourcesPath + "extensions/hypermedia.ok.yaml")
  val hypermediaNOk1 = new File(resourcesPath + "extensions/hypermedia.nok1.yaml")
  val hypermediaNOk2 = new File(resourcesPath + "extensions/hypermedia.nok2.yaml")
  val errorMapping = new File(resourcesPath + "extensions/error_mapping.yaml")

  describe("The swagger parser") {
    it("should read valid vendor extensions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(ok)
      swagger.info.vendorExtensions contains "x-info-extension" mustBe true
      swagger.paths("/").vendorExtensions contains "x-path-extension" mustBe true
      swagger.paths("/").get.vendorExtensions contains "x-operation-extension" mustBe true
      swagger.paths("/").get.responses("200").vendorExtensions contains "x-response-extension" mustBe true
      swagger.tags.head.vendorExtensions contains "x-tag-extension" mustBe true
      swagger.securityDefinitions("internalApiKey").vendorExtensions contains "x-security-extension" mustBe true
    }
    it("should read hypermedia definitions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(hypermediaOk)
      val expected = Map(
        "resource created" ->
          Map("resource updated" -> Map("condition" -> "some rule to show the transition"), "subresource added" -> null),
        "resource updated" -> Map(
          "subresource added" -> Map("condition" -> ""),
          "self" -> Map("condition" -> "non-empty rule")
        ), "resource deleted" -> Map("self" -> null),
        "subresource added" -> Map("resource updated" -> null, "self" -> null, "resource deleted" -> null)
      )
      swagger.transitions.nonEmpty mustBe true
      swagger.transitions mustEqual expected
      swagger.paths("/").get.responses("200").targetState mustEqual Some("resource created")
      swagger.paths("/").get.responses("default").targetState mustEqual None
    }
    it("should reject hypermedia definitions without well-formed definition") {
      val exception = intercept[JsonParseException] {
        StrictYamlParser.parse(hypermediaNOk1)
      }
      exception.getMessage mustEqual "Malformed transition definitions"
    }
    it("should reject hypermedia definitions with incorrect initial state") {
      intercept[ParserException] {
        StrictYamlParser.parse(hypermediaNOk2)
      }.getClass mustBe classOf[ParserException]
    }

    it("should read error mappings and assign right preference to them") {
      val (uri, model) = StrictYamlParser.parse(errorMapping)
      val ast = ModelConverter.fromModel(errorMapping.toURI, model, Option(errorMapping))
      val expectedForPUT = Map(
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "403" -> List(classOf[java.lang.SecurityException]),
        "405" -> List(classOf[java.lang.IllegalStateException]),
        "400" -> List(classOf[java.util.NoSuchElementException])
      )
      val expectedForPOST = Map(
        "403" -> List(classOf[java.lang.SecurityException]),
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "405" -> List(classOf[java.lang.IllegalStateException])
      )
      ast.calls.foreach {
        case ApiCall(POST, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPOST
        case ApiCall(PUT, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPUT
      }
    }
  }
} 
Example 9
Source File: KafkaJsonConsumer.scala    From coral    with Apache License 2.0
package io.coral.lib

import java.util.Properties

import com.fasterxml.jackson.core.JsonParseException
import kafka.consumer._
import kafka.serializer.{Decoder, DefaultDecoder}
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonConsumer {
	def apply() = new KafkaJsonConsumer(JsonDecoder)
	def apply(decoder: Decoder[JValue]) = new KafkaJsonConsumer(decoder)
}

class KafkaJsonConsumer(decoder: Decoder[JValue]) {
	def stream(topic: String, properties: Properties): KafkaJsonStream = {
		val connection = Consumer.create(new ConsumerConfig(properties))
		val stream = connection.createMessageStreamsByFilter(
			Whitelist(topic), 1, new DefaultDecoder, decoder)(0)
		new KafkaJsonStream(connection, stream)
	}
}

class KafkaJsonStream(connection: ConsumerConnector, stream: KafkaStream[Array[Byte], JValue]) {
	private lazy val it = stream.iterator

	// this method relies on a timeout value having been set
	@inline def hasNextInTime: Boolean =
		try {
			it.hasNext
		} catch {
			case cte: ConsumerTimeoutException => false
		}

	@inline def next: JValue = it.next.message
	@inline def commitOffsets = connection.commitOffsets
}

object JsonDecoder extends Decoder[JValue] {
	val encoding = "UTF8"

	override def fromBytes(bytes: Array[Byte]): JValue = {
		val s = new String(bytes, encoding)
		try {
			parse(s)
		} catch {
			case jpe: JsonParseException => JNothing
		}
	}
}
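
For context on the comment above hasNextInTime ("this method relies on a timeout value having been set"): the old Kafka high-level consumer only throws ConsumerTimeoutException when consumer.timeout.ms is configured; without it, it.hasNext blocks indefinitely. The sketch below is a hedged usage example, not part of the coral project; the topic name, ZooKeeper address, and group id are placeholders.

import java.util.Properties

import io.coral.lib.KafkaJsonConsumer
import org.json4s.JsonAST.JNothing

object KafkaJsonConsumerUsage {
	def main(args: Array[String]): Unit = {
		val props = new Properties()
		props.setProperty("zookeeper.connect", "localhost:2181") // placeholder ZooKeeper connect string
		props.setProperty("group.id", "json-consumer-example")   // placeholder consumer group
		props.setProperty("consumer.timeout.ms", "5000")         // required so hasNextInTime can return false

		val stream = KafkaJsonConsumer().stream("events", props)
		while (stream.hasNextInTime) {
			val json = stream.next
			if (json != JNothing) println(json) // JNothing marks a message that failed to parse
		}
		stream.commitOffsets
	}
}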