java.nio.charset.StandardCharsets Scala Examples

The following examples show how to use java.nio.charset.StandardCharsets. Each example is taken from an open-source project; the source file, project, and license are noted above it.
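All of these examples reduce to the same core pattern: pass an explicit Charset (usually StandardCharsets.UTF_8) when converting between String and Array[Byte], instead of relying on the platform default. A minimal, self-contained sketch of that pattern (the object name is arbitrary):

import java.nio.charset.StandardCharsets

object CharsetBasics extends App {
  val text = "héllo wörld"
  // Encode with an explicit charset rather than the platform default
  val bytes: Array[Byte] = text.getBytes(StandardCharsets.UTF_8)
  // Decode the same bytes back, again naming the charset explicitly
  val roundTripped = new String(bytes, StandardCharsets.UTF_8)
  assert(roundTripped == text)
}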
Example 1
Source File: package.scala    From franklin   with Apache License 2.0
package com.azavea.franklin.api

import com.azavea.franklin.api.commands.ApiConfig
import com.azavea.stac4s._
import eu.timepit.refined.types.string.NonEmptyString

import java.net.URLEncoder
import java.nio.charset.StandardCharsets

package object implicits {

  implicit class combineNonEmptyString(s: NonEmptyString) {

    // Combining a non-empty string should always return a non-empty string
    def +(otherString: String): NonEmptyString =
      NonEmptyString.unsafeFrom(s.value.concat(otherString))
  }

  implicit class StacItemWithCog(item: StacItem) {

    def updateLinksWithHost(apiConfig: ApiConfig) = {
      val updatedLinks = item.links.map(_.addServerHost(apiConfig))
      val updatedAssets = item.assets.mapValues { asset =>
        asset.href.startsWith("/") match {
          case true => asset.copy(href = s"${apiConfig.apiHost}${asset.href}")
          case _    => asset
        }
      }
      item.copy(links = updatedLinks, assets = updatedAssets)
    }

    def addTilesLink(apiHost: String, collectionId: String, itemId: String) = {
      val cogAsset = item.assets.values.exists { asset =>
        asset._type match {
          case Some(`image/cog`) => true
          case _                 => false
        }
      }
      val updatedLinks = cogAsset match {
        case true => {
          val encodedItemId       = URLEncoder.encode(itemId, StandardCharsets.UTF_8.toString)
          val encodedCollectionId = URLEncoder.encode(collectionId, StandardCharsets.UTF_8.toString)
          val tileLink: StacLink = StacLink(
            s"$apiHost/collections/$encodedCollectionId/items/$encodedItemId/tiles",
            StacLinkType.VendorLinkType("tiles"),
            Some(`application/json`),
            Some("Tile URLs for Item")
          )
          tileLink :: item.links
        }
        case _ => item.links
      }
      (item.copy(links = updatedLinks))
    }

  }

  implicit class UpdatedStacLink(link: StacLink) {

    def addServerHost(apiConfig: ApiConfig) = {
      link.href.startsWith("/") match {
        case true => link.copy(href = s"${apiConfig.apiHost}${link.href}")
        case _    => link
      }
    }
  }

  implicit class StacCollectionWithTiles(collection: StacCollection) {

    def addTilesLink(apiHost: String): StacCollection = {
      val encodedCollectionId = URLEncoder.encode(collection.id, StandardCharsets.UTF_8.toString)
      val tileLink = StacLink(
        s"$apiHost/collections/$encodedCollectionId/tiles",
        StacLinkType.VendorLinkType("tiles"),
        Some(`application/json`),
        Some("Tile URLs for Collection")
      )
      collection.copy(links = tileLink :: collection.links)
    }

    def maybeAddTilesLink(enableTiles: Boolean, apiHost: String) =
      if (enableTiles) addTilesLink(apiHost) else collection

    def updateLinksWithHost(apiConfig: ApiConfig) = {
      val updatedLinks = collection.links.map(_.addServerHost(apiConfig))
      collection.copy(links = updatedLinks)
    }
  }
} 
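The charset-specific part of Example 1 is the URLEncoder.encode(value, StandardCharsets.UTF_8.toString) call used to build the tile links. A minimal sketch of that call in isolation, e.g. in the REPL (the item id is a made-up value):

import java.net.URLEncoder
import java.nio.charset.StandardCharsets

val encodedItemId = URLEncoder.encode("my item id/1", StandardCharsets.UTF_8.toString)
// encodedItemId == "my+item+id%2F1"

URLEncoder.encode(String, String) expects the charset name, which is why the example calls .toString on the constant; a Charset-typed overload only exists since Java 10.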
Example 2
Source File: KeyUtils.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.jwt

import java.io.{File, FileInputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.security.cert.CertificateFactory
import java.security.interfaces.{ECPublicKey, RSAPrivateKey, RSAPublicKey}
import java.security.spec.PKCS8EncodedKeySpec
import java.security.KeyFactory

import com.daml.lf.data.TryOps.Bracket.bracket
import scalaz.Show
import scalaz.syntax.show._

import scala.util.Try

object KeyUtils {
  final case class Error(what: Symbol, message: String)

  object Error {
    implicit val showInstance: Show[Error] =
      Show.shows(e => s"KeyUtils.Error: ${e.what}, ${e.message}")
  }

  private val mimeCharSet = StandardCharsets.ISO_8859_1

  
  /** Renders the given RSA public keys as a JSON Web Key Set (JWKS) document, keyed by key id. */
  def generateJwks(keys: Map[String, RSAPublicKey]): String = {
    def generateKeyEntry(keyId: String, key: RSAPublicKey): String =
      s"""    {
         |      "kid": "$keyId",
         |      "kty": "RSA",
         |      "alg": "RS256",
         |      "use": "sig",
         |      "e": "${java.util.Base64.getUrlEncoder
           .encodeToString(key.getPublicExponent.toByteArray)}",
         |      "n": "${java.util.Base64.getUrlEncoder.encodeToString(key.getModulus.toByteArray)}"
         |    }""".stripMargin

    s"""
       |{
       |  "keys": [
       |${keys.toList.map { case (keyId, key) => generateKeyEntry(keyId, key) }.mkString(",\n")}
       |  ]
       |}
    """.stripMargin
  }
} 
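A short usage sketch for generateJwks above, assuming a freshly generated 2048-bit RSA key pair; the key id "test-key" is a made-up value:

import java.nio.charset.StandardCharsets
import java.security.KeyPairGenerator
import java.security.interfaces.RSAPublicKey

val generator = KeyPairGenerator.getInstance("RSA")
generator.initialize(2048)
val publicKey = generator.generateKeyPair().getPublic.asInstanceOf[RSAPublicKey]

// Render a JWKS document for the key and encode it with an explicit charset
val jwks: String = KeyUtils.generateJwks(Map("test-key" -> publicKey))
val jwksBytes: Array[Byte] = jwks.getBytes(StandardCharsets.UTF_8)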
Example 3
Source File: JacksonMessageWriter.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.status.api.v1

import java.io.OutputStream
import java.lang.annotation.Annotation
import java.lang.reflect.Type
import java.nio.charset.StandardCharsets
import java.text.SimpleDateFormat
import java.util.{Calendar, SimpleTimeZone}
import javax.ws.rs.Produces
import javax.ws.rs.core.{MediaType, MultivaluedMap}
import javax.ws.rs.ext.{MessageBodyWriter, Provider}

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}


@Provider
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{

  val mapper = new ObjectMapper() {
    override def writeValueAsString(t: Any): String = {
      super.writeValueAsString(t)
    }
  }
  mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule)
  mapper.enable(SerializationFeature.INDENT_OUTPUT)
  mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
  mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)

  override def isWriteable(
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Boolean = {
      true
  }

  override def writeTo(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType,
      multivaluedMap: MultivaluedMap[String, AnyRef],
      outputStream: OutputStream): Unit = {
    t match {
      case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8))
      case _ => mapper.writeValue(outputStream, t)
    }
  }

  override def getSize(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Long = {
    -1L
  }
}

private[spark] object JacksonMessageWriter {
  def makeISODateFormat: SimpleDateFormat = {
    val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'")
    val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT"))
    iso8601.setCalendar(cal)
    iso8601
  }
} 
Example 4
Source File: Enigma.scala    From matcher   with MIT License
package com.wavesplatform.dex.crypto

import java.nio.charset.StandardCharsets
import java.security.NoSuchAlgorithmException
import java.security.spec.InvalidKeySpecException

import javax.crypto.spec.{PBEKeySpec, SecretKeySpec}
import javax.crypto.{Cipher, SecretKeyFactory}

import scala.util.control.NonFatal

object Enigma {

  private[this] val KeySalt           = "0495c728-1614-41f6-8ac3-966c22b4a62d".getBytes(StandardCharsets.UTF_8)
  private[this] val AES               = "AES"
  private[this] val Algorithm         = AES + "/ECB/PKCS5Padding"
  private[this] val HashingIterations = 999999
  private[this] val KeySizeBits       = 128

  def hashPassword(password: Array[Char],
                   salt: Array[Byte],
                   iterations: Int = HashingIterations,
                   keyLength: Int = KeySizeBits,
                   hashingAlgorithm: String = "PBKDF2WithHmacSHA512"): Array[Byte] =
    try {
      val keyFactory = SecretKeyFactory.getInstance(hashingAlgorithm)
      val keySpec    = new PBEKeySpec(password, salt, iterations, keyLength)
      val key        = keyFactory.generateSecret(keySpec)
      key.getEncoded
    } catch {
      case e @ (_: NoSuchAlgorithmException | _: InvalidKeySpecException) => throw new RuntimeException("Password hashing error", e)
    }

  def prepareDefaultKey(password: String): SecretKeySpec = new SecretKeySpec(hashPassword(password.toCharArray, KeySalt), AES)

  def encrypt(key: SecretKeySpec, bytes: Array[Byte]): Array[Byte] =
    try {
      val cipher = Cipher.getInstance(Algorithm)
      cipher.init(Cipher.ENCRYPT_MODE, key)
      cipher.doFinal(bytes)
    } catch {
      case NonFatal(e) => throw new RuntimeException("Encrypt error", e)
    }

  def decrypt(key: SecretKeySpec, encryptedBytes: Array[Byte]): Array[Byte] =
    try {
      val cipher: Cipher = Cipher.getInstance(Algorithm)
      cipher.init(Cipher.DECRYPT_MODE, key)
      cipher.doFinal(encryptedBytes)
    } catch {
      case NonFatal(e) => throw new RuntimeException("Decrypt error", e)
    }
} 
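A round-trip usage sketch for the Enigma object above; the password and plaintext are made-up values. Note that hashPassword defaults to 999,999 PBKDF2 iterations, so prepareDefaultKey is intentionally slow:

import java.nio.charset.StandardCharsets

val key       = Enigma.prepareDefaultKey("correct horse battery staple")
val encrypted = Enigma.encrypt(key, "sensitive text".getBytes(StandardCharsets.UTF_8))
val decrypted = new String(Enigma.decrypt(key, encrypted), StandardCharsets.UTF_8)
// decrypted == "sensitive text"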
Example 5
Source File: TestHdfsFileSystem.scala    From ohara   with Apache License 2.0
package oharastream.ohara.client.filesystem.hdfs

import java.io.{BufferedWriter, File, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import oharastream.ohara.client.filesystem.{FileFilter, FileSystem, FileSystemTestBase}
import oharastream.ohara.common.exception.FileSystemException
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestHdfsFileSystem extends FileSystemTestBase {
  private[this] val tempFolder: File = CommonUtils.createTempFolder("local_hdfs")

  private[this] val hdfsURL: String = new File(tempFolder.getAbsolutePath).toURI.toString

  override protected val fileSystem: FileSystem = FileSystem.hdfsBuilder.url(hdfsURL).build

  override protected val rootDir: String = tempFolder.toString

  // override this method because the Local HDFS doesn't support append()
  @Test
  override def testAppend(): Unit = {
    val file = randomFile()
    fileSystem.create(file).close()

    intercept[FileSystemException] {
      fileSystem.append(file)
    }.getMessage shouldBe "Not supported"
  }

  // override this method because the Local HDFS doesn't support append()
  @Test
  override def testDeleteFileThatHaveBeenRead(): Unit = {
    val file              = randomFile(rootDir)
    val data: Seq[String] = Seq("123", "456")
    val writer            = new BufferedWriter(new OutputStreamWriter(fileSystem.create(file), StandardCharsets.UTF_8))
    try data.foreach(line => {
      writer.append(line)
      writer.newLine()
    })
    finally writer.close()

    fileSystem.exists(file) shouldBe true
    fileSystem.readLines(file) shouldBe data
    fileSystem.delete(file)
    fileSystem.exists(file) shouldBe false
    fileSystem.listFileNames(rootDir, FileFilter.EMPTY).size shouldBe 0
  }
} 
Example 6
Source File: AesSpec.scala    From iotchain   with MIT License
package jbok.crypto.cipher

import java.nio.charset.StandardCharsets

import cats.effect.IO
import jbok.common.CommonSpec
import scodec.bits._
import tsec.cipher.symmetric.jca._
import tsec.cipher.symmetric.{Iv, PlainText}

class AesSpec extends CommonSpec {

  "AES-CBC" should {

    implicit val encryptor = AES128CBC.genEncryptor[IO]

    "correctly evaluate for the test vectors" in {
      // https://tools.ietf.org/html/rfc3602#section-4
      val testVectors = Table[String, String, ByteVector, String](
        ("key", "iv", "plaintext", "ciphertext"),
        ("06a9214036b8a15b512e03d534120006",
         "3dafba429d9eb430b422da802c9fac41",
         ByteVector("Single block msg".getBytes(StandardCharsets.US_ASCII)),
         "e353779c1079aeb82708942dbe77181a"),
        ("c286696d887c9aa0611bbb3e2025a45a",
         "562e17996d093d28ddb3ba695a2e6f58",
         hex"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f",
         "d296cd94c2cccf8a3a863028b5e1dc0a7586602d253cfff91b8266bea6d61ab1"),
        ("6c3ea0477630ce21a2ce334aa746c2cd",
         "c782dc4c098c66cbd9cd27d825682c81",
         ByteVector("This is a 48-byte message (exactly 3 AES blocks)".getBytes(StandardCharsets.US_ASCII)),
         "d0a02b3836451753d493665d33f0e8862dea54cdb293abc7506939276772f8d5021c19216bad525c8579695d83ba2684"),
        ("56e47a38c5598974bc46903dba290349",
         "8ce82eefbea0da3c44699ed7db51b7d9",
         hex"a0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedf",
         "c30e32ffedc0774e6aff6af0869f71aa0f3af07a9a31a9c684db207eb0ef8e4e35907aa632c3ffdf868bb7b29d3d46ad83ce9f9a102ee99d49a53e87f4c3da55")
      )

      forAll(testVectors) {
        case (k, i, plaintext, _) =>
          val key    = ByteVector.fromValidHex(k)
          val iv     = ByteVector.fromValidHex(i)
          val jcaKey = AES128CBC.buildKey[IO](key.toArray).unsafeRunSync()
          val encrypted =
            AES128CBC.encrypt[IO](PlainText(plaintext.toArray), jcaKey, Iv[AES128CBC](iv.toArray)).unsafeRunSync()
          val decrypted = AES128CBC.decrypt[IO](encrypted, jcaKey).unsafeRunSync()

          decrypted shouldBe PlainText(plaintext.toArray)
      }
    }
  }

  "AES-CTR" should {
    "correctly evaluate for the test vectors" in {
      // http://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38a.pdf Appendix F.5
      val testVectors = Table[String, String, String, String](
        ("key", "iv", "plaintext", "ciphertext"),
        ("2b7e151628aed2a6abf7158809cf4f3c",
         "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
         "6bc1bee22e409f96e93d7e117393172aae2d8a571e03ac9c9eb76fac45af8e5130c81c46a35ce411e5fbc1191a0a52eff69f2445df4f9b17ad2b417be66c3710",
         "874d6191b620e3261bef6864990db6ce9806f66b7970fdff8617187bb9fffdff5ae4df3edbd5d35e5b4f09020db03eab1e031dda2fbe03d1792170a0f3009cee")
      )

      implicit val encryptor = AES128CTR.genEncryptor[IO]

      forAll(testVectors) { (k, i, p, c) =>
        val key       = ByteVector.fromValidHex(k)
        val iv        = ByteVector.fromValidHex(i)
        val plaintext = ByteVector.fromValidHex(p)
        val jcaKey    = AES128CTR.buildKey[IO](key.toArray).unsafeRunSync()

        val encrypted = AES128CTR.encrypt[IO](PlainText(plaintext.toArray), jcaKey, Iv[AES128CTR](iv.toArray)).unsafeRunSync()
        AES128CTR.decrypt[IO](encrypted, jcaKey).unsafeRunSync() shouldBe PlainText(plaintext.toArray)
      }
    }
  }
} 
Example 7
Source File: SCryptSpec.scala    From iotchain   with MIT License
package jbok.crypto

import java.nio.charset.StandardCharsets

import jbok.common.CommonSpec
import jbok.crypto.password._
import scodec.bits.ByteVector

class SCryptSpec extends CommonSpec {
  "SCrypt" should {
    "correctly evaluate for the test vectors" in {
      // https://datatracker.ietf.org/doc/rfc7914/?include_text=1
      val testVectors = Table[String, String, Int, Int, Int, Int, String](
        ("passphrase", "salt", "n", "r", "p", "dklen", "derivedKey"),
        ("", "", 16, 1, 1, 64,
          "77d6576238657b203b19ca42c18a0497f16b4844e3074ae8dfdffa3fede21442fcd0069ded0948f8326a753a0fc81f17e8d3e0fb2e0d3628cf35e20c38d18906"),

        ("password", "NaCl", 1024, 8, 16, 64,
          "fdbabe1c9d3472007856e7190d01e9fe7c6ad7cbc8237830e77376634b3731622eaf30d92e22a3886ff109279d9830dac727afb94a83ee6d8360cbdfa2cc0640"),

        // takes a bit too long to be run on the CI
        // leaving this around as it's a valid test if we want to examine this function in the future
        // ("pleaseletmein", "SodiumChloride", 1048576, 8, 1, 64,
        //   "2101cb9b6a511aaeaddbbe09cf70f881ec568d574a2ffd4dabe5ee9820adaa478e56fd8f4ba5d09ffa1c6d927c40f4c337304049e8a952fbcbf45c6fa77a41a4"),

        ("pleaseletmein", "SodiumChloride", 16384, 8, 1, 64,
          "7023bdcb3afd7348461c06cd81fd38ebfda8fbba904f8e3ea9b543f6545da1f2d5432955613f0fcf62d49705242a9af9e61e85dc0d651e40dfcf017b45575887")
      )

      forAll(testVectors) { case (pass, s, n, r, p, dklen, dk) =>
        val salt = ByteVector(s.getBytes(StandardCharsets.US_ASCII))
        val derivedKey = ByteVector.fromValidHex(dk)


        SCrypt.derive(pass, salt, n, r, p, dklen) shouldBe derivedKey
      }
    }
  }
} 
Example 8
Source File: package.scala    From iotchain   with MIT License
package jbok

import java.nio.charset.StandardCharsets
import java.util.Random

import jbok.crypto.hash._
import scodec.bits.ByteVector
import jbok.crypto.signature.SignatureInstances

trait StringSyntax {
  implicit final def stringSyntax(a: String): StringOps = new StringOps(a)
}

final class StringOps(val a : String) extends AnyVal {
  def utf8bytes: ByteVector = ByteVector(a.getBytes(StandardCharsets.UTF_8))
}

trait CryptoSyntax extends CryptoHasherSyntax with StringSyntax
trait CryptoInstances extends CryptoHasherInstances with SignatureInstances

package object crypto extends CryptoSyntax with CryptoInstances {
  def randomByteString(random: Random, length: Int): ByteVector =
    ByteVector(randomByteArray(random, length))

  def randomByteArray(random: Random, length: Int): Array[Byte] = {
    val bytes = Array.ofDim[Byte](length)
    random.nextBytes(bytes)
    bytes
  }
} 
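Because the crypto package object mixes in StringSyntax, the utf8bytes extension becomes available with a single wildcard import. A minimal usage sketch:

import jbok.crypto._
import scodec.bits.ByteVector

val bytes: ByteVector = "hello".utf8bytes
// equivalent to ByteVector("hello".getBytes(java.nio.charset.StandardCharsets.UTF_8))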
Example 9
Source File: CustomReceiver.scala    From drizzle-spark   with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself if isStopped() returns false
  }

  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 10
Source File: GraphLoaderSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.graphx

import java.io.File
import java.io.FileOutputStream
import java.io.OutputStreamWriter
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils

class GraphLoaderSuite extends SparkFunSuite with LocalSparkContext {

  test("GraphLoader.edgeListFile") {
    withSpark { sc =>
      val tmpDir = Utils.createTempDir()
      val graphFile = new File(tmpDir.getAbsolutePath, "graph.txt")
      val writer = new OutputStreamWriter(new FileOutputStream(graphFile), StandardCharsets.UTF_8)
      for (i <- (1 until 101)) writer.write(s"$i 0\n")
      writer.close()
      try {
        val graph = GraphLoader.edgeListFile(sc, tmpDir.getAbsolutePath)
        val neighborAttrSums = graph.aggregateMessages[Int](
          ctx => ctx.sendToDst(ctx.srcAttr),
          _ + _)
        assert(neighborAttrSums.collect.toSet === Set((0: VertexId, 100)))
      } finally {
        Utils.deleteRecursively(tmpDir)
      }
    }
  }
} 
Example 11
Source File: LibSVMRelationSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ml.source.libsvm

import java.io.File
import java.nio.charset.StandardCharsets

import com.google.common.io.Files

import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.util.Utils


class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
  // Path for dataset
  var path: String = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val lines =
      """
        |1 1:1.0 3:2.0 5:3.0
        |0
        |0 2:4.0 4:5.0 6:6.0
      """.stripMargin
    val dir = Utils.createDirectory(tempDir.getCanonicalPath, "data")
    val file = new File(dir, "part-00000")
    Files.write(lines, file, StandardCharsets.UTF_8)
    path = dir.toURI.toString
  }

  override def afterAll(): Unit = {
    try {
      Utils.deleteRecursively(new File(path))
    } finally {
      super.afterAll()
    }
  }

  test("select as sparse vector") {
    val df = spark.read.format("libsvm").load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("select as dense vector") {
    val df = spark.read.format("libsvm").options(Map("vectorType" -> "dense"))
      .load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    assert(df.count() == 3)
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[DenseVector](1)
    assert(v == Vectors.dense(1.0, 0.0, 2.0, 0.0, 3.0, 0.0))
  }

  test("select a vector with specifying the longer dimension") {
    val df = spark.read.option("numFeatures", "100").format("libsvm")
      .load(path)
    val row1 = df.first()
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(100, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("write libsvm data and read it again") {
    val df = spark.read.format("libsvm").load(path)
    val tempDir2 = new File(tempDir, "read_write_test")
    val writepath = tempDir2.toURI.toString
    // TODO: Remove requirement to coalesce by supporting multiple reads.
    df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writepath)

    val df2 = spark.read.format("libsvm").load(writepath)
    val row1 = df2.first()
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("write libsvm data failed due to invalid schema") {
    val df = spark.read.format("text").load(path)
    intercept[SparkException] {
      df.write.format("libsvm").save(path + "_2")
    }
  }

  test("select features from libsvm relation") {
    val df = spark.read.format("libsvm").load(path)
    df.select("features").rdd.map { case Row(d: Vector) => d }.first
    df.select("features").collect
  }
} 
Example 12
Source File: KPLBasedKinesisTestUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.streaming.kinesis

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

import com.amazonaws.services.kinesis.producer.{KinesisProducer => KPLProducer, KinesisProducerConfiguration, UserRecordResult}
import com.google.common.util.concurrent.{FutureCallback, Futures}

private[kinesis] class KPLBasedKinesisTestUtils extends KinesisTestUtils {
  override protected def getProducer(aggregate: Boolean): KinesisDataGenerator = {
    if (!aggregate) {
      new SimpleDataGenerator(kinesisClient)
    } else {
      new KPLDataGenerator(regionName)
    }
  }
}


private[kinesis] class KPLDataGenerator(regionName: String) extends KinesisDataGenerator {

  private lazy val producer: KPLProducer = {
    val conf = new KinesisProducerConfiguration()
      .setRecordMaxBufferedTime(1000)
      .setMaxConnections(1)
      .setRegion(regionName)
      .setMetricsLevel("none")

    new KPLProducer(conf)
  }

  override def sendData(streamName: String, data: Seq[Int]): Map[String, Seq[(Int, String)]] = {
    val shardIdToSeqNumbers = new mutable.HashMap[String, ArrayBuffer[(Int, String)]]()
    data.foreach { num =>
      val str = num.toString
      val data = ByteBuffer.wrap(str.getBytes(StandardCharsets.UTF_8))
      val future = producer.addUserRecord(streamName, str, data)
      val kinesisCallBack = new FutureCallback[UserRecordResult]() {
        override def onFailure(t: Throwable): Unit = {} // do nothing

        override def onSuccess(result: UserRecordResult): Unit = {
          val shardId = result.getShardId
          val seqNumber = result.getSequenceNumber()
          val sentSeqNumbers = shardIdToSeqNumbers.getOrElseUpdate(shardId,
            new ArrayBuffer[(Int, String)]())
          sentSeqNumbers += ((num, seqNumber))
        }
      }
      Futures.addCallback(future, kinesisCallBack)
    }
    producer.flushSync()
    shardIdToSeqNumbers.toMap
  }
} 
Example 13
Source File: FlumeTestUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.streaming.flume

import java.net.{InetSocketAddress, ServerSocket}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import java.util.Collections

import scala.collection.JavaConverters._

import org.apache.avro.ipc.NettyTransceiver
import org.apache.avro.ipc.specific.SpecificRequestor
import org.apache.commons.lang3.RandomUtils
import org.apache.flume.source.avro
import org.apache.flume.source.avro.{AvroFlumeEvent, AvroSourceProtocol}
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.socket.SocketChannel
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.handler.codec.compression.{ZlibDecoder, ZlibEncoder}

import org.apache.spark.util.Utils
import org.apache.spark.SparkConf

private[flume] class FlumeTestUtils {

  // NOTE: the Flume test-server setup members of FlumeTestUtils are elided in
  // this excerpt; only the compression channel factory is shown.

  private class CompressionChannelFactory(compressionLevel: Int)
    extends NioClientSocketChannelFactory {

    override def newChannel(pipeline: ChannelPipeline): SocketChannel = {
      val encoder = new ZlibEncoder(compressionLevel)
      pipeline.addFirst("deflater", encoder)
      pipeline.addFirst("inflater", new ZlibDecoder())
      super.newChannel(pipeline)
    }
  }

} 
Example 14
Source File: YarnShuffleIntegrationSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.yarn

import java.io.File
import java.nio.charset.StandardCharsets

import com.google.common.io.Files
import org.apache.commons.io.FileUtils
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.scalatest.Matchers

import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.network.shuffle.ShuffleTestAccessor
import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
import org.apache.spark.tags.ExtendedYarnTest


@ExtendedYarnTest
class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite {

  override def newYarnConfig(): YarnConfiguration = {
    val yarnConfig = new YarnConfiguration()
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICES, "spark_shuffle")
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICE_FMT.format("spark_shuffle"),
      classOf[YarnShuffleService].getCanonicalName)
    yarnConfig.set("spark.shuffle.service.port", "0")
    yarnConfig
  }

  test("external shuffle service") {
    val shuffleServicePort = YarnTestAccessor.getShuffleServicePort
    val shuffleService = YarnTestAccessor.getShuffleServiceInstance

    val registeredExecFile = YarnTestAccessor.getRegisteredExecutorFile(shuffleService)

    logInfo("Shuffle service port = " + shuffleServicePort)
    val result = File.createTempFile("result", null, tempDir)
    val finalState = runSpark(
      false,
      mainClassName(YarnExternalShuffleDriver.getClass),
      appArgs = Seq(result.getAbsolutePath(), registeredExecFile.getAbsolutePath),
      extraConf = Map(
        "spark.shuffle.service.enabled" -> "true",
        "spark.shuffle.service.port" -> shuffleServicePort.toString
      )
    )
    checkResult(finalState, result)
    assert(YarnTestAccessor.getRegisteredExecutorFile(shuffleService).exists())
  }
}

private object YarnExternalShuffleDriver extends Logging with Matchers {

  val WAIT_TIMEOUT_MILLIS = 10000

  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      // scalastyle:off println
      System.err.println(
        s"""
        |Invalid command line: ${args.mkString(" ")}
        |
        |Usage: ExternalShuffleDriver [result file] [registered exec file]
        """.stripMargin)
      // scalastyle:on println
      System.exit(1)
    }

    val sc = new SparkContext(new SparkConf()
      .setAppName("External Shuffle Test"))
    val conf = sc.getConf
    val status = new File(args(0))
    val registeredExecFile = new File(args(1))
    logInfo("shuffle service executor file = " + registeredExecFile)
    var result = "failure"
    val execStateCopy = new File(registeredExecFile.getAbsolutePath + "_dup")
    try {
      val data = sc.parallelize(0 until 100, 10).map { x => (x % 10) -> x }.reduceByKey{ _ + _ }.
        collect().toSet
      sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
      data should be ((0 until 10).map{x => x -> (x * 10 + 450)}.toSet)
      result = "success"
      // only one process can open a leveldb file at a time, so we copy the files
      FileUtils.copyDirectory(registeredExecFile, execStateCopy)
      assert(!ShuffleTestAccessor.reloadRegisteredExecutors(execStateCopy).isEmpty)
    } finally {
      sc.stop()
      FileUtils.deleteDirectory(execStateCopy)
      Files.write(result, status, StandardCharsets.UTF_8)
    }
  }

} 
Example 15
Source File: IOEncryptionSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.yarn

import java.io._
import java.nio.charset.StandardCharsets
import java.security.PrivilegedExceptionAction
import java.util.UUID

import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers}

import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.config._
import org.apache.spark.serializer._
import org.apache.spark.storage._

class IOEncryptionSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
  with BeforeAndAfterEach {
  private[this] val blockId = new TempShuffleBlockId(UUID.randomUUID())
  private[this] val conf = new SparkConf()
  private[this] val ugi = UserGroupInformation.createUserForTesting("testuser", Array("testgroup"))
  private[this] val serializer = new KryoSerializer(conf)

  override def beforeAll(): Unit = {
    System.setProperty("SPARK_YARN_MODE", "true")
    ugi.doAs(new PrivilegedExceptionAction[Unit]() {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        val creds = new Credentials()
        SecurityManager.initIOEncryptionKey(conf, creds)
        SparkHadoopUtil.get.addCurrentUserCredentials(creds)
      }
    })
  }

  override def afterAll(): Unit = {
    SparkEnv.set(null)
    System.clearProperty("SPARK_YARN_MODE")
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    super.afterEach()
    conf.set("spark.shuffle.compress", false.toString)
    conf.set("spark.shuffle.spill.compress", false.toString)
  }

  test("IO encryption read and write") {
    ugi.doAs(new PrivilegedExceptionAction[Unit] {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        conf.set("spark.shuffle.compress", false.toString)
        conf.set("spark.shuffle.spill.compress", false.toString)
        testYarnIOEncryptionWriteRead()
      }
    })
  }

  test("IO encryption read and write with shuffle compression enabled") {
    ugi.doAs(new PrivilegedExceptionAction[Unit] {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        conf.set("spark.shuffle.compress", true.toString)
        conf.set("spark.shuffle.spill.compress", true.toString)
        testYarnIOEncryptionWriteRead()
      }
    })
  }

  private[this] def testYarnIOEncryptionWriteRead(): Unit = {
    val plainStr = "hello world"
    val outputStream = new ByteArrayOutputStream()
    val serializerManager = new SerializerManager(serializer, conf)
    val wrappedOutputStream = serializerManager.wrapStream(blockId, outputStream)
    wrappedOutputStream.write(plainStr.getBytes(StandardCharsets.UTF_8))
    wrappedOutputStream.close()

    val encryptedBytes = outputStream.toByteArray
    val encryptedStr = new String(encryptedBytes)
    assert(plainStr !== encryptedStr)

    val inputStream = new ByteArrayInputStream(encryptedBytes)
    val wrappedInputStream = serializerManager.wrapStream(blockId, inputStream)
    val decryptedBytes = new Array[Byte](1024)
    val len = wrappedInputStream.read(decryptedBytes)
    val decryptedStr = new String(decryptedBytes, 0, len, StandardCharsets.UTF_8)
    assert(decryptedStr === plainStr)
  }
} 
Example 16
Source File: SocketInputDStream.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io._
import java.net.{ConnectException, Socket}
import java.nio.charset.StandardCharsets

import scala.reflect.ClassTag
import scala.util.control.NonFatal

import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.util.NextIterator

private[streaming]
class SocketInputDStream[T: ClassTag](
    _ssc: StreamingContext,
    host: String,
    port: Int,
    bytesToObjects: InputStream => Iterator[T],
    storageLevel: StorageLevel
  ) extends ReceiverInputDStream[T](_ssc) {

  def getReceiver(): Receiver[T] = {
    new SocketReceiver(host, port, bytesToObjects, storageLevel)
  }
}

private[streaming]
class SocketReceiver[T: ClassTag](
    host: String,
    port: Int,
    bytesToObjects: InputStream => Iterator[T],
    storageLevel: StorageLevel
  ) extends Receiver[T](storageLevel) with Logging {

  private var socket: Socket = _

  def onStart() {

    logInfo(s"Connecting to $host:$port")
    try {
      socket = new Socket(host, port)
    } catch {
      case e: ConnectException =>
        restart(s"Error connecting to $host:$port", e)
        return
    }
    logInfo(s"Connected to $host:$port")

    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      setDaemon(true)
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // in case the restart thread closes it twice
    synchronized {
      if (socket != null) {
        socket.close()
        socket = null
        logInfo(s"Closed socket to $host:$port")
      }
    }
  }

  
  /** Translates data from the input stream into '\n'-delimited lines and returns an iterator over them. */
  def bytesToLines(inputStream: InputStream): Iterator[String] = {
    val dataInputStream = new BufferedReader(
      new InputStreamReader(inputStream, StandardCharsets.UTF_8))
    new NextIterator[String] {
      protected override def getNext() = {
        val nextValue = dataInputStream.readLine()
        if (nextValue == null) {
          finished = true
        }
        nextValue
      }

      protected override def close() {
        dataInputStream.close()
      }
    }
  }
} 
Example 17
Source File: RateLimitedOutputStreamSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.streaming.util

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit._

import org.apache.spark.SparkFunSuite

class RateLimitedOutputStreamSuite extends SparkFunSuite {

  private def benchmark[U](f: => U): Long = {
    val start = System.nanoTime
    f
    System.nanoTime - start
  }

  test("write") {
    val underlying = new ByteArrayOutputStream
    val data = "X" * 41000
    val stream = new RateLimitedOutputStream(underlying, desiredBytesPerSec = 10000)
    val elapsedNs = benchmark { stream.write(data.getBytes(StandardCharsets.UTF_8)) }

    val seconds = SECONDS.convert(elapsedNs, NANOSECONDS)
    assert(seconds >= 4, s"Seconds value ($seconds) is less than 4.")
    assert(seconds <= 30, s"Took more than 30 seconds ($seconds) to write data.")
    assert(underlying.toString("UTF-8") === data)
  }
} 
Example 18
Source File: PythonRDDSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.api.python

import java.io.{ByteArrayOutputStream, DataOutputStream}
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkFunSuite

class PythonRDDSuite extends SparkFunSuite {

  test("Writing large strings to the worker") {
    val input: List[String] = List("a"*100000)
    val buffer = new DataOutputStream(new ByteArrayOutputStream)
    PythonRDD.writeIteratorToStream(input.iterator, buffer)
  }

  test("Handle nulls gracefully") {
    val buffer = new DataOutputStream(new ByteArrayOutputStream)
    // Should not throw an NPE when writing an Iterator with nulls in it
    // The correctness will be tested in Python
    PythonRDD.writeIteratorToStream(Iterator("a", null), buffer)
    PythonRDD.writeIteratorToStream(Iterator(null, "a"), buffer)
    PythonRDD.writeIteratorToStream(Iterator("a".getBytes(StandardCharsets.UTF_8), null), buffer)
    PythonRDD.writeIteratorToStream(Iterator(null, "a".getBytes(StandardCharsets.UTF_8)), buffer)
    PythonRDD.writeIteratorToStream(Iterator((null, null), ("a", null), (null, "b")), buffer)
    PythonRDD.writeIteratorToStream(Iterator(
      (null, null),
      ("a".getBytes(StandardCharsets.UTF_8), null),
      (null, "b".getBytes(StandardCharsets.UTF_8))), buffer)
  }
} 
Example 19
Source File: MasterWebUISuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.util.Date

import scala.collection.mutable.HashMap

import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, RequestKillDriver}
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}


class MasterWebUISuite extends SparkFunSuite with BeforeAndAfterAll {

  val conf = new SparkConf
  val securityMgr = new SecurityManager(conf)
  val rpcEnv = mock(classOf[RpcEnv])
  val master = mock(classOf[Master])
  val masterEndpointRef = mock(classOf[RpcEndpointRef])
  when(master.securityMgr).thenReturn(securityMgr)
  when(master.conf).thenReturn(conf)
  when(master.rpcEnv).thenReturn(rpcEnv)
  when(master.self).thenReturn(masterEndpointRef)
  val masterWebUI = new MasterWebUI(master, 0)

  override def beforeAll() {
    super.beforeAll()
    masterWebUI.bind()
  }

  override def afterAll() {
    masterWebUI.stop()
    super.afterAll()
  }

  test("kill application") {
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "app-0", appDesc, new Date(), null, Int.MaxValue)

    when(master.idToApp).thenReturn(HashMap[String, ApplicationInfo]((activeApp.id, activeApp)))

    val url = s"http://localhost:${masterWebUI.boundPort}/app/kill/"
    val body = convPostDataToString(Map(("id", activeApp.id), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify the master was called to remove the active app
    verify(master, times(1)).removeApplication(activeApp, ApplicationState.KILLED)
  }

  test("kill driver") {
    val activeDriverId = "driver-0"
    val url = s"http://localhost:${masterWebUI.boundPort}/driver/kill/"
    val body = convPostDataToString(Map(("id", activeDriverId), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify that master was asked to kill driver with the correct id
    verify(masterEndpointRef, times(1)).ask[KillDriverResponse](RequestKillDriver(activeDriverId))
  }

  private def convPostDataToString(data: Map[String, String]): String = {
    (for ((name, value) <- data) yield s"$name=$value").mkString("&")
  }

  
  /** Sends an HTTP request with the given method and optional body to the URL and returns the open connection. */
  private def sendHttpRequest(
      url: String,
      method: String,
      body: String = ""): HttpURLConnection = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    if (body.nonEmpty) {
      conn.setDoOutput(true)
      conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
      conn.setRequestProperty("Content-Length", Integer.toString(body.length))
      val out = new DataOutputStream(conn.getOutputStream)
      out.write(body.getBytes(StandardCharsets.UTF_8))
      out.close()
    }
    conn
  }
} 
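One charset-related detail in sendHttpRequest above: Content-Length is computed from body.length (the number of chars), while the bytes actually written are UTF-8 encoded, so the two only agree for ASCII bodies such as the form data in these tests. A sketch of the safer variant, measuring the encoded bytes instead:

val bodyBytes = body.getBytes(StandardCharsets.UTF_8)
conn.setRequestProperty("Content-Length", Integer.toString(bodyBytes.length))
val out = new DataOutputStream(conn.getOutputStream)
out.write(bodyBytes)
out.close()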
Example 20
Source File: Pbkdf2HMacSha256Spec.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.crypto

import java.nio.charset.StandardCharsets

import akka.util.ByteString
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import org.spongycastle.util.encoders.Hex

class Pbkdf2HMacSha256Spec extends FlatSpec with Matchers with PropertyChecks {

  "pbkdf2HMacSha256" should "correctly evaluate for the test vectors" in {

    // https://stackoverflow.com/a/5136918
    val testVectors = Table[String, String, Int, Int, String](
      ("passphrase", "salt", "c", "dklen", "derivedKey"),

      ("password", "salt", 1, 32,
        "120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b"),

      ("password", "salt", 2, 32,
        "ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43"),

      ("password", "salt", 4096, 32,
        "c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a"),

      // takes a bit too long to be run on the CI
      // leaving this around as it's a valid test if we want to examine this function in the future
      // ("password", "salt", 16777216, 32,
      //  "cf81c66fe8cfc04d1f31ecb65dab4089f7f179e89b3b0bcb17ad10e3ac6eba46"),

      ("passwordPASSWORDpassword", "saltSALTsaltSALTsaltSALTsaltSALTsalt", 4096, 40,
        "348c89dbcbd32b2f32d814b8116e84cf2b17347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9"),

      ("pass\u0000word", "sa\u0000lt", 4096, 16,
        "89b69d0516f829893c696226650a8687")
    )

    forAll(testVectors) { (pass, s, c, dklen, dk) =>
      val salt = ByteString(s.getBytes(StandardCharsets.US_ASCII))
      val derivedKey = ByteString(Hex.decode(dk))

      pbkdf2HMacSha256(pass, salt, c, dklen) shouldEqual derivedKey
    }
  }
} 
Example 21
Source File: AesCbcSpec.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.crypto

import java.nio.charset.StandardCharsets

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.prop.PropertyChecks
import org.spongycastle.util.encoders.Hex

class AesCbcSpec extends FlatSpec with Matchers with PropertyChecks {

  "AES_CBC" should "correctly evaluate for the test vectors" in {

    // https://tools.ietf.org/html/rfc3602#section-4
    val testVectors = Table[String, String, ByteString, String](
      ("key", "iv", "plaintext", "ciphertext"),

      ("06a9214036b8a15b512e03d534120006",
        "3dafba429d9eb430b422da802c9fac41",
        ByteString("Single block msg".getBytes(StandardCharsets.US_ASCII)),
        "e353779c1079aeb82708942dbe77181a"),

      ("c286696d887c9aa0611bbb3e2025a45a",
        "562e17996d093d28ddb3ba695a2e6f58",
        ByteString(Hex.decode("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f")),
        "d296cd94c2cccf8a3a863028b5e1dc0a7586602d253cfff91b8266bea6d61ab1"),

      ("6c3ea0477630ce21a2ce334aa746c2cd",
        "c782dc4c098c66cbd9cd27d825682c81",
        ByteString("This is a 48-byte message (exactly 3 AES blocks)".getBytes(StandardCharsets.US_ASCII)),
        "d0a02b3836451753d493665d33f0e8862dea54cdb293abc7506939276772f8d5021c19216bad525c8579695d83ba2684"),

      ("56e47a38c5598974bc46903dba290349",
        "8ce82eefbea0da3c44699ed7db51b7d9",
        ByteString(Hex.decode("a0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedf")),
        "c30e32ffedc0774e6aff6af0869f71aa0f3af07a9a31a9c684db207eb0ef8e4e35907aa632c3ffdf868bb7b29d3d46ad83ce9f9a102ee99d49a53e87f4c3da55")
    )

    forAll(testVectors) { (k, i, plaintext, c) =>
      val key = ByteString(Hex.decode(k))
      val iv = ByteString(Hex.decode(i))
      val ciphertext = ByteString(Hex.decode(c))

      val encrypted = AES_CBC.encrypt(key, iv, plaintext)

      // all the encrypted data in our test vectors is aligned to the block size so for each plaintext message
      // a padding of size equal to block size will be added (and needs to be removed for comparison)
      encrypted.dropRight(key.length) shouldEqual ciphertext

      AES_CBC.decrypt(key, iv, encrypted) shouldEqual Some(plaintext)
    }
  }

  it should "decrypt encrypted random values" in {
    val keyGen = Generators.getByteStringGen(16, 16)
    val ivGen = Generators.getByteStringGen(16, 16)
    val plaintextGen = Generators.getByteStringGen(1, 256)

    forAll(keyGen, ivGen, plaintextGen) { (key, iv, plaintext) =>
      val encrypted = AES_CBC.encrypt(key, iv, plaintext)
      val decrypted = AES_CBC.decrypt(key, iv, encrypted)
      decrypted shouldEqual Some(plaintext)
    }
  }
} 
Example 22
Source File: ScryptSpec.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.crypto

import java.nio.charset.StandardCharsets

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.prop.PropertyChecks
import org.spongycastle.util.encoders.Hex

class ScryptSpec extends FlatSpec with Matchers with PropertyChecks {

  "scrypt" should "correctly evaluate for the test vectors" in {

    // https://datatracker.ietf.org/doc/rfc7914/?include_text=1
    val testVectors = Table[String, String, Int, Int, Int, Int, String](
      ("passphrase", "salt", "n", "r", "p", "dklen", "derivedKey"),
      ("", "", 16, 1, 1, 64,
        "77d6576238657b203b19ca42c18a0497f16b4844e3074ae8dfdffa3fede21442fcd0069ded0948f8326a753a0fc81f17e8d3e0fb2e0d3628cf35e20c38d18906"),

      ("password", "NaCl", 1024, 8, 16, 64,
        "fdbabe1c9d3472007856e7190d01e9fe7c6ad7cbc8237830e77376634b3731622eaf30d92e22a3886ff109279d9830dac727afb94a83ee6d8360cbdfa2cc0640"),

      // takes a bit too long to be run on the CI
      // leaving this around as it's a valid test if we want to examine this function in the future
      // ("pleaseletmein", "SodiumChloride", 1048576, 8, 1, 64,
      //   "2101cb9b6a511aaeaddbbe09cf70f881ec568d574a2ffd4dabe5ee9820adaa478e56fd8f4ba5d09ffa1c6d927c40f4c337304049e8a952fbcbf45c6fa77a41a4"),

      ("pleaseletmein", "SodiumChloride", 16384, 8, 1, 64,
        "7023bdcb3afd7348461c06cd81fd38ebfda8fbba904f8e3ea9b543f6545da1f2d5432955613f0fcf62d49705242a9af9e61e85dc0d651e40dfcf017b45575887")
    )

    forAll(testVectors) { (pass, s, n, r, p, dklen, dk) =>
      val salt = ByteString(s.getBytes(StandardCharsets.US_ASCII))
      val derivedKey = ByteString(Hex.decode(dk))

      scrypt(pass, salt, n, r, p, dklen) shouldEqual derivedKey
    }
  }
} 
Example 23
Source File: Ripemd160Spec.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.crypto

import java.nio.charset.StandardCharsets

import org.scalatest.{FunSuite, Matchers}
import org.scalatest.prop.PropertyChecks
import org.spongycastle.util.encoders.Hex

class Ripemd160Spec extends FunSuite with PropertyChecks with Matchers {

  // these examples were taken from http://homes.esat.kuleuven.be/~bosselae/ripemd160.html#Outline
  val examples = Table[String, String](("input", "result"),
    ("", "9c1185a5c5e9fc54612808977ee8f548b2258d31"),
    ("a", "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe"),
    ("abc", "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc"),
    ("message digest", "5d0689ef49d2fae572b881b123a85ffa21595f36"),
    ("abcdefghijklmnopqrstuvwxyz", "f71c27109c692c1b56bbdceb5b9d2865b3708dbc"),
    ("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", "12a053384a9c0c88e405a06c27dcf49ada62eb2b"),
    ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", "b0e20b6e3116640286ed3a87a5713079b21f5189"),
    ("1234567890" * 8, "9b752e45573d4b39f4dbd3323cab82bf63326bfb"),
    ("a" * 1000000, "52783243c1697bdbe16d37f97f68f08325dc1528")
  )

  test("RIPEMD-160") {
    forAll(examples) { (input, result) =>
      val inBytes = input.getBytes(StandardCharsets.US_ASCII)
      val hash = ripemd160(inBytes)
      val encodedHash = Hex.toHexString(hash)

      encodedHash shouldEqual result
    }
  }

} 
Example 24
Source File: AsynchbasePatcher.scala    From incubator-s2graph   with Apache License 2.0
package org.apache.s2graph.core.storage.hbase

import java.lang.Integer.valueOf
import java.nio.charset.StandardCharsets
import java.util.concurrent.Callable

import net.bytebuddy.ByteBuddy
import net.bytebuddy.description.modifier.Visibility.PUBLIC
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy
import net.bytebuddy.implementation.FieldAccessor
import net.bytebuddy.implementation.MethodDelegation.to
import net.bytebuddy.implementation.bind.annotation.{SuperCall, This}
import net.bytebuddy.matcher.ElementMatchers._
import org.apache.commons.io.IOUtils
import org.hbase.async._
import org.objectweb.asm.Opcodes.{ACC_FINAL, ACC_PRIVATE, ACC_PROTECTED, ACC_PUBLIC}
import org.objectweb.asm._

import scala.collection.JavaConversions._

object AsynchbasePatcher {

  // NOTE: the patched-class definitions and the `classLoader` / reflective
  // `defineClass` handles referenced below belong to this object but are
  // elided in this excerpt.

  /** Loads an Asynchbase class by name, patching its bytecode so the class is
    * no longer final and its members are accessible, then defines the patched
    * class on the class loader. */
  private def loadClass(name: String): Class[_] = {
    classLoader.getResources(s"org/hbase/async/$name.class").toSeq.headOption match {
      case Some(url) =>
        val stream = url.openStream()
        val bytes = try { IOUtils.toByteArray(stream) } finally { stream.close() }

        // patch the bytecode so that the class is no longer final and the methods are all accessible
        val cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES)
        new ClassReader(bytes).accept(new ClassAdapter(cw) {
          override def visit(version: Int, access: Int, name: String, signature: String, superName: String, interfaces: Array[String]): Unit = {
            super.visit(version, access & ~ACC_FINAL, name, signature, superName, interfaces)
          }
          override def visitMethod(access: Int, name: String, desc: String, signature: String, exceptions: Array[String]): MethodVisitor = {
            super.visitMethod(access & ~ACC_PRIVATE & ~ACC_PROTECTED & ~ACC_FINAL | ACC_PUBLIC, name, desc, signature, exceptions)
          }
        }, 0)
        val patched = cw.toByteArray

        defineClass.setAccessible(true)
        defineClass.invoke(classLoader, s"org.hbase.async.$name", patched, valueOf(0), valueOf(patched.length)).asInstanceOf[Class[_]]
      case None =>
        throw new ClassNotFoundException(s"Could not find Asynchbase class: $name")
    }
  }
} 
Example 25
Source File: SKeyValue.scala    From incubator-s2graph   with Apache License 2.0
package org.apache.s2graph.core.storage

import java.nio.charset.StandardCharsets
import org.apache.hadoop.hbase.util.Bytes
import org.hbase.async.KeyValue


object SKeyValue {
//  val SnapshotEdgeCf = "s".getBytes(StandardCharsets.UTF_8)
  val EdgeCf = "e".getBytes(StandardCharsets.UTF_8)
  val VertexCf = "v".getBytes(StandardCharsets.UTF_8)
  val Put = 1
  val Delete = 2
  val Increment = 3
  val Default = Put
}

case class SKeyValue(table: Array[Byte],
                     row: Array[Byte],
                     cf: Array[Byte],
                     qualifier: Array[Byte],
                     value: Array[Byte],
                     timestamp: Long,
                     operation: Int = SKeyValue.Default,
                     durability: Boolean = true) {
  def toLogString = {
    Map("table" -> Bytes.toString(table), "row" -> row.toList, "cf" -> Bytes.toString(cf),
      "qualifier" -> qualifier.toList, "value" -> value.toList, "timestamp" -> timestamp,
      "operation" -> operation, "durability" -> durability).toString()
  }
  override def toString(): String = toLogString

  def toKeyValue: KeyValue = new KeyValue(row, cf, qualifier, timestamp, value)
}

trait CanSKeyValue[T] {
  def toSKeyValue(from: T): SKeyValue
}

object CanSKeyValue {
  def instance[T](f: T => SKeyValue): CanSKeyValue[T] = new CanSKeyValue[T] {
    override def toSKeyValue(from: T): SKeyValue = f.apply(from)
  }

  // For asyncbase KeyValues
  implicit val asyncKeyValue = instance[KeyValue] { kv =>
    SKeyValue(Array.empty[Byte], kv.key(), kv.family(), kv.qualifier(), kv.value(), kv.timestamp())
  }

  implicit val hbaseKeyValue = instance[org.apache.hadoop.hbase.KeyValue] { kv =>
    SKeyValue(Array.empty[Byte], kv.getRow, kv.getFamily, kv.getQualifier, kv.getValue, kv.getTimestamp)
  }

  // For asyncbase KeyValues
  implicit val sKeyValue = instance[SKeyValue](identity)

  // For hbase KeyValues
} 
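A small usage sketch for the CanSKeyValue type class above; the row, qualifier and value are made-up UTF-8 encoded strings:

import java.nio.charset.StandardCharsets

def utf8(s: String): Array[Byte] = s.getBytes(StandardCharsets.UTF_8)

val kv = SKeyValue(Array.empty[Byte], utf8("row-1"), SKeyValue.VertexCf, utf8("name"), utf8("alice"), System.currentTimeMillis())
val skv = implicitly[CanSKeyValue[SKeyValue]].toSKeyValue(kv)
// for SKeyValue itself the instance is `identity`, so skv eq kv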
Example 26
Source File: RewriteSwaggerConfigPlugin.scala    From matcher   with MIT License
import java.io.{BufferedInputStream, ByteArrayOutputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import Dependencies.Version
import org.apache.commons.compress.archivers.ArchiveStreamFactory
import org.apache.commons.io.IOUtils
import sbt.Keys._
import sbt._

// See https://github.com/swagger-api/swagger-ui/issues/5710
object RewriteSwaggerConfigPlugin extends AutoPlugin {
  override val trigger = PluginTrigger.NoTrigger
  override def projectSettings: Seq[Def.Setting[_]] =
    inConfig(Compile)(
      Seq(
        resourceGenerators += Def.task {
          val jarName       = s"swagger-ui-${Version.swaggerUi}.jar"
          val indexHtmlPath = s"META-INF/resources/webjars/swagger-ui/${Version.swaggerUi}/index.html"
          val outputFile    = resourceManaged.value / indexHtmlPath

          val html = (Compile / dependencyClasspath).value
            .find(_.data.getName == jarName)
            .flatMap(jar => fileContentFromJar(jar.data, indexHtmlPath))
            .map { new String(_, StandardCharsets.UTF_8) }

          val resource = s"$jarName:$indexHtmlPath"
          html match {
            case None => throw new RuntimeException(s"Can't find $resource")
            case Some(html) =>
              val doc = org.jsoup.parser.Parser.parse(html, "127.0.0.1")
              import scala.collection.JavaConverters._
              doc
                .body()
                .children()
                .asScala
                .find { el =>
                  el.tagName() == "script" && el.html().contains("SwaggerUIBundle")
                } match {
                case None => throw new RuntimeException("Can't patch script in index.html")
                case Some(el) =>
                  val update =
                    """
const ui = SwaggerUIBundle({
    url: "/api-docs/swagger.json",
    dom_id: '#swagger-ui',
    deepLinking: true,
    presets: [ SwaggerUIBundle.presets.apis ],
    plugins: [ SwaggerUIBundle.plugins.DownloadUrl ],
    layout: "BaseLayout",
    operationsSorter: "alpha"
});
window.ui = ui;
"""
                  // Careful! The snippet above will be inserted as a one-liner
                  el.text(update)
              }

              Files.createDirectories(outputFile.getParentFile.toPath)
              IO.write(outputFile, doc.outerHtml())
          }

          Seq(outputFile)
        }.taskValue
      ))

  private def fileContentFromJar(jar: File, fileName: String): Option[Array[Byte]] = {
    val fs      = new BufferedInputStream(Files.newInputStream(jar.toPath))
    val factory = new ArchiveStreamFactory()
    val ais     = factory.createArchiveInputStream(fs)

    try Iterator
      .continually(ais.getNextEntry)
      .takeWhile(_ != null)
      .filter(ais.canReadEntryData)
      .find(_.getName == fileName)
      .map { _ =>
        val out = new ByteArrayOutputStream()
        IOUtils.copy(ais, out)
        out.toByteArray
      } finally fs.close()
  }
} 
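Because the plugin's trigger is NoTrigger, it only runs where explicitly enabled. A hedged build.sbt sketch (the project name and directory are placeholders, not taken from the original build):

lazy val dexApi = (project in file("dex-api"))
  .enablePlugins(RewriteSwaggerConfigPlugin)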
Example 27
Source File: VersionSourcePlugin.scala    From matcher   with MIT License 5 votes vote down vote up
import java.nio.charset.StandardCharsets

import sbt.Keys.{sourceGenerators, sourceManaged, version}
import sbt.{AutoPlugin, Def, IO, _}

object VersionSourcePlugin extends AutoPlugin {

  object V {
    val scalaPackage = SettingKey[String]("version-scala-package", "Scala package name where Version object is created")
    val subProject   = SettingKey[String]("version-sub-project", "Sub project name where Version object is created")
  }

  override def trigger: PluginTrigger = PluginTrigger.NoTrigger

  override def projectSettings: Seq[Def.Setting[_]] = {

    (Compile / sourceGenerators) += Def.task {

      val versionFile      = (Compile / sourceManaged).value / s"${V.scalaPackage.value.replace('.', '/')}/Version.scala"
      val versionExtractor = """(\d+)\.(\d+)\.(\d+).*""".r

      val (major, minor, patch) = version.value match {
        case versionExtractor(ma, mi, pa) => (ma.toInt, mi.toInt, pa.toInt)
        case x                            =>
          // SBT downloads only the latest commit, so "version" doesn't know which tag is the nearest
          if (Option(System.getenv("TRAVIS")).exists(_.toBoolean)) (0, 0, 0)
          else throw new IllegalStateException(s"${V.subProject.value}: can't parse version by git tag: $x")
      }

      IO.write(
        versionFile,
        s"""package ${V.scalaPackage.value}
           |
           |object Version {
           |  val VersionString = "${version.value}"
           |  val VersionTuple = ($major, $minor, $patch)
           |}
           |""".stripMargin,
        charset = StandardCharsets.UTF_8
      )

      Seq(versionFile)
    }
  }
} 
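A hedged build.sbt sketch of wiring the plugin above into a sub-project; the project name and package are placeholders, not taken from the original build:

lazy val dex = (project in file("dex"))
  .enablePlugins(VersionSourcePlugin)
  .settings(
    VersionSourcePlugin.V.scalaPackage := "com.example.dex",
    VersionSourcePlugin.V.subProject   := "dex"
  )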
Example 28
Source File: AuthServiceRestConnector.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.tool.connectors

import java.nio.charset.StandardCharsets
import java.util.concurrent.ThreadLocalRandom

import com.wavesplatform.dex.auth.JwtUtils
import com.wavesplatform.dex.cli.ErrorOr
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.crypto
import com.wavesplatform.dex.tool.connectors.AuthServiceRestConnector.AuthCredentials
import sttp.model.Uri.QuerySegment

case class AuthServiceRestConnector(target: String, chainId: Byte) extends RestConnector with JwtUtils {

  private def mkAuthTokenRequestParams(keyPair: KeyPair): List[QuerySegment] = {
    val jwtPayload = mkJwtSignedPayload(keyPair, networkByte = chainId)
    List(
      "grant_type" -> "password",
      "username"   -> jwtPayload.publicKey.base58,
      "password"   -> s"${jwtPayload.firstTokenExpirationInSeconds}:${jwtPayload.signature}",
      "scope"      -> jwtPayload.scope.head,
      "client_id"  -> jwtPayload.clientId
    ).map { case (k, v) => QuerySegment.KeyValue(k, v) }
  }

  def getAuthCredentials(maybeSeed: Option[String]): ErrorOr[AuthCredentials] = {

    val seed          = maybeSeed getOrElse s"minion${ThreadLocalRandom.current.nextInt}"
    val keyPair       = KeyPair(crypto secureHash (seed getBytes StandardCharsets.UTF_8))
    val requestParams = mkAuthTokenRequestParams(keyPair)
    val uri           = targetUri.copy(querySegments = requestParams)

    mkResponse { _.post(uri) }.map { j =>
      AuthCredentials(
        keyPair = keyPair,
        token = (j \ "access_token").as[String],
        seed = seed
      )
    }
  }
}

object AuthServiceRestConnector {
  final case class AuthCredentials(keyPair: KeyPair, token: String, seed: String)
} 
Example 29
Source File: WsAddressSubscribe.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.api.ws.protocol

import java.nio.charset.StandardCharsets

import cats.syntax.either._
import cats.syntax.option._
import com.wavesplatform.dex.api.ws.protocol.WsAddressSubscribe._
import com.wavesplatform.dex.domain.account.{Address, PrivateKey, PublicKey}
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.crypto
import com.wavesplatform.dex.error
import com.wavesplatform.dex.error.MatcherError
import pdi.jwt.exceptions.{JwtExpirationException, JwtLengthException}
import pdi.jwt.{JwtAlgorithm, JwtJson, JwtOptions}
import play.api.libs.functional.syntax._
import play.api.libs.json._

final case class WsAddressSubscribe(key: Address, authType: String, jwt: String) extends WsClientMessage {
  override val tpe: String = WsAddressSubscribe.tpe

  def validate(jwtPublicKey: String, networkByte: Byte): Either[MatcherError, JwtPayload] =
    for {
      _ <- Either.cond(supportedAuthTypes.contains(authType), (), error.SubscriptionAuthTypeUnsupported(supportedAuthTypes, authType))
      rawJsonPayload <- JwtJson
        .decodeJson(
          token = jwt,
          key = jwtPublicKey,
          algorithms = JwtAlgorithm.allAsymmetric(),
          options = JwtOptions(signature = true, expiration = true, notBefore = true, leeway = leewayInSeconds)
        )
        .toEither
        .left
        .map(toMatcherError(_, key))
      payload <- rawJsonPayload.validate[JwtPayload].asEither.leftMap(_ => error.JwtPayloadBroken)
      _ <- {
        val given = payload.networkByte.head.toByte
        Either.cond(given == networkByte, (), error.TokenNetworkUnexpected(networkByte, given))
      }
      _ <- Either.cond(crypto.verify(payload.signature, ByteStr(payload.toSign), payload.publicKey), (), error.InvalidJwtPayloadSignature)
      _ <- Either.cond(payload.publicKey.toAddress == key, (), error.AddressAndPublicKeyAreIncompatible(key, payload.publicKey))
    } yield payload
}

object WsAddressSubscribe {

  val tpe                = "aus"
  val defaultAuthType    = "jwt"
  val supportedAuthTypes = Set(defaultAuthType)
  val leewayInSeconds    = 10

  def wsUnapply(arg: WsAddressSubscribe): Option[(String, Address, String, String)] = (arg.tpe, arg.key, arg.authType, arg.jwt).some

  implicit val wsWsAddressSubscribeFormat: Format[WsAddressSubscribe] = (
    (__ \ "T").format[String] and
      (__ \ "S").format[Address] and
      (__ \ "t").format[String] and
      (__ \ "j").format[String]
  )(
    (_, key, authType, jwt) => WsAddressSubscribe(key, authType, jwt),
    unlift(WsAddressSubscribe.wsUnapply)
  )

  case class JwtPayload(signature: ByteStr,
                        publicKey: PublicKey,
                        networkByte: String,
                        clientId: String,
                        firstTokenExpirationInSeconds: Long,
                        activeTokenExpirationInSeconds: Long,
                        scope: List[String]) {
    def toSign: Array[Byte] = JwtPayload.toSignPrefix ++ s"$networkByte:$clientId:$firstTokenExpirationInSeconds".getBytes(StandardCharsets.UTF_8)

    def signed(privateKey: PrivateKey): JwtPayload = copy(signature = crypto.sign(privateKey, toSign))
  }

  object JwtPayload {
    val toSignPrefix: Array[Byte] = Array[Byte](-1, -1, -1, 1)

    implicit val jwtPayloadFormat: OFormat[JwtPayload] = (
      (__ \ "sig").format[ByteStr] and
        (__ \ "pk").format[PublicKey] and
        (__ \ "nb").format[String] and
        (__ \ "cid").format[String] and
        (__ \ "exp0").format[Long] and
        (__ \ "exp").format[Long] and
        (__ \ "scope").format[List[String]]
    )(JwtPayload.apply, unlift(JwtPayload.unapply))
  }

  def toMatcherError(e: Throwable, address: Address): MatcherError = e match {
    case _: JwtLengthException     => error.JwtBroken
    case _: JwtExpirationException => error.SubscriptionTokenExpired(address)
    case _                         => error.JwtCommonError(e.getMessage)
  }
} 
Example 30
Source File: HttpV1OrderBook.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.api.http.entities

import java.nio.charset.StandardCharsets

import akka.http.scaladsl.model.{HttpEntity, HttpResponse}
import io.swagger.annotations.ApiModelProperty
import play.api.libs.json.{Json, Reads}

case class HttpV1OrderBook(@ApiModelProperty(value = "Timestamp of the last Order Book update") timestamp: Long,
                           @ApiModelProperty(
                             value = "List of aggregated denormalized bid levels [price, amount]",
                             dataType = "[[Ljava.lang.String;",
                             example = """[ [ "1.18", "43800.00000000" ], [ "1.17", "52187.00000000" ], [ "1.16", "809.00000000" ] ]"""
                           ) bids: List[HttpV1LevelAgg],
                           @ApiModelProperty(
                             value = "List of aggregated denormalized ask levels [price, amount]",
                             dataType = "[[Ljava.lang.String;",
                             example = """[ [ "1.19", "2134.00000000" ], [ "1.20", "747.00000000" ] ]"""
                           ) asks: List[HttpV1LevelAgg])

object HttpV1OrderBook {

  implicit val httpV1OrderBookReads: Reads[HttpV1OrderBook] = Json.reads

  def fromHttpResponse(response: HttpResponse): HttpV1OrderBook =
    Json.parse(response.entity.asInstanceOf[HttpEntity.Strict].getData().decodeString(StandardCharsets.UTF_8)).as[HttpV1OrderBook]
} 
Example 31
Source File: MatcherSuiteBase.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.it

import java.nio.charset.StandardCharsets
import java.util.concurrent.ThreadLocalRandom

import cats.instances.FutureInstances
import com.wavesplatform.dex.asset.DoubleOps
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.utils.ScorexLogging
import com.wavesplatform.dex.it.api.BaseContainersKit
import com.wavesplatform.dex.it.api.node.HasWavesNode
import com.wavesplatform.dex.it.config.{GenesisConfig, PredefinedAccounts, PredefinedAssets}
import com.wavesplatform.dex.it.dex.HasDex
import com.wavesplatform.dex.it.matchers.ItMatchers
import com.wavesplatform.dex.it.test.InformativeTestStart
import com.wavesplatform.dex.it.waves.{MkWavesEntities, ToWavesJConversions}
import com.wavesplatform.dex.test.matchers.DiffMatcherWithImplicits
import com.wavesplatform.dex.waves.WavesFeeConstants
import com.wavesplatform.it.api.ApiExtensions
import org.scalatest.concurrent.Eventually
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, CancelAfterFailure}

import scala.concurrent.duration.DurationInt

trait MatcherSuiteBase
    extends AnyFreeSpec
    with Matchers
    with CancelAfterFailure
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Eventually
    with BaseContainersKit
    with HasDex
    with HasWavesNode
    with MkWavesEntities
    with ApiExtensions
    with ItMatchers
    with DoubleOps
    with WavesFeeConstants
    with PredefinedAssets
    with PredefinedAccounts
    with DiffMatcherWithImplicits
    with InformativeTestStart
    with FutureInstances
    with ToWavesJConversions
    with ScorexLogging {

  GenesisConfig.setupAddressScheme()

  override protected val moduleName: String = "dex-it"

  override implicit def patienceConfig: PatienceConfig = super.patienceConfig.copy(timeout = 30.seconds, interval = 1.second)

  override protected def beforeAll(): Unit = {
    log.debug(s"Perform beforeAll")
    kafkaServer.foreach { _ =>
      createKafkaTopic(dexRunConfig.getString("waves.dex.events-queue.kafka.topic"))
    }
    wavesNode1.start()
    dex1.start()
  }

  override protected def afterAll(): Unit = {
    log.debug(s"Perform afterAll")
    stopBaseContainers()
    super.afterAll()
  }

  def createAccountWithBalance(balances: (Long, Asset)*): KeyPair = {
    val account = KeyPair(ByteStr(s"account-test-${ThreadLocalRandom.current().nextInt()}".getBytes(StandardCharsets.UTF_8)))

    balances.foreach {
      case (balance, asset) =>
        assert(
          wavesNode1.api.balance(alice, asset) >= balance,
          s"Alice doesn't have enough balance in ${asset.toString} to make a transfer"
        )
        broadcastAndAwait(mkTransfer(alice, account.toAddress, balance, asset))
    }
    account
  }
} 
Example 32
Source File: PbToDexConversions.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.grpc.integration.protobuf

import java.nio.charset.StandardCharsets

import com.google.protobuf.{ByteString => PbByteString}
import com.wavesplatform.dex.domain.account.{Address => VAddress}
import com.wavesplatform.dex.domain.asset.{Asset => VAsset}
import com.wavesplatform.dex.domain.bytes.{ByteStr => VByteStr}
import com.wavesplatform.dex.domain.utils._
import com.wavesplatform.dex.grpc.integration.dto.BriefAssetDescription
import com.wavesplatform.dex.grpc.integration.services.AssetDescriptionResponse.MaybeDescription

object PbToDexConversions {

  implicit class PbByteStringOps(val self: PbByteString) extends AnyVal {
    def toVanilla: VByteStr        = VByteStr(self.toByteArray)
    def toVanillaAsset: VAsset     = if (self.isEmpty) VAsset.Waves else VAsset.IssuedAsset(self.toVanilla)
    def toVanillaAddress: VAddress = VAddress.fromBytes { self.toByteArray } explicitGet ()
  }

  implicit class PbMaybeDescriptionOps(val self: MaybeDescription) extends AnyVal {
    def toVanilla: Option[BriefAssetDescription] = self match {
      case MaybeDescription.Empty => None
      case MaybeDescription.Description(value) =>
        Some(
          BriefAssetDescription(
            name = value.name.toString(StandardCharsets.UTF_8),
            decimals = value.decimals,
            hasScript = value.hasScript
          )
        )
    }
  }
} 
Example 33
Source File: FOps.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.it.fp

import java.nio.charset.StandardCharsets

import cats.syntax.apply._
import cats.syntax.either._
import cats.syntax.flatMap._
import cats.syntax.functor._
import com.softwaremill.sttp.{DeserializationError, Response}
import play.api.libs.json._

import scala.concurrent.duration.{FiniteDuration, _}
import scala.util.control.NonFatal

case class RepeatRequestOptions(delayBetweenRequests: FiniteDuration, maxAttempts: Int) {
  def decreaseAttempts: RepeatRequestOptions = copy(maxAttempts = maxAttempts - 1)
}

class FOps[F[_]](implicit M: ThrowableMonadError[F], W: CanWait[F]) {

  def repeatUntil[T](f: => F[T], options: RepeatRequestOptions = RepeatRequestOptions(1.second, 30))(stopCond: T => Boolean): F[T] =
    f.flatMap { firstResp =>
        (firstResp, options).tailRecM[F, (T, RepeatRequestOptions)] {
          case (resp, currOptions) =>
            if (stopCond(resp)) M.pure((resp, currOptions).asRight)
            else if (currOptions.maxAttempts <= 0) M.raiseError(new RuntimeException(s"All attempts are out! The last response is: $resp"))
            else W.wait(options.delayBetweenRequests).productR(f).map(x => (x, currOptions.decreaseAttempts).asLeft)
        }
      }
      .map(_._1)

  def repeatUntil[T](f: => F[T], delay: FiniteDuration)(pred: T => Boolean): F[T] =
    f.flatMap {
      _.tailRecM[F, T] { x =>
        if (pred(x)) M.pure(x.asRight)
        else W.wait(delay).productR(f).map(_.asLeft)
      }
    }

  def repeatUntilResponse[T](f: => F[Response[Either[DeserializationError[JsError], T]]], delay: FiniteDuration)(
      pred: Response[Either[DeserializationError[JsError], T]] => Boolean): F[T] =
    repeatUntil(f, delay)(pred).flatMap(parseResponse)

  def parseResponse[T](resp: Response[Either[DeserializationError[JsError], T]]): F[T] =
    resp.rawErrorBody match {
      case Left(e) =>
        M.raiseError[T](
          new RuntimeException(s"The server returned an error. HTTP code is ${resp.code}, body: ${new String(e, StandardCharsets.UTF_8)}"))
      case Right(Left(error)) => M.raiseError[T](new RuntimeException(s"Can't parse the response: $error"))
      case Right(Right(r))    => M.pure(r)
    }

  def parseTryResponse[E: Reads, T](resp: Response[T]): F[Either[E, T]] = resp.rawErrorBody match {
    case Right(r) => M.pure(Right(r))
    case Left(bytes) =>
      try Json.parse(bytes).validate[E] match {
        case JsSuccess(x, _) => M.pure(Left(x))
        case JsError(e)      => M.raiseError[Either[E, T]](JsResultException(e))
      } catch {
        case NonFatal(e) =>
          M.raiseError[Either[E, T]](new RuntimeException(s"The server returned an error: ${resp.code}, also can't parse as MatcherError", e))
      }
  }

  def parseTryResponseEither[E: Reads, T](resp: Response[Either[DeserializationError[JsError], T]]): F[Either[E, T]] = resp.rawErrorBody match {
    case Right(Right(r)) => M.pure(Right(r))
    case Right(Left(e))  => M.raiseError[Either[E, T]](new RuntimeException(s"The server returned success, but can't parse response: $e"))
    case Left(bytes) =>
      try Json.parse(bytes).validate[E] match {
        case JsSuccess(x, _) => M.pure(Left(x))
        case JsError(e)      => M.raiseError[Either[E, T]](JsResultException(e))
      } catch {
        case NonFatal(e) =>
          M.raiseError[Either[E, T]](new RuntimeException(s"The server returned an error: ${resp.code}, also can't parse as MatcherError", e))
      }
  }
}

object FOps {
  def apply[F[_]: CanWait: ThrowableMonadError]: FOps[F] = new FOps[F]
} 
Example 34
Source File: PredefinedAccounts.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.it.config

import java.nio.charset.StandardCharsets

import com.google.common.primitives.{Bytes, Ints}
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.crypto
import com.wavesplatform.dex.it.config.GenesisConfig.generatorConfig

import scala.collection.JavaConverters._

object PredefinedAccounts extends PredefinedAccounts {
  def generateNewAccount(seed: Array[Byte], nonce: Int): KeyPair = KeyPair(crypto.secureHash(Bytes.concat(Ints.toByteArray(nonce), seed)))
}

trait PredefinedAccounts {

  import PredefinedAccounts._

  private val accounts: Map[String, KeyPair] = {

    val distributionsKey = "genesis-generator.distributions"
    val distributions    = generatorConfig.getObject(distributionsKey)

    distributions
      .keySet()
      .asScala
      .map { accountName =>
        val prefix   = s"$distributionsKey.$accountName"
        val seedText = generatorConfig.getString(s"$prefix.seed-text")
        val nonce    = generatorConfig.getInt(s"$prefix.nonce")
        accountName -> generateNewAccount(seedText.getBytes(StandardCharsets.UTF_8), nonce)
      }
      .toMap
  }

  val matcher: KeyPair = accounts("matcher")
  val alice: KeyPair   = accounts("alice")
  val bob: KeyPair     = accounts("bob")
} 
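A short sketch (assumed usage, not from the original sources) showing that the derivation above is deterministic: the same seed text and nonce always yield the same key pair.

val seedBytes = "some seed text".getBytes(StandardCharsets.UTF_8)
val first     = PredefinedAccounts.generateNewAccount(seedBytes, nonce = 0)
val second    = PredefinedAccounts.generateNewAccount(seedBytes, nonce = 0)
assert(first.toAddress == second.toAddress) // identical inputs => identical account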
Example 35
Source File: Scripts.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.it.test

import java.nio.charset.StandardCharsets

import com.google.common.primitives.Ints
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.bytes.codec.Base64
import com.wavesplatform.dex.domain.crypto.secureHash

object Scripts {
  val alwaysTrue: ByteStr  = fromBase64("AgZ7TN8j")
  val alwaysFalse: ByteStr = fromBase64("AgeJ1sz7")

  def fromBase64(x: String): ByteStr = ByteStr.decodeBase64(x).get

  
  private def renderScriptTemplate(binaryCode: Array[Byte], rawVariable: String, by: Array[Byte]): Array[Byte] =
    replaceFirst(binaryCode, rawVariable.getBytes(StandardCharsets.UTF_8), by)
      .getOrElse(throw new RuntimeException(s"Can't replace '$rawVariable'"))

  private def replaceFirst(where: Array[Byte], what: Array[Byte], by: Array[Byte]): Option[Array[Byte]] = {
    val i = where.indexOfSlice(what)
    if (i == -1) None
    else
      Some(
        Array.concat(
          where.slice(0, i - 4),
          Ints.toByteArray(by.length),
          by,
          where.drop(i + what.length)
        ))
  }
} 
Example 36
Source File: UUIDUtils.scala    From seals   with Apache License 2.0 5 votes vote down vote up
package dev.tauri.seals
package core

import java.util.UUID
import java.nio.charset.StandardCharsets

import scala.language.implicitConversions

import scodec.bits.ByteVector

final object UUIDUtils {

  implicit final class UUIDSyntax(private val self: UUID) extends AnyVal {
    def / (sub: UUID): UUIDBuilder = UUIDBuilder(self) / sub
    def / (sub: ByteVector): UUIDBuilder = UUIDBuilder(self) / sub
    def / (sub: String): UUIDBuilder = UUIDBuilder(self) / sub
  }

  final case class UUIDBuilder(namespace: UUID, name: Vector[ByteVector] = Vector.empty) {
    def / (sub: UUID): UUIDBuilder = copy(name = name :+ NsUUID.bvFromUUID(sub))
    def / (sub: ByteVector): UUIDBuilder = copy(name = name :+ sub)
    def / (sub: String): UUIDBuilder = copy(name = name :+ ByteVector.view(sub.getBytes(StandardCharsets.UTF_8)))
    def uuid: UUID = NsUUID.uuid5nestedBv(namespace, name: _*)
  }

  implicit def uuidLiteralSyntax(sc: StringContext): macros.UUIDSyntax =
    new macros.UUIDSyntax(sc)
} 
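A minimal sketch (assumed, not from the original file) of the builder syntax defined above: namespace and name segments are accumulated left to right and hashed into a single version-5 UUID.

import java.util.UUID
import dev.tauri.seals.core.UUIDUtils._

val root     = UUID.fromString("b2e9ab28-4d77-4b29-8ab4-9c37f95b85d5") // arbitrary namespace
val id: UUID = (root / "users" / "alice").uuid // stable for the same inputs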
Example 37
Source File: NsUUID.scala    From seals   with Apache License 2.0 5 votes vote down vote up
package dev.tauri.seals
package core

import java.util.UUID
import java.security.MessageDigest
import java.nio.{ ByteBuffer, Buffer }
import java.nio.charset.StandardCharsets

import scodec.bits.ByteVector

private[seals] object NsUUID {

  def uuid5(ns: UUID, name: String): UUID =
    uuid5bytes(ns, ByteBuffer.wrap(name.getBytes(StandardCharsets.UTF_8)))

  def uuid5bv(ns: UUID, name: ByteVector): UUID =
    uuid5bytes(ns, name.toByteBuffer)

  private def uuid5bytes(ns: UUID, name: ByteBuffer): UUID = {
    val buf = ByteBuffer.allocate(16) // network byte order by default
    putUUIDToBuf(ns, buf)
    (buf : Buffer).rewind()
    val h = sha1()
    h.update(buf)
    h.update(name)
    val arr: Array[Byte] = h.digest().take(16)
    arr(6) = (arr(6) & 0x0f).toByte // clear version
    arr(6) = (arr(6) | 0x50).toByte // version 5
    arr(8) = (arr(8) & 0x3f).toByte // clear variant
    arr(8) = (arr(8) | 0x80).toByte // variant RFC4122
    (buf : Buffer).rewind()
    buf.put(arr)
    (buf : Buffer).rewind()
    val msl = buf.getLong()
    val lsl = buf.getLong()
    new UUID(msl, lsl)
  }

  def uuid5nested(root: UUID, names: String*): UUID =
    names.foldLeft(root)(uuid5)

  def uuid5nestedBv(root: UUID, names: ByteVector*): UUID = {
    val buf = ByteVector.concat(names).toByteBuffer
    uuid5bytes(root, buf)
  }

  def uuid5nestedNsNm(name: String, ns1: UUID, nss: UUID*): UUID =
    uuid5(uuid5nestedNs(ns1, nss: _*), name)

  def uuid5nestedNs(ns1: UUID, nss: UUID*): UUID = {
    val buf = ByteBuffer.allocate(16)
    nss.foldLeft(ns1) { (st, u) =>
      putUUIDToBuf(u, buf)
      (buf : Buffer).rewind()
      val r = uuid5bytes(st, buf)
      (buf : Buffer).rewind()
      r
    }
  }

  private def putUUIDToBuf(u: UUID, buf: ByteBuffer): Unit = {
    buf.putLong(u.getMostSignificantBits)
    buf.putLong(u.getLeastSignificantBits)
  }

  def bvFromUUID(u: UUID): ByteVector = {
    val buf = ByteBuffer.allocate(16)
    putUUIDToBuf(u, buf)
    (buf : Buffer).rewind()
    ByteVector.view(buf)
  }

  private def sha1(): MessageDigest =
    MessageDigest.getInstance("SHA-1")
} 
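An assumed usage sketch for uuid5 above (note that NsUUID is private[seals], so this would live inside that package): a name-based version-5 UUID is stable for the same namespace and name, per RFC 4122.

val dnsNs = java.util.UUID.fromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") // RFC 4122 DNS namespace
val id    = NsUUID.uuid5(dnsNs, "example.org")
assert(id.version == 5) // the version bits are forced to 5 in uuid5bytes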
Example 38
Source File: CsvSourceTypeConversionTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.csv

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets

import io.eels.schema._
import org.scalatest.{Ignore, Matchers, WordSpec}

@Ignore
class CsvSourceTypeConversionTest extends WordSpec with Matchers {
  "CsvSource" should {
    "read schema" in {
      val exampleCsvString =
        """A,B,C,D
          |1,2.2,3,foo
          |4,5.5,6,bar
        """.stripMargin

      val stream = new ByteArrayInputStream(exampleCsvString.getBytes(StandardCharsets.UTF_8))
      val schema = new StructType(Vector(
        Field("A", IntType.Signed),
        Field("B", DoubleType),
        Field("C", IntType.Signed),
        Field("D", StringType)
      ))
      val source = new CsvSource(() => stream)
        .withSchema(schema)
      
      source.schema.fields.foreach(println)
      val ds = source.toDataStream()
      val firstRow = ds.iterator.toIterable.head
      val firstRowA = firstRow.get("A")
      println(firstRowA) // prints 1 as expected
      println(firstRowA.getClass.getTypeName) // prints java.lang.String
      assert(firstRowA == 1) // this assertion will fail because firstRowA is not an Int
    }
  }
} 
Example 39
Source File: JsonDecoderSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package types

import io.circe.generic.auto._
import io.circe.syntax._
import java.nio.charset.StandardCharsets
import jawn.ast.JParser
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.{ScalaCheck, Specification}
import roc.postgresql.Null
import roc.types.failures.{ElementDecodingFailure, NullDecodedFailure}
import roc.types.{decoders => Decoders}

final class JsonDecoderSpec extends Specification with ScalaCheck { def is = s2"""

  Json Decoder
    must correctly decode Text representation                              $testValidText
    must throw a ElementDecodingFailure when Text decoding invalid Json    $testInvalidText
    must correctly decode Binary representation                            $testValidBinary
    must throw a ElementDecodingFailure when Binary decoding invalid Json  $testInvalidBinary
    must throw a NullDecodedFailure when Null decoding Json                $testNullDecoding

                                                                               """

  private val testValidText = forAll { x: JsonContainer =>
    Decoders.jsonElementDecoder.textDecoder(x.text) must_== x.json
  }

  private val testInvalidText = forAll { x: String =>
    Decoders.jsonElementDecoder.textDecoder(x) must throwA[ElementDecodingFailure]
  }

  private val testValidBinary = forAll { x: BinaryJsonContainer =>
    Decoders.jsonElementDecoder.binaryDecoder(x.binary) must_== x.json
  }

  private val testInvalidBinary = forAll { xs: Array[Byte] =>
    Decoders.jsonElementDecoder.binaryDecoder(xs) must throwA[ElementDecodingFailure]
  }

  private val testNullDecoding =
    Decoders.jsonElementDecoder.nullDecoder(Null('doesnotmatter, -71)) must throwA[NullDecodedFailure]

  case class JsonContainer(text: String, json: Json)
  private lazy val genJsonContainer: Gen[JsonContainer] = for {
    jObject <- arbitrary[JsonObject]
  } yield {
    val text = jObject.asJson.noSpaces
    val json = JParser.parseUnsafe(text)
    new JsonContainer(text, json)
  }
  private implicit lazy val arbitraryJsonContainer: Arbitrary[JsonContainer] = 
    Arbitrary(genJsonContainer)

  case class BinaryJsonContainer(binary: Array[Byte], json: Json)
  private lazy val genBinaryJsonContainer: Gen[BinaryJsonContainer] = for {
    jObject <- arbitrary[JsonObject]
  } yield {
    val text = jObject.asJson.noSpaces
    val json = JParser.parseUnsafe(text)
    val bytes = text.getBytes(StandardCharsets.UTF_8)
    new BinaryJsonContainer(bytes, json)
  }
  private implicit lazy val arbitraryBinaryJsonContainer: Arbitrary[BinaryJsonContainer] =
    Arbitrary(genBinaryJsonContainer)

  case class JsonObject(name: String, first_names: List[String])

  private lazy val genJsonObject: Gen[JsonObject] = for {
    name <- arbitrary[String]
    first_names <- arbitrary[List[String]]
  } yield new JsonObject(name, first_names)
  private implicit lazy val arbitraryJsonObject: Arbitrary[JsonObject] = 
    Arbitrary(genJsonObject)
} 
Example 40
Source File: PacketEncoders.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql
package transport

import java.nio.charset.StandardCharsets

private[postgresql] trait PacketEncoder[A <: FrontendMessage] {
  def apply(a: A): Packet
}

private[postgresql] object PacketEncoder {
  def lengthOfStartupMessageByteArray(sm: StartupMessage): Int = {
    val protocolLength  = 4 //2 shorts * 2
    val lengthOfUserLbl = lengthOfCStyleString("user")
    val lengthOfUser    = lengthOfCStyleString(sm.user)
    val lengthOfDbLbl   = lengthOfCStyleString("database")
    val lengthOfDb      = lengthOfCStyleString(sm.database)
    val extraNull       = 1

    protocolLength + lengthOfUserLbl + lengthOfUser + lengthOfDbLbl + lengthOfDb + extraNull
  }

}

private[postgresql] trait PacketEncoderImplicits {
  import PacketEncoder._

  implicit val startupMessageEncoder: PacketEncoder[StartupMessage] = 
    new PacketEncoder[StartupMessage] {
      def apply(sm: StartupMessage): Packet = {
        val buffer = BufferWriter(new Array[Byte](lengthOfStartupMessageByteArray(sm)))
        buffer.writeShort(3)
        buffer.writeShort(0)
        buffer.writeNullTerminatedString("user")
        buffer.writeNullTerminatedString(sm.user)
        buffer.writeNullTerminatedString("database")
        buffer.writeNullTerminatedString(sm.database)
        buffer.writeNull
        Packet(None, Buffer(buffer.toBytes))
      }
    }

  implicit val passwordMessageEncoder: PacketEncoder[PasswordMessage] = 
    new PacketEncoder[PasswordMessage] {
      def apply(pm: PasswordMessage): Packet = {
        val length = pm.password.getBytes(StandardCharsets.UTF_8).length
        val bw = BufferWriter(new Array[Byte](length + 1))
        bw.writeNullTerminatedString(pm.password)
        Packet(Some(Message.PasswordMessageByte), Buffer(bw.toBytes))
      }
    }

  implicit val queryMessageEncoder: PacketEncoder[Query] = 
    new PacketEncoder[Query] {
      def apply(q: Query): Packet = {
        val length = q.queryString.getBytes(StandardCharsets.UTF_8).length
        val bw     = BufferWriter(new Array[Byte](length + 1))
        bw.writeNullTerminatedString(q.queryString)
        val bytes  = bw.toBytes
        Packet(Some(Message.QueryMessageByte), Buffer(bytes))
      }
    }

  implicit val terminateMessageEncoder: PacketEncoder[Terminate] = new PacketEncoder[Terminate] {
    def apply(t: Terminate): Packet =
      Packet(Some(Message.TerminateByte), Buffer(Array.empty[Byte]))
  }
} 
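For reference, a worked sizing example for lengthOfStartupMessageByteArray above; the user and database values are placeholders:

// StartupMessage with user = "bob" and database = "db1":
//   4 (protocol) + 5 ("user" + NUL) + 4 ("bob" + NUL)
// + 9 ("database" + NUL) + 4 ("db1" + NUL) + 1 (extra NUL) = 27 bytes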
Example 41
Source File: package.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc

import roc.postgresql.transport.{PacketDecoder, PacketDecoderImplicits, Packet, PacketEncoder, 
  PacketEncoderImplicits}
import java.nio.charset.StandardCharsets

package object postgresql extends PacketEncoderImplicits with PacketDecoderImplicits {

  def encodePacket[A <: FrontendMessage: PacketEncoder](a: A): Packet = 
    implicitly[PacketEncoder[A]].apply(a)

  def decodePacket[A <: BackendMessage: PacketDecoder](p: Packet): PacketDecoder.Result[A] =
    implicitly[PacketDecoder[A]].apply(p)

  def lengthOfCStyleString(str: String): Int = {
    val bytes = str.getBytes(StandardCharsets.UTF_8)
    bytes.length + 1
  }

  def lengthOfCStyleStrings(xs: List[String]): Int = xs match {
    case h :: t => xs.map(lengthOfCStyleString).reduce(_ + _)
    case t      => 0
  }

} 
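A quick worked example (not in the original file) of lengthOfCStyleString: it counts UTF-8 bytes plus one trailing NUL, so multi-byte characters contribute more than one byte.

lengthOfCStyleString("user")  // 5 = 4 bytes + NUL
lengthOfCStyleString("héllo") // 7 = 6 UTF-8 bytes ("é" encodes to 2 bytes) + NUL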
Example 42
Source File: PostgresqlLexicalGen.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import java.nio.charset.StandardCharsets
import org.scalacheck.Gen
import org.specs2.ScalaCheck


trait PostgresqlLexicalGen extends ScalaCheck {
  // see http://www.postgresql.org/docs/current/static/sql-syntax-lexical.html
  // for more on what constitutes a valid SQL Identifier
  protected val UnicodeCapitalEnglish = '\u0041' to '\u005A'
  protected val UnicodeLowerEnglish   = '\u0061' to '\u007A'
  protected val UnicodeNonLatin       = '\u0400' to '\u1FFE'
  protected val UnicodeUnderscore     = "_".getBytes(StandardCharsets.UTF_8).map(_.toChar).head
  protected val UnicodeDollarSign     = "$".getBytes(StandardCharsets.UTF_8).map(_.toChar).head
  protected val UnicodeNumbers        = '\u0030' to '\u0039'
  protected val BeginningChars = UnicodeUnderscore :: List(UnicodeCapitalEnglish, 
    UnicodeLowerEnglish, UnicodeNonLatin).flatten
  protected val SubsequentChars = UnicodeDollarSign :: BeginningChars ::: UnicodeNumbers.toList

  protected lazy val genValidBeginningIdentifier: Gen[Char] = for {
    char    <-  Gen.oneOf(BeginningChars)
  } yield char
  protected lazy val genValidSubsequentIdentifier: Gen[Char] = for {
    char    <-  Gen.oneOf(SubsequentChars)
  } yield char

  protected lazy val genValidSQLIdentifier: Gen[String] = for {
    firstChar   <-  genValidBeginningIdentifier
    chars       <-  Gen.listOf(genValidSubsequentIdentifier)
  } yield {
    val xs = firstChar :: chars
    xs.map(_.toString).reduce(_ + _)
  }

  protected lazy val genValidNumberOfShortColumns: Gen[Short] = 
    Gen.chooseNum[Short](0, 1663) // the maximum number of Postgresql columns is 1663
  protected lazy val genValidNumberOfIntColumns: Gen[Int] =
    genValidNumberOfShortColumns.map(_.toInt)
  protected lazy val genValidNonZeroNumberOfShortColumns: Gen[Short] =
    Gen.chooseNum[Short](1, 1663) // the maximum number of Postgresql columns is 1663
} 
Example 43
Source File: PackageSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import java.nio.charset.StandardCharsets
import org.scalacheck.Prop.forAll
import org.specs2.{ScalaCheck, Specification}

final class PackageSpec extends Specification with ScalaCheck { def is = s2"""

  Postgresql Package
    should calculate length of C-Style String               $test0
    should calculate length of C-Style Strings              $test1
                                                                           """

  val test0 = forAll { (str: String) =>
    val bytes  = str.getBytes(StandardCharsets.UTF_8)
    val length = bytes.length + 1 // add 1 for null character
    lengthOfCStyleString(str) must_== length
  }

  val test1 = forAll { (xs: List[String]) =>
    val length = xs match {
      case h :: t => xs.map(lengthOfCStyleString).reduce(_ + _)
      case t      => 0
    }
    lengthOfCStyleStrings(xs) must_== length
  }
} 
Example 44
Source File: MessageSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2._

final class MessagesSpec extends Specification with ScalaCheck { def is = s2"""

  PasswordMessage
    should MD5 encrypt a password with given salt           $pmEncrypt
                                                                            """

  val pmEncrypt = forAll { (user: String, pm: PasswordMessage, salt: Array[Byte]) =>
    val md = MessageDigest.getInstance("MD5")
    md.update((pm.password+ user).getBytes(StandardCharsets.UTF_8))
    val unsaltedHexStr = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("")(_ + _)
    val saltedBytes = unsaltedHexStr.getBytes ++ salt
    md.reset()
    md.update(saltedBytes)
    val passwd = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("md5")(_ + _)
    passwd must_== PasswordMessage.encryptMD5Passwd(user, pm.password, salt)
  }
  
  lazy val genByte: Gen[Byte] = arbitrary[Byte]
  lazy val genSalt: Gen[Array[Byte]] = Gen.containerOfN[Array, Byte](4, genByte)
  lazy val genPasswordMessage: Gen[PasswordMessage] = for {
    password    <-  arbitrary[String]
  } yield new PasswordMessage(password)
  implicit lazy val implicitPasswordMessage: Arbitrary[PasswordMessage] = 
    Arbitrary(genPasswordMessage)
} 
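In other words, the expected value re-derived by the property is PostgreSQL's MD5 auth digest: "md5" + hex(md5(hex(md5(password + user)) ++ salt)), which PasswordMessage.encryptMD5Passwd must reproduce.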
Example 45
Source File: CirceSerdes.scala    From kafka-streams-circe   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kafka.streams.circe

import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.{Decoder, Encoder}
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serdes, Serializer}

object CirceSerdes {

  implicit def serializer[T: Encoder]: Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def serialize(topic: String, caseClass: T): Array[Byte] =
        Encoder[T].apply(caseClass).noSpaces.getBytes(StandardCharsets.UTF_8)
      override def close(): Unit = ()
    }

  implicit def deserializer[T: Decoder]: Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def deserialize(topic: String, data: Array[Byte]): T =
        Option(data).fold(null.asInstanceOf[T]) { data =>
          decode[T](new String(data, StandardCharsets.UTF_8))
            .fold(error => throw new SerializationException(error), identity)
        }
      override def close(): Unit = ()
    }

  implicit def serde[CC: Encoder: Decoder]: Serde[CC] = Serdes.serdeFrom(serializer, deserializer)
} 
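A minimal round-trip sketch using the serde above (assuming circe-generic is on the classpath; the Click case class is a placeholder):

import io.circe.generic.auto._
import org.apache.kafka.common.serialization.Serde

final case class Click(userId: String, url: String)

val clickSerde: Serde[Click] = CirceSerdes.serde[Click]
val bytes = clickSerde.serializer().serialize("clicks", Click("u1", "/home"))
val back  = clickSerde.deserializer().deserialize("clicks", bytes) // == Click("u1", "/home")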
Example 46
Source File: WatchServiceReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

import akka.testkit.{EventFilter, TestProbe}

import scala.concurrent.duration.{DurationInt, FiniteDuration}
import java.io.File

import ch.qos.logback.classic.Level

class WatchServiceReceiverSpec extends BaseAkkaSpec{

  val watcherTB = TestProbe("WATCH-SERVICE")
  var watchFileTask:WatchServiceReceiver = _
  val watchTestDir = s"${CONFIG.JSON_REPOSITORY}/watchtest"

  "Creating WatchServiceReceiver" should {
    "process initial files in the JSON repository" in {
      CONFIG.JSON_EXTENSION = "json.not"
      watchFileTask = new WatchServiceReceiver(watcherTB.ref)
      watcherTB.expectMsgAllClassOf(classOf[JsonReceiverActor.JSON_RECEIVED])
      CONFIG.JSON_EXTENSION = "json.test"
    }
  }

  var watchThread: Thread = _
  "Start a Thread with WatchServiceReceiver" should {
    "Start Thread" in {
      watchThread = new Thread(watchFileTask, "TESTING-WATCHER-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-WATCHER-IN-THREAD") should be(true)
    }
  }

  "Start watching directory" should {
    "Starting receiving CREATED event" taggedAs(SlowTest) in {
      watchFileTask.watch(Paths.get(watchTestDir))
      Files.write(Paths.get(s"$watchTestDir/watched.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
    "Starting receiving UPDATE event" taggedAs(SlowTest) in {
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.delete_json_test.getBytes(StandardCharsets.UTF_8))
      Thread.sleep(200)
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
  }

  "processJson" should {
    "log to warn level when json has invalid schema" in {
      Files.write(Paths.get(s"$watchTestDir/watched-invalid.json.test"), Utils_JSONTest.test_json_schema_invalid.getBytes(StandardCharsets.UTF_8))
      watchFileTask.processJson(s"$watchTestDir/watched-invalid.json.test",new File(s"$watchTestDir/watched-invalid.json.test"))
      s"File $watchTestDir/watched-invalid.json.test not processed. Incorrect JSON schema" should beLoggedAt(Level.WARN)
    }
  }

  "interrupt watchservice" should{
    "interrupt thread" in {
      watchThread.interrupt()
    }
  }

} 
Example 47
Source File: ZincAnalysisParserTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.build.zinc.analysis

import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.util.UUID

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.Coordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

class ZincAnalysisParserTest extends SpecificationWithJUnit {
  "ZincAnalysisParser" should {
    "parse repo with zinc analysis" in new baseCtx {
      private val parser = new ZincAnalysisParser(repoRoot)
      private val coordinatesToAnalyses: Map[Coordinates, List[ZincModuleAnalysis]] = parser.readModules()
      coordinatesToAnalyses must haveLength(greaterThan(0))
      private val analysisList: List[ZincModuleAnalysis] = coordinatesToAnalyses.head._2
      analysisList must haveLength(greaterThan(0))
    }
  }

  abstract class baseCtx extends Scope {
    val fileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot = fileSystem.getPath("repoRoot")
    Files.createDirectories(repoRoot)
    writeResourceAsFileToPath("/pom.xml", "pom.xml", "java-junit-sample/")
    writeResourceAsFileToPath("/aggregate-pom.xml", "pom.xml", "")
    writeResourceAsFileToPath("/compile.relations", "compile.relations","java-junit-sample/target/analysis/")
    writeResourceAsFileToPath("/test-compile.relations", "test-compile.relations","java-junit-sample/target/analysis/")

    private def writeResourceAsFileToPath(resource: String, fileName: String, path: String) = {
      if (path.nonEmpty)
        Files.createDirectories(repoRoot.resolve(path))
      val stream: InputStream = getClass.getResourceAsStream(s"$resource")
      val compileRelations = scala.io.Source.fromInputStream(stream).mkString
      Files.write(repoRoot.resolve(s"$path$fileName"), compileRelations.getBytes(StandardCharsets.UTF_8))
    }

    def path(withName: String) = repoRoot.resolve(withName)
    def random = UUID.randomUUID().toString
  }
} 
Example 48
Source File: MavenCoordinatesListReaderIT.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.utils

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, NoSuchFileException, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.MavenCoordinatesListReader
import com.wixpress.build.maven.MavenMakers.someCoordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

//noinspection TypeAnnotation
class MavenCoordinatesListReaderIT extends SpecificationWithJUnit{
  "MavenCoordinatesListReader" should {
    "read file with coordinates" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                            |${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA,coordinatesB)
    }

    "ignore empty line" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                           |
                           |${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA,coordinatesB)
    }

    "ignore preceding and trailing spaces" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"    ${coordinatesA.serialized}   "
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "ignore lines that starts with #" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                            |#${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "throw exception in case file is missing" in new Ctx{
      MavenCoordinatesListReader.coordinatesIn(fs.getPath("non-existing-file")) must throwA[NoSuchFileException]
    }
  }

  trait Ctx extends Scope{
    val fs = MemoryFileSystemBuilder.newLinux().build()
    def fileWithContent(content:String):Path = {
      val path = Files.createTempFile(fs.getPath("/"),"",".txt")
      Files.write(path, content.getBytes(StandardCharsets.UTF_8))
    }
  }

} 
Example 49
Source File: CustomReceiver.scala    From Learning-Spark-SQL   with MIT License 5 votes vote down vote up
import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     println("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     println("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     println("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
} 
Example 50
Source File: XmlOptions.scala    From spark-xml   with Apache License 2.0 5 votes vote down vote up
package com.databricks.spark.xml

import java.nio.charset.StandardCharsets

import com.databricks.spark.xml.util._


private[xml] class XmlOptions(
    @transient private val parameters: Map[String, String])
  extends Serializable {

  def this() = this(Map.empty)

  val charset = parameters.getOrElse("charset", XmlOptions.DEFAULT_CHARSET)
  val codec = parameters.get("compression").orElse(parameters.get("codec")).orNull
  val rowTag = parameters.getOrElse("rowTag", XmlOptions.DEFAULT_ROW_TAG)
  require(rowTag.nonEmpty, "'rowTag' option should not be empty string.")
  val rootTag = parameters.getOrElse("rootTag", XmlOptions.DEFAULT_ROOT_TAG)
  val samplingRatio = parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
  require(samplingRatio > 0, s"samplingRatio ($samplingRatio) should be greater than 0")
  val excludeAttributeFlag = parameters.get("excludeAttribute").map(_.toBoolean).getOrElse(false)
  val treatEmptyValuesAsNulls =
    parameters.get("treatEmptyValuesAsNulls").map(_.toBoolean).getOrElse(false)
  val attributePrefix =
    parameters.getOrElse("attributePrefix", XmlOptions.DEFAULT_ATTRIBUTE_PREFIX)
  require(attributePrefix.nonEmpty, "'attributePrefix' option should not be empty string.")
  val valueTag = parameters.getOrElse("valueTag", XmlOptions.DEFAULT_VALUE_TAG)
  require(valueTag.nonEmpty, "'valueTag' option should not be empty string.")
  require(valueTag != attributePrefix,
    "'valueTag' and 'attributePrefix' options should not be the same.")
  val nullValue = parameters.getOrElse("nullValue", XmlOptions.DEFAULT_NULL_VALUE)
  val columnNameOfCorruptRecord =
    parameters.getOrElse("columnNameOfCorruptRecord", "_corrupt_record")
  val ignoreSurroundingSpaces =
    parameters.get("ignoreSurroundingSpaces").map(_.toBoolean).getOrElse(false)
  val parseMode = ParseMode.fromString(parameters.getOrElse("mode", PermissiveMode.name))
  val inferSchema = parameters.get("inferSchema").map(_.toBoolean).getOrElse(true)
  val rowValidationXSDPath = parameters.get("rowValidationXSDPath").orNull
}

private[xml] object XmlOptions {
  val DEFAULT_ATTRIBUTE_PREFIX = "_"
  val DEFAULT_VALUE_TAG = "_VALUE"
  val DEFAULT_ROW_TAG = "ROW"
  val DEFAULT_ROOT_TAG = "ROWS"
  val DEFAULT_CHARSET: String = StandardCharsets.UTF_8.name
  val DEFAULT_NULL_VALUE: String = null

  def apply(parameters: Map[String, String]): XmlOptions = new XmlOptions(parameters)
} 
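An assumed usage sketch of the options map parsed above; the keys match those read in the constructor, and unspecified options fall back to their defaults:

val options = XmlOptions(Map(
  "rowTag"  -> "book",
  "charset" -> StandardCharsets.UTF_8.name,
  "mode"    -> "PERMISSIVE"
))
// options.rowTag == "book"; options.samplingRatio == 1.0 (default)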
Example 51
Source File: XmlFileSuite.scala    From spark-xml   with Apache License 2.0 5 votes vote down vote up
package com.databricks.spark.xml.util

import java.nio.charset.{StandardCharsets, UnsupportedCharsetException}

import org.apache.spark.SparkContext
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

final class XmlFileSuite extends AnyFunSuite with BeforeAndAfterAll {

  private val booksFile = "src/test/resources/books.xml"
  private val booksUnicodeInTagNameFile = "src/test/resources/books-unicode-in-tag-name.xml"
  private val booksFileTag = "book"
  private val booksUnicodeFileTag = "\u66F8" // scalastyle:ignore
  private val numBooks = 12
  private val numBooksUnicodeInTagName = 3
  private val fiasHouse = "src/test/resources/fias_house.xml"
  private val fiasRowTag = "House"
  private val numHouses = 37
  private val utf8 = StandardCharsets.UTF_8.name

  private var sparkContext: SparkContext = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    sparkContext = new SparkContext("local[2]", "TextFileSuite")
  }

  override def afterAll(): Unit = {
    try {
      sparkContext.stop()
      sparkContext = null
    } finally {
      super.afterAll()
    }
  }

  test("read utf-8 encoded file") {
    val baseRDD = XmlFile.withCharset(sparkContext, booksFile, utf8, rowTag = booksFileTag)
    assert(baseRDD.count() === numBooks)
  }

  test("read file with unicode chars in row tag name") {
    val baseRDD = XmlFile.withCharset(
      sparkContext, booksUnicodeInTagNameFile, utf8, rowTag = booksUnicodeFileTag)
    assert(baseRDD.count() === numBooksUnicodeInTagName)
  }

  test("read utf-8 encoded file with empty tag") {
    val baseRDD = XmlFile.withCharset(sparkContext, fiasHouse, utf8, rowTag = fiasRowTag)
    assert(baseRDD.count() == numHouses)
    baseRDD.collect().foreach(x => assert(x.contains("/>")))
  }

  test("unsupported charset") {
    val exception = intercept[UnsupportedCharsetException] {
      XmlFile.withCharset(sparkContext, booksFile, "frylock", rowTag = booksFileTag).count()
    }
    assert(exception.getMessage.contains("frylock"))
  }

} 
Example 52
Source File: AppendHandler.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.protocol.command

import java.nio.charset.StandardCharsets

import javax.mail.Flags

import akka.actor.{ActorRef, Props}
import com.linagora.gatling.imap.protocol._
import com.yahoo.imapnio.async.client.ImapAsyncSession
import com.yahoo.imapnio.async.request.AppendCommand
import com.yahoo.imapnio.async.response.ImapAsyncResponse
import io.gatling.core.akka.BaseActor

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

object AppendHandler {
  def props(session: ImapAsyncSession) = Props(new AppendHandler(session))
}

class AppendHandler(session: ImapAsyncSession) extends BaseActor {

  override def receive: Receive = {
    case Command.Append(userId, mailbox, flags, date, content) =>
      if (date.isDefined) throw new NotImplementedError("Date parameter for APPEND is still not implemented")

      logger.debug(s"APPEND receive from sender ${sender.path} on ${self.path}")
      context.become(waitCallback(sender()))
      val nullDate = null
      val crLfContent = content.replaceAll("(?<!\r)\n", "\r\n").getBytes(StandardCharsets.UTF_8)
      ImapSessionExecutor
        .listenWithHandler(self, userId, Response.Appended, callback)(logger)(session.execute(new AppendCommand(mailbox, flags.map(toImapFlags).orNull, nullDate, crLfContent)))
  }

  private def callback(response: Future[ImapAsyncResponse]) = {
    Try(response) match {
      case Success(futureResult) =>
        futureResult.onComplete(future => {
          logger.debug(s"AppendHandler command completed, success : ${future.isSuccess}")
          if (!future.isSuccess) {
            logger.error("AppendHandler command failed", future.toEither.left)
          }

        })
      case Failure(e) =>
        logger.error("ERROR when executing APPEND COMMAND", e)
        throw e
    }
  }

  private def toImapFlags(flags: Seq[String]): Flags = {
    val imapFlags = new Flags()
    flags.foreach(imapFlags.add)
    imapFlags
  }

  def waitCallback(sender: ActorRef): Receive = {
    case msg @ Response.Appended(_) =>
      logger.debug(s"APPEND reply to sender ${sender.path}")
      sender ! msg
      context.stop(self)
  }

} 
Example 53
Source File: JsonRequestSpec.scala    From play-ws   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.ws.ahc

import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.util.ByteString
import org.mockito.Mockito.times
import org.mockito.Mockito.verify
import org.mockito.Mockito.when
import org.specs2.mock.Mockito

import org.specs2.mutable.Specification
import org.specs2.specification.AfterAll
import play.api.libs.json.JsString
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.ws.JsonBodyReadables
import play.api.libs.ws.JsonBodyWritables
import play.libs.ws.DefaultObjectMapper
import play.shaded.ahc.org.asynchttpclient.Response

import scala.io.Codec


class JsonRequestSpec extends Specification with Mockito with AfterAll with JsonBodyWritables {
  sequential

  implicit val system       = ActorSystem()
  implicit val materializer = Materializer.matFromSystem

  override def afterAll: Unit = {
    system.terminate()
  }

  "set a json node" in {
    val jsValue = Json.obj("k1" -> JsString("v1"))
    val client  = mock[StandaloneAhcWSClient]
    val req = new StandaloneAhcWSRequest(client, "http://playframework.com/", null)
      .withBody(jsValue)
      .asInstanceOf[StandaloneAhcWSRequest]
      .buildRequest()

    req.getHeaders.get("Content-Type") must be_==("application/json")
    ByteString.fromArray(req.getByteData).utf8String must be_==("""{"k1":"v1"}""")
  }

  "set a json node using the default object mapper" in {
    val objectMapper = DefaultObjectMapper.instance

    implicit val jsonReadable = body(objectMapper)
    val jsonNode              = objectMapper.readTree("""{"k1":"v1"}""")
    val client                = mock[StandaloneAhcWSClient]
    val req = new StandaloneAhcWSRequest(client, "http://playframework.com/", null)
      .withBody(jsonNode)
      .asInstanceOf[StandaloneAhcWSRequest]
      .buildRequest()

    req.getHeaders.get("Content-Type") must be_==("application/json")
    ByteString.fromArray(req.getByteData).utf8String must be_==("""{"k1":"v1"}""")
  }

  "read an encoding of UTF-8" in {
    val json = io.Source.fromResource("test.json")(Codec.ISO8859).getLines.mkString

    val ahcResponse = mock[Response]
    val response    = new StandaloneAhcWSResponse(ahcResponse)

    when(ahcResponse.getResponseBody(StandardCharsets.UTF_8)).thenReturn(json)
    when(ahcResponse.getContentType).thenReturn("application/json")

    val value: JsValue = JsonBodyReadables.readableAsJson.transform(response)
    verify(ahcResponse, times(1)).getResponseBody(StandardCharsets.UTF_8)
    verify(ahcResponse, times(1)).getContentType
    value.toString must beEqualTo(json)
  }

  "read an encoding of ISO-8859-1" in {
    val json = io.Source.fromResource("test.json")(Codec.ISO8859).getLines.mkString

    val ahcResponse = mock[Response]
    val response    = new StandaloneAhcWSResponse(ahcResponse)

    when(ahcResponse.getResponseBody(StandardCharsets.ISO_8859_1)).thenReturn(json)
    when(ahcResponse.getContentType).thenReturn("application/json;charset=iso-8859-1")

    val value: JsValue = JsonBodyReadables.readableAsJson.transform(response)
    verify(ahcResponse, times(1)).getResponseBody(StandardCharsets.ISO_8859_1)
    verify(ahcResponse, times(1)).getContentType
    value.toString must beEqualTo(json)
  }
} 
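
A small, hedged sketch of the fixture-reading idiom used in the specs above (the resource name "test.json" comes from the spec; everything else is illustrative). Reading with an explicit Codec makes the decoding independent of the platform default charset:

import scala.io.{Codec, Source}

val asUtf8   = Source.fromResource("test.json")(Codec.UTF8).getLines.mkString
val asLatin1 = Source.fromResource("test.json")(Codec.ISO8859).getLines.mkString
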
Example 54
Source File: AhcWSUtils.scala    From play-ws   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.ws.ahc

import play.shaded.ahc.org.asynchttpclient.util.HttpUtils
import java.nio.charset.Charset
import java.nio.charset.StandardCharsets


private[ws] object AhcWSUtils {
  def getResponseBody(ahcResponse: play.shaded.ahc.org.asynchttpclient.Response): String = {
    val contentType = Option(ahcResponse.getContentType).getOrElse("application/octet-stream")
    val charset     = getCharset(contentType)
    ahcResponse.getResponseBody(charset)
  }

  def getCharset(contentType: String): Charset = {
    Option(HttpUtils.extractContentTypeCharsetAttribute(contentType)).getOrElse {
      if (contentType.startsWith("text/"))
        StandardCharsets.ISO_8859_1
      else
        StandardCharsets.UTF_8
    }
  }
} 
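
A quick usage sketch of the charset selection above (results follow directly from getCharset): text/* content falls back to ISO-8859-1, everything else to UTF-8, and an explicit charset attribute in the Content-Type always wins:

AhcWSUtils.getCharset("text/plain")                 // StandardCharsets.ISO_8859_1 (text/* default)
AhcWSUtils.getCharset("application/json")           // StandardCharsets.UTF_8 (non-text default)
AhcWSUtils.getCharset("text/html; charset=UTF-8")   // StandardCharsets.UTF_8 (explicit attribute wins)
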
Example 55
Source File: CodecStreams.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources

import java.io.{InputStream, OutputStream, OutputStreamWriter}
import java.nio.charset.{Charset, StandardCharsets}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.compress._
import org.apache.hadoop.mapreduce.JobContext
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.util.ReflectionUtils

import org.apache.spark.TaskContext

object CodecStreams {
  private def getDecompressionCodec(config: Configuration, file: Path): Option[CompressionCodec] = {
    val compressionCodecs = new CompressionCodecFactory(config)
    Option(compressionCodecs.getCodec(file))
  }

  def createInputStream(config: Configuration, file: Path): InputStream = {
    val fs = file.getFileSystem(config)
    val inputStream: InputStream = fs.open(file)

    getDecompressionCodec(config, file)
      .map(codec => codec.createInputStream(inputStream))
      .getOrElse(inputStream)
  }
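
  // NOTE: getCompressionCodec(context), referenced by getCompressionExtension below, was dropped from
  // this excerpt. The following is an assumed reconstruction based on the Hadoop APIs imported above,
  // not necessarily the project's exact code.
  private def getCompressionCodec(context: JobContext): Option[CompressionCodec] = {
    if (FileOutputFormat.getCompressOutput(context)) {
      val codecClass = FileOutputFormat.getOutputCompressorClass(context, classOf[GzipCodec])
      Some(ReflectionUtils.newInstance(codecClass, context.getConfiguration))
    } else {
      None
    }
  }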

  
  def getCompressionExtension(context: JobContext): String = {
    getCompressionCodec(context)
      .map(_.getDefaultExtension)
      .getOrElse("")
  }
} 
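
A brief usage sketch of createInputStream above; the configuration and path are illustrative. The decompression codec is resolved from the file extension by CompressionCodecFactory, so a ".gz" file is decompressed transparently:

val hadoopConf = new Configuration()
val in: InputStream = CodecStreams.createInputStream(hadoopConf, new Path("/data/events.json.gz"))
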
Example 56
Source File: TextOptions.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.text

import java.nio.charset.{Charset, StandardCharsets}

import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, CompressionCodecs}


// NOTE: the class declaration was missing from this excerpt; the header below is a minimal
// assumed reconstruction so the options can be read in context.
private[text] class TextOptions(@transient private val parameters: CaseInsensitiveMap[String])
  extends Serializable {

  import TextOptions._

  val wholeText = parameters.getOrElse(WHOLETEXT, "false").toBoolean

  val encoding: Option[String] = parameters.get(ENCODING)

  val lineSeparator: Option[String] = parameters.get(LINE_SEPARATOR).map { lineSep =>
    require(lineSep.nonEmpty, s"'$LINE_SEPARATOR' cannot be an empty string.")

    lineSep
  }

  // Note that the option 'lineSep' uses a different default value in read and write.
  val lineSeparatorInRead: Option[Array[Byte]] = lineSeparator.map { lineSep =>
    lineSep.getBytes(encoding.map(Charset.forName(_)).getOrElse(StandardCharsets.UTF_8))
  }
  val lineSeparatorInWrite: Array[Byte] =
    lineSeparatorInRead.getOrElse("\n".getBytes(StandardCharsets.UTF_8))
}

private[datasources] object TextOptions {
  val COMPRESSION = "compression"
  val WHOLETEXT = "wholetext"
  val ENCODING = "encoding"
  val LINE_SEPARATOR = "lineSep"
} 
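
A standalone illustration of the line-separator handling above (the encoding value is assumed, not from the original): the separator bytes for reads depend on the configured charset, while writes always use UTF-8:

import java.nio.charset.{Charset, StandardCharsets}

val encoding: Option[String] = Some("UTF-16BE")
val sepInRead: Array[Byte] =
  "\n".getBytes(encoding.map(Charset.forName(_)).getOrElse(StandardCharsets.UTF_8)) // 0x00 0x0A under UTF-16BE
val sepInWrite: Array[Byte] = "\n".getBytes(StandardCharsets.UTF_8)                 // 0x0A
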
Example 57
Source File: StreamMetadata.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.util.ConcurrentModificationException

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileAlreadyExistsException, FSDataInputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.streaming.CheckpointFileManager.CancellableFSDataOutputStream
import org.apache.spark.sql.streaming.StreamingQuery


// NOTE: the surrounding definitions were missing from this excerpt; the case class and object
// header below are minimal assumed reconstructions (Serialization.write needs implicit Formats,
// and logError comes from Logging).
case class StreamMetadata(id: String)

object StreamMetadata extends Logging {

  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: CancellableFSDataOutputStream = null
    try {
      val fileManager = CheckpointFileManager.create(metadataFile.getParent, hadoopConf)
      output = fileManager.createAtomic(metadataFile, overwriteIfPossible = false)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case e: FileAlreadyExistsException =>
        if (output != null) {
          output.cancel()
        }
        throw new ConcurrentModificationException(
          s"Multiple streaming queries are concurrently using $metadataFile", e)
      case e: Throwable =>
        if (output != null) {
          output.cancel()
        }
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    }
  }
} 
Example 58
Source File: EnrichTruckData.scala    From trucking-iot   with Apache License 2.0 5 votes vote down vote up
package com.orendainx.trucking.nifi.processors

import java.io.{InputStream, OutputStream}
import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicReference
import java.util.Scanner

import com.orendainx.trucking.commons.models.{EnrichedTruckData, TruckData}
import com.orendainx.trucking.enrichment.WeatherAPI
import org.apache.nifi.annotation.behavior._
import org.apache.nifi.annotation.documentation.{CapabilityDescription, Tags}
import org.apache.nifi.components.PropertyDescriptor
import org.apache.nifi.logging.ComponentLog
import org.apache.nifi.processor.io.InputStreamCallback
import org.apache.nifi.processor.io.OutputStreamCallback
import org.apache.nifi.processor._

import scala.collection.JavaConverters._


@Tags(Array("trucking", "data", "event", "enrich", "iot"))
@CapabilityDescription("Enriches simulated truck sensor data. Find the master project and its code, documentation and corresponding tutorials at: https://github.com/orendain/trucking-iot")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@TriggerSerially
@WritesAttributes(Array(
  new WritesAttribute(attribute = "dataType", description = "The class name of the resulting enriched data type.")
))
class EnrichTruckData extends AbstractProcessor {

  private var log: ComponentLog = _
  private val RelSuccess = new Relationship.Builder().name("success").description("All generated data is routed to this relationship.").build

  override def init(context: ProcessorInitializationContext): Unit = {
    log = context.getLogger
  }

  override def onTrigger(context: ProcessContext, session: ProcessSession): Unit = {

    var flowFile = session.get
    log.debug(s"Flowfile received: $flowFile")

    // Convert the entire stream of bytes from the flow file into a string
    val content = new AtomicReference[String]
    session.read(flowFile, new InputStreamCallback {
      override def process(inputStream: InputStream) = {
        val scanner = new Scanner(inputStream).useDelimiter("\\A")
        val result = if (scanner.hasNext()) scanner.next() else ""
        log.debug(s"Parsed content: $result")
        content.set(result)
      }
    })

    // Form a TruckData object from the content, then create an EnrichedTruckData object by making
    // the appropriate calls to WeatherAPI
    val truckData = TruckData.fromCSV(content.get())
    val enrichedTruckData = EnrichedTruckData(truckData, WeatherAPI.default.getFog(truckData.eventType),
      WeatherAPI.default.getRain(truckData.eventType), WeatherAPI.default.getWind(truckData.eventType))

    log.debug(s"EnrichedData generated: $enrichedTruckData")

    // Add the new data type as a flow file attribute
    flowFile = session.putAttribute(flowFile, "dataType", enrichedTruckData.getClass.getSimpleName)

    // Replace the flow file, writing in the new content
    flowFile = session.write(flowFile, new OutputStreamCallback {
      override def process(outputStream: OutputStream) =
        outputStream.write(enrichedTruckData.toCSV.getBytes(StandardCharsets.UTF_8))
    })

    // Record a ROUTE provenance event for the flow file, then transfer it to the success relationship
    session.getProvenanceReporter.route(flowFile, RelSuccess)
    session.transfer(flowFile, RelSuccess)
    session.commit()
  }

  // Define properties and relationships
  override def getSupportedPropertyDescriptors: java.util.List[PropertyDescriptor] = List.empty[PropertyDescriptor].asJava

  override def getRelationships: java.util.Set[Relationship] = Set(RelSuccess).asJava
} 
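
The Scanner delimited by "\\A" above slurps the whole input stream as a single token. A minimal hedged sketch of that idiom with the charset made explicit (the original call relies on the platform default charset):

import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.util.Scanner

def readFullyAsString(in: InputStream): String = {
  // "\\A" matches only the beginning of input, so next() returns the entire remaining stream
  val scanner = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")
  if (scanner.hasNext) scanner.next() else ""
}
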
Example 59
Source File: NiFiPacketToObject.scala    From trucking-iot   with Apache License 2.0 5 votes vote down vote up
package com.orendainx.trucking.storm.bolts

import java.nio.charset.StandardCharsets
import java.util

import com.orendainx.trucking.commons.models.{EnrichedTruckData, TrafficData}
import com.typesafe.scalalogging.Logger
import org.apache.nifi.storm.NiFiDataPacket
import org.apache.storm.task.{OutputCollector, TopologyContext}
import org.apache.storm.topology.OutputFieldsDeclarer
import org.apache.storm.topology.base.BaseRichBolt
import org.apache.storm.tuple.{Fields, Tuple, Values}


class NiFiPacketToObject extends BaseRichBolt {

  private lazy val log = Logger(this.getClass)
  private var outputCollector: OutputCollector = _

  override def prepare(stormConf: util.Map[_, _], context: TopologyContext, collector: OutputCollector): Unit = {
    outputCollector = collector
  }

  override def execute(tuple: Tuple): Unit = {
    val dp = tuple.getValueByField("nifiDataPacket").asInstanceOf[NiFiDataPacket]

    // Convert each tuple (really a NiFiDataPacket) into its proper case class instance (e.g. EnrichedTruckData or TrafficData)
    val (dataType, data) = dp.getAttributes.get("dataType") match {
      case typ @ "EnrichedTruckData" => (typ, EnrichedTruckData.fromCSV(new String(dp.getContent, StandardCharsets.UTF_8)))
      case typ @ "TrafficData" => (typ, TrafficData.fromCSV(new String(dp.getContent, StandardCharsets.UTF_8)))
    }

    outputCollector.emit(new Values(dataType, data))
    outputCollector.ack(tuple)
  }

  override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = declarer.declare(new Fields("dataType", "data"))
} 
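
The bolt above decodes the packet bytes with an explicit StandardCharsets.UTF_8 rather than new String(bytes), so the result does not depend on the JVM default charset. A tiny round-trip sketch with illustrative data:

val csv   = "1|2|Normal"                           // illustrative CSV payload
val bytes = csv.getBytes(StandardCharsets.UTF_8)
new String(bytes, StandardCharsets.UTF_8) == csv   // true: decoding with the same charset is lossless
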
Example 60
Source File: SerializedWithSchemaToObject.scala    From trucking-iot   with Apache License 2.0 5 votes vote down vote up
package com.orendainx.trucking.storm.bolts

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util

import com.hortonworks.registries.schemaregistry.SchemaMetadata
import com.hortonworks.registries.schemaregistry.avro.AvroSchemaProvider
import com.hortonworks.registries.schemaregistry.client.SchemaRegistryClient
import com.hortonworks.registries.schemaregistry.serdes.avro.AvroSnapshotDeserializer
import com.orendainx.trucking.commons.models.{EnrichedTruckData, TrafficData}
import com.typesafe.scalalogging.Logger
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.storm.task.{OutputCollector, TopologyContext}
import org.apache.storm.topology.OutputFieldsDeclarer
import org.apache.storm.topology.base.BaseRichBolt
import org.apache.storm.tuple.{Fields, Tuple, Values}

import scala.collection.JavaConversions._


class SerializedWithSchemaToObject extends BaseRichBolt {

  private lazy val log = Logger(this.getClass)
  private var outputCollector: OutputCollector = _

  // Declare schema-related fields to be initialized when this component's prepare() method is called
  private var schemaRegistryClient: SchemaRegistryClient = _
  private var deserializer: AvroSnapshotDeserializer = _
  private var truckDataSchemaMetadata: SchemaMetadata = _
  private var trafficDataSchemaMetadata: SchemaMetadata = _

  override def prepare(stormConf: util.Map[_, _], context: TopologyContext, collector: OutputCollector): Unit = {

    outputCollector = collector

    val schemaRegistryUrl = stormConf.get(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name()).toString
    val clientConfig = Map(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name() -> schemaRegistryUrl)

    schemaRegistryClient = new SchemaRegistryClient(clientConfig)
    truckDataSchemaMetadata = schemaRegistryClient.getSchemaMetadataInfo("EnrichedTruckData").getSchemaMetadata
    trafficDataSchemaMetadata = schemaRegistryClient.getSchemaMetadataInfo("TrafficData").getSchemaMetadata
    deserializer = schemaRegistryClient.getDefaultDeserializer(AvroSchemaProvider.TYPE).asInstanceOf[AvroSnapshotDeserializer]
    deserializer.init(clientConfig)
  }

  override def execute(tuple: Tuple): Unit = {

    // Deserialize each tuple and convert it into its proper case class (e.g. EnrichedTruckData or TrafficData)
    val str = tuple.getStringByField("data").getBytes(StandardCharsets.UTF_8)
    log.info(s"str2: ${tuple.getStringByField("data")}")
    val bytes = new ByteArrayInputStream(str)
    log.info(s"bytes: $bytes")
    val (dataType, data) = tuple.getStringByField("dataType") match {
      case typ @ "EnrichedTruckData" =>
        log.info(s"des: ${deserializer.deserialize(bytes, null)}")
        (typ, recordToEnrichedTruckData(deserializer.deserialize(bytes, null).asInstanceOf[GenericData.Record]))
      case typ @ "TrafficData" =>
        log.info(s"des: ${deserializer.deserialize(bytes, null)}")
        (typ, recordToTrafficData(deserializer.deserialize(bytes, null).asInstanceOf[GenericData.Record]))
    }

    outputCollector.emit(new Values(data, dataType))
    outputCollector.ack(tuple)
  }

  override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = declarer.declare(new Fields("data", "dataType"))

  // Helper function to convert GenericRecord (result of deserializing via Schema Registry) into JVM object
  private def recordToEnrichedTruckData(r: GenericRecord): EnrichedTruckData =
    EnrichedTruckData(
      r.get("eventTime").toString.toLong,
      r.get("truckId").toString.toInt,
      r.get("driverId").toString.toInt,
      r.get("driverName").toString,
      r.get("routeId").toString.toInt,
      r.get("routeName").toString,
      r.get("latitude").toString.toDouble,
      r.get("longitude").toString.toDouble,
      r.get("speed").toString.toInt,
      r.get("eventType").toString,
      r.get("foggy").toString.toInt,
      r.get("rainy").toString.toInt,
      r.get("windy").toString.toInt)

  // Helper function to convert GenericRecord (result of deserializing via Schema Registry) into JVM object
  private def recordToTrafficData(r: GenericRecord): TrafficData =
    TrafficData(r.get("eventTime").toString.toLong, r.get("routeId").toString.toInt, r.get("congestionLevel").toString.toInt)
} 
Example 61
Source File: ObjectToCSVString.scala    From trucking-iot   with Apache License 2.0 5 votes vote down vote up
package com.orendainx.trucking.storm.bolts

import java.nio.charset.StandardCharsets
import java.util

import com.orendainx.trucking.commons.models.{EnrichedTruckAndTrafficData, WindowedDriverStats}
import com.typesafe.scalalogging.Logger
import org.apache.storm.task.{OutputCollector, TopologyContext}
import org.apache.storm.topology.OutputFieldsDeclarer
import org.apache.storm.topology.base.BaseRichBolt
import org.apache.storm.tuple.{Fields, Tuple, Values}


class ObjectToCSVString extends BaseRichBolt {

  private lazy val log = Logger(this.getClass)
  private var outputCollector: OutputCollector = _

  override def prepare(stormConf: util.Map[_, _], context: TopologyContext, collector: OutputCollector): Unit = {
    outputCollector = collector
  }

  override def execute(tuple: Tuple): Unit = {
    val str = tuple.getStringByField("dataType") match {
      case "EnrichedTruckAndTrafficData" => tuple.getValueByField("data").asInstanceOf[EnrichedTruckAndTrafficData].toCSV
      case "WindowedDriverStats" => tuple.getValueByField("data").asInstanceOf[WindowedDriverStats].toCSV
    }

    outputCollector.emit(new Values(str))
    outputCollector.ack(tuple)
  }

  override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = declarer.declare(new Fields("data"))
} 
Example 62
Source File: BytesWithSchemaToObject.scala    From trucking-iot   with Apache License 2.0 5 votes vote down vote up
package com.orendainx.trucking.storm.bolts

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util

import com.hortonworks.registries.schemaregistry.SchemaMetadata
import com.hortonworks.registries.schemaregistry.avro.AvroSchemaProvider
import com.hortonworks.registries.schemaregistry.client.SchemaRegistryClient
import com.hortonworks.registries.schemaregistry.serdes.avro.AvroSnapshotDeserializer
import com.orendainx.trucking.commons.models.{EnrichedTruckData, TrafficData}
import com.typesafe.scalalogging.Logger
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.storm.task.{OutputCollector, TopologyContext}
import org.apache.storm.topology.OutputFieldsDeclarer
import org.apache.storm.topology.base.BaseRichBolt
import org.apache.storm.tuple.{Fields, Tuple, Values}

import scala.collection.JavaConversions._



  // Helper function to convert GenericRecord (result of deserializing via Schema Registry) into JVM object
  private def recordToEnrichedTruckData(r: GenericRecord): EnrichedTruckData =
    EnrichedTruckData(
      r.get("eventTime").toString.toLong,
      r.get("truckId").toString.toInt,
      r.get("driverId").toString.toInt,
      r.get("driverName").toString,
      r.get("routeId").toString.toInt,
      r.get("routeName").toString,
      r.get("latitude").toString.toDouble,
      r.get("longitude").toString.toDouble,
      r.get("speed").toString.toInt,
      r.get("eventType").toString,
      r.get("foggy").toString.toInt,
      r.get("rainy").toString.toInt,
      r.get("windy").toString.toInt)

  // Helper function to convert GenericRecord (result of deserializing via Schema Registry) into JVM object
  private def recordToTrafficData(r: GenericRecord): TrafficData =
    TrafficData(r.get("eventTime").toString.toLong, r.get("routeId").toString.toInt, r.get("congestionLevel").toString.toInt)
} 
Example 63
Source File: SerDe.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.serde

import java.nio.charset.StandardCharsets
import com.google.common.primitives.Longs

trait SerDe[T] {
  def serialize(t: T): Array[Byte]
  def deserialize(bytes: Array[Byte]): T
}

object StringSerDe extends SerDe[String] {
  override def serialize(str: String): Array[Byte] = {
    require(str != null, "Cannot serialize null string")
    str.getBytes(StandardCharsets.UTF_8)
  }

  override def deserialize(bytes: Array[Byte]): String = {
    require(bytes != null, "Cannot deserialize null byte array")
    new String(bytes, StandardCharsets.UTF_8)
  }

}

object BytesSerDe extends SerDe[Array[Byte]] {
  override def serialize(ba: Array[Byte]): Array[Byte] = {
    require(ba != null, "Cannot serialize null array")
    ba
  }

  override def deserialize(bytes: Array[Byte]): Array[Byte] = {
    require(bytes != null, "Cannot deserialize null byte array")
    bytes
  }
}

object LongSerDe extends SerDe[Long] {
  override def serialize(t: Long): Array[Byte] = {
    Longs.toByteArray(t)
  }

  override def deserialize(bytes: Array[Byte]): Long = {
    Longs.fromByteArray(bytes)
  }
} 
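
A quick usage sketch of the SerDe instances above (values are illustrative). Non-ASCII text survives the round trip because both directions use UTF-8 explicitly, and LongSerDe relies on Guava's big-endian encoding:

val strBytes: Array[Byte] = StringSerDe.serialize("héllo")   // UTF-8 bytes
StringSerDe.deserialize(strBytes)                            // "héllo"

val longBytes: Array[Byte] = LongSerDe.serialize(42L)        // 8-byte big-endian representation
LongSerDe.deserialize(longBytes)                             // 42L
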
Example 64
Source File: RowCSVWriter.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.report


import java.io.{BufferedWriter, File, FileOutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, StandardOpenOption}

// NOTE: the imports above are reconstructed for this excerpt; most of the RowCSVWriter class body
// was dropped, leaving only close() and the writer providers below.
  def close() {
    csvWriter.close()
  }

}

trait RowCSVWriterProvider {
  def newRowCSVWriter: RowCSVWriter
}

case class FileRowCSVWriterProvider(file: File) extends RowCSVWriterProvider {
  def newRowCSVWriter: RowCSVWriter = {
    if(file.exists() && file.length() > 0) {
      Files.write(file.toPath, Array[Byte](), StandardOpenOption.TRUNCATE_EXISTING) // Clear file
    }
    val fos = new FileOutputStream(file.getAbsoluteFile, true)
    val writerTry = safeCloseable(fos)(new OutputStreamWriter(_, StandardCharsets.UTF_8))
      .flatMap(safeCloseable(_)(new BufferedWriter(_)))
      .flatMap(safeCloseable(_)(new RowCSVWriter(_, RowCSVWriter.DEFAULT_SEPARATOR)))
    require(writerTry.isSuccess, s"Failed to create RowCSVWriter safely : $writerTry")
    writerTry.get
  }
} 
Example 65
Source File: NodeIdVectorClockBase64.scala    From JustinDB   with Apache License 2.0 5 votes vote down vote up
package justin.db.versioning

import java.nio.charset.{Charset, StandardCharsets}
import java.util.Base64

import justin.db.consistenthashing.NodeId
import justin.db.vectorclocks.{Counter, VectorClock}
import spray.json.DefaultJsonProtocol._
import spray.json._

import scala.util.Try

object NodeIdVectorClockBase64 {
  val charset: Charset = StandardCharsets.UTF_8
}

class NodeIdVectorClockBase64 {
  import NodeIdVectorClockBase64._

  def encode(vclock: VectorClock[NodeId]): Try[String] = Try {
    val vcClockBytes = vclock.toList
      .map { case (nodeId, counter) => (nodeId.id.toString, counter.value) }
      .toJson
      .compactPrint
      .getBytes(charset)

    Base64.getEncoder.encodeToString(vcClockBytes)
  }

  def decode(base64: String): Try[VectorClock[NodeId]] = Try {
    val decodedMap = new String(Base64.getDecoder.decode(base64), charset)
      .parseJson.convertTo[List[(String, Int)]]
      .map { case (k, v) => (NodeId(k.toInt), Counter(v))}
      .toMap

    VectorClock.apply(decodedMap)
  }
} 
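
A usage sketch of the codec above, assuming the NodeId and Counter constructors already used in decode. The clock is rendered as JSON, encoded to UTF-8 bytes, and Base64-encoded into a transport-safe string:

import justin.db.consistenthashing.NodeId
import justin.db.vectorclocks.{Counter, VectorClock}

val codec   = new NodeIdVectorClockBase64
val vclock  = VectorClock(Map(NodeId(1) -> Counter(3), NodeId(2) -> Counter(1)))
val encoded = codec.encode(vclock)            // Try[String]: Base64 of the UTF-8 JSON payload
val decoded = encoded.flatMap(codec.decode)   // Try[VectorClock[NodeId]]: Success(vclock) if the round trip is lossless
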
Example 66
Source File: EnumSpec.scala    From finagle-postgres   with Apache License 2.0 5 votes vote down vote up
package com.twitter.finagle.postgres.generic

import java.nio.charset.StandardCharsets

import com.twitter.finagle.postgres.generic.enumeration.InvalidValue
import com.twitter.finagle.postgres.values.{ValueDecoder, ValueEncoder}
import com.twitter.util.{Return, Throw}
import io.netty.buffer.Unpooled
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class EnumSpec extends AnyFlatSpec with Matchers {

  sealed trait TestEnum
  case object CaseOne extends TestEnum
  case object CaseTwo extends TestEnum

  sealed trait AnotherBranch extends TestEnum
  case object CaseThree extends AnotherBranch

  val UTF8 = StandardCharsets.UTF_8


  "Enum decoding" should "decode enumeration ADTs from strings" in  {

    val decoder = ValueDecoder[TestEnum]

    decoder.decodeText("enum_recv", "CaseOne") shouldEqual Return(CaseOne)
    decoder.decodeText("enum_recv", "CaseTwo") shouldEqual Return(CaseTwo)
    decoder.decodeText("enum_recv", "CaseThree") shouldEqual Return(CaseThree)

    decoder.decodeBinary(
      "enum_recv",
      Unpooled.copiedBuffer("CaseOne", UTF8),
      UTF8
    ) shouldEqual Return(CaseOne)

    decoder.decodeBinary(
      "enum_recv",
      Unpooled.copiedBuffer("CaseTwo", UTF8),
      UTF8
    ) shouldEqual Return(CaseTwo)

    decoder.decodeBinary(
      "enum_recv",
      Unpooled.copiedBuffer("CaseThree", UTF8),
      UTF8
    ) shouldEqual Return(CaseThree)

  }

  it should "fail for an invalid value" in {
    val decoder = ValueDecoder[TestEnum]

    decoder.decodeText("enum_recv", "CasePurple") shouldEqual Throw(InvalidValue("CasePurple"))
    decoder.decodeBinary(
      "enum_recv",
      Unpooled.copiedBuffer("CasePurple", UTF8),
      UTF8
    ) shouldEqual Throw(InvalidValue("CasePurple"))

  }

  "Enum encoding" should "encode enumeration ADTs to Strings" in {
    val encoder = ValueEncoder[TestEnum]
    encoder.encodeText(CaseOne) shouldEqual Some("CaseOne")
    encoder.encodeText(CaseTwo) shouldEqual Some("CaseTwo")
    encoder.encodeText(CaseThree) shouldEqual Some("CaseThree")
    encoder.encodeBinary(CaseOne, UTF8).get.toString(UTF8) shouldEqual "CaseOne"
    encoder.encodeBinary(CaseTwo, UTF8).get.toString(UTF8) shouldEqual "CaseTwo"
    encoder.encodeBinary(CaseThree, UTF8).get.toString(UTF8) shouldEqual "CaseThree"
  }

} 
Example 67
Source File: Generators.scala    From finagle-postgres   with Apache License 2.0 5 votes vote down vote up
package com.twitter.finagle.postgres

import java.nio.charset.StandardCharsets
import java.time.{ZonedDateTime, _}
import java.time.temporal.JulianFields
import java.util.UUID

import org.scalacheck.{Arbitrary, Gen}
import Arbitrary.arbitrary
import com.twitter.finagle.postgres.values.Interval

object Generators {
  //need a more sensible BigDecimal generator, because ScalaCheck goes crazy with it and we can't even stringify them
  //this will be sufficient to test the decoder
  implicit val arbBD: Arbitrary[BigDecimal] = Arbitrary(for {
    precision <- Gen.choose(1, 32)
    scale <- Gen.choose(-precision, precision)
    digits <- Gen.listOfN[Char](precision, Gen.numChar)
  } yield BigDecimal(BigDecimal(digits.mkString).bigDecimal.movePointLeft(scale)))

  implicit val arbDate = Arbitrary[LocalDate](for {
    julian <- Gen.choose(1721060, 5373484)  //Postgres date parser doesn't like dates outside year range 0000-9999
  } yield LocalDate.now().`with`(JulianFields.JULIAN_DAY, julian))

  implicit val arbTime: Arbitrary[LocalTime] = Arbitrary[LocalTime](for {
    usec <- Gen.choose(0L, 24L * 60 * 60 * 1000000 - 1)
  } yield LocalTime.ofNanoOfDay(usec * 1000))

  implicit val arbInstant = Arbitrary[Instant](for {
    milli <- Gen.posNum[Long]
  } yield Instant.ofEpochMilli(milli))

  implicit val arbTimestamp = Arbitrary[LocalDateTime](for {
    milli <- Gen.posNum[Long]
  } yield LocalDateTime.ofInstant(Instant.ofEpochMilli(milli), ZoneId.systemDefault()))

  implicit val arbTimestampTz = Arbitrary[ZonedDateTime](for {
    milli <- Gen.posNum[Long]
  } yield ZonedDateTime.ofInstant(Instant.ofEpochMilli(milli), ZoneId.systemDefault()))

  implicit val arbZoneOffset = Arbitrary(Gen.choose(-12, 12).map(ZoneOffset.ofHours))

  implicit val arbInterval = Arbitrary(for {
    months <- Gen.choose(-120, 120)
    years <- Gen.choose(-10, 10)
    days <- Gen.choose(-50, 50)
    hours <- Gen.choose(-50, 50)
    minutes <- Gen.choose(0, 59)
    seconds <- Gen.choose(0, 59)
  } yield Interval(
    Duration.ofSeconds(seconds).plusMinutes(minutes).plusHours(hours),
    Period.ofMonths(months).plusYears(years).plusDays(days)
  ))

  implicit val arbTimeTz = Arbitrary[OffsetTime](for {
    time <- arbitrary[LocalTime]
    offs <- arbitrary[ZoneOffset]
  } yield time.atOffset(offs))

  implicit val arbUUID = Arbitrary[UUID](Gen.uuid)

  // arbitrary string that only contains valid UTF-8 characters
  val utf8 = StandardCharsets.UTF_8.newEncoder()
  implicit val arbUTF8String = Arbitrary(arbitrary[String].filter {
    str => utf8.canEncode(str) && !str.contains('\u0000')
  })

  // TODO: can empty maps be supported?
  implicit val arbHStore: Arbitrary[Map[String, Option[String]]] = Arbitrary(
    Gen.mapOf(for {
      k <- Gen.identifier
      v <- Gen.oneOf(Gen.alphaStr.map(Some(_)), Gen.const(None))
    } yield (k, v)).suchThat(_.nonEmpty)
  )

  // postgres has slightly different precision rules, but that doesn't mean the decoder isn't working
  implicit val arbFloat = Arbitrary[Float](for {
    precision <- Gen.choose(1, 6)
    scale <- Gen.choose(-10, 10)
    digits <- Gen.listOfN[Char](precision, Gen.numChar)
  } yield BigDecimal(BigDecimal(digits.mkString).bigDecimal.movePointLeft(scale)).toFloat)

  implicit val arbDouble = Arbitrary[Double](for {
    precision <- Gen.choose(1, 15)
    scale <- Gen.choose(-20, 20)
    digits <- Gen.listOfN[Char](precision, Gen.numChar)
  } yield BigDecimal(BigDecimal(digits.mkString).bigDecimal.movePointLeft(scale)).toDouble)
} 
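
The arbUTF8String generator above filters candidate strings with CharsetEncoder.canEncode, dropping values (such as lone surrogates) that cannot be represented in UTF-8. A small illustration:

val encoder = StandardCharsets.UTF_8.newEncoder()
encoder.canEncode("plain ascii and ünïcödé")   // true: every character has a UTF-8 encoding
encoder.canEncode("\ud800")                    // false: an unpaired surrogate cannot be encoded
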
Example 68
Source File: AntlrRawFileType.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.grammar

import java.nio.charset.StandardCharsets

import com.atomist.source.FileArtifact
import com.atomist.tree.content.text.PositionedTreeNode
import com.atomist.tree.content.text.grammar.antlr.{AntlrGrammar, AstNodeCreationStrategy}
import com.atomist.util.Utils.withCloseable
import org.apache.commons.io.IOUtils
import org.springframework.core.io.DefaultResourceLoader


abstract class AntlrRawFileType(
                                 topLevelProduction: String,
                                 nodeCreationStrategy: AstNodeCreationStrategy,
                                 grammars: String*
                               )
  extends TypeUnderFile {

  private val g4s: Seq[String] = {
    val cp = new DefaultResourceLoader()
    val resources = grammars.map(grammar => cp.getResource(grammar))
    resources.map(r => withCloseable(r.getInputStream)(is => IOUtils.toString(is, StandardCharsets.UTF_8)))
  }

  private[kind] def parser = antlrGrammar

  private lazy val antlrGrammar = new AntlrGrammar(topLevelProduction, nodeCreationStrategy, g4s: _*)

  override def fileToRawNode(f: FileArtifact): Option[PositionedTreeNode] = {
    antlrGrammar.parse(f.content)
  }
} 
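
A sketch of the classpath-loading idiom above in isolation; the grammar path is hypothetical, and withCloseable is the project helper imported above. IOUtils.toString is given an explicit charset so the grammar text is decoded as UTF-8:

val loader = new DefaultResourceLoader()
val grammarText: String =
  withCloseable(loader.getResource("classpath:grammars/Example.g4").getInputStream) { is =>
    IOUtils.toString(is, StandardCharsets.UTF_8)
  }
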
Example 69
Source File: NrsService.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package v1.services

import java.nio.charset.StandardCharsets
import java.util.Base64

import cats.data.EitherT
import cats.implicits._
import javax.inject.Inject
import org.joda.time.DateTime
import play.api.libs.json.Json
import uk.gov.hmrc.http.HeaderCarrier
import v1.connectors.NrsConnector
import v1.controllers.UserRequest
import v1.models.errors.{DownstreamError, ErrorWrapper}
import v1.models.nrs.request.{Metadata, NrsSubmission, SearchKeys}
import v1.models.nrs.response.NrsResponse
import v1.models.request.submit.SubmitRequest

import scala.concurrent.{ExecutionContext, Future}

class NrsService @Inject()(connector: NrsConnector) {

  def submitNrs(vatSubmission: SubmitRequest, submissionTimestamp: DateTime)(
    implicit request: UserRequest[_],
    hc: HeaderCarrier,
    ec: ExecutionContext): Future[Either[ErrorWrapper, NrsResponse]] = {

    val result = for {
      nrsResponse <- EitherT(connector.submitNrs(buildNrsSubmission(vatSubmission, submissionTimestamp, request)))
        .leftMap(_ => ErrorWrapper(None, DownstreamError, None))
    } yield nrsResponse

    result.value
  }

  def buildNrsSubmission(vatSubmission: SubmitRequest, submissionTimestamp: DateTime, request: UserRequest[_]): NrsSubmission = {

    import vatSubmission._

    val payloadString: String =
      Base64.getEncoder.encodeToString(
        Json.toJson(body)
          .toString()
          .getBytes(StandardCharsets.UTF_8)
      )

    NrsSubmission(
      payload = payloadString,
      Metadata(
        businessId = "vat",
        notableEvent = "vat-return",
        payloadContentType = "application/json",
        payloadSha256Checksum = None,
        userSubmissionTimestamp = submissionTimestamp,
        identityData = request.userDetails.identityData,
        userAuthToken = request.headers.get("Authorization").get,
        headerData = Json.toJson(request.headers.toMap.map { h => h._1 -> h._2.head }),
        searchKeys =
          SearchKeys(
            vrn = Some(vrn.vrn),
            companyName = None,
            periodKey = body.periodKey,
            taxPeriodEndDate = None
          )
      )
    )
  }
} 
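
The payload above is built by serializing the request body to JSON, encoding it as UTF-8 bytes, and Base64-encoding those bytes. A minimal round-trip sketch (the JSON value is illustrative):

val json    = """{"periodKey":"#001"}"""
val payload = Base64.getEncoder.encodeToString(json.getBytes(StandardCharsets.UTF_8))
val back    = new String(Base64.getDecoder.decode(payload), StandardCharsets.UTF_8)   // == json
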
Example 70
Source File: NRSService.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.vatapi.services

import java.nio.charset.StandardCharsets
import org.joda.time.DateTime
import java.util.Base64

import javax.inject.Inject
import play.api.Logger
import play.api.libs.json.Json
import uk.gov.hmrc.domain.Vrn
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.vatapi.connectors.NRSConnector
import uk.gov.hmrc.vatapi.httpparsers.NrsSubmissionHttpParser.NrsSubmissionOutcome
import uk.gov.hmrc.vatapi.models.{Metadata, NRSSubmission, SearchKeys, VatReturnDeclaration}
import uk.gov.hmrc.vatapi.resources.AuthRequest

import scala.concurrent.{ExecutionContext, Future}


class NRSService @Inject()(
                            nrsConnector: NRSConnector
                          ) {

  val logger: Logger = Logger(this.getClass)

  def submit(vrn: Vrn, submission: NRSSubmission)(implicit hc: HeaderCarrier, ec: ExecutionContext): Future[NrsSubmissionOutcome] = {
    logger.debug(s"[NRSService][submit] - Submitting payload to NRS")
    nrsConnector.submit(vrn, submission)
  }

  def convertToNrsSubmission(vrn: Vrn, payload: VatReturnDeclaration)(implicit request: AuthRequest[_]): NRSSubmission = {

    val encoder = Base64.getEncoder
    NRSSubmission(
      payload = encoder.encodeToString(Json.toJson(payload).toString.getBytes(StandardCharsets.UTF_8)),
      metadata = Metadata(
        businessId = "vat",
        notableEvent = "vat-return",
        payloadContentType = "application/json",
        payloadSha256Checksum = None,
        userSubmissionTimestamp = DateTime.now(),
        identityData = request.authContext.identityData,
        userAuthToken = request.headers.get("Authorization").get,
        headerData = Json.toJson(request.headers.toMap.map { h => h._1 -> h._2.head }),
        searchKeys = SearchKeys(
          vrn = Some(vrn),
          periodKey = Some(payload.periodKey)
        )
      )
    )
  }
} 
Example 71
Source File: BlockingInvokeOneActionSimulation.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk

import java.nio.charset.StandardCharsets

import org.apache.openwhisk.extension.whisk.OpenWhiskProtocolBuilder
import org.apache.openwhisk.extension.whisk.Predef._
import io.gatling.core.Predef._
import io.gatling.core.structure.ScenarioBuilder
import io.gatling.core.util.ClasspathPackagedResource

import scala.concurrent.duration._

class BlockingInvokeOneActionSimulation extends Simulation {
  // Specify parameters for the run
  val host = sys.env("OPENWHISK_HOST")

  // Specify authentication
  val Array(uuid, key) = sys.env("API_KEY").split(":")

  val connections: Int = sys.env("CONNECTIONS").toInt
  val seconds: FiniteDuration = sys.env.getOrElse("SECONDS", "10").toInt.seconds

  // Specify thresholds
  val requestsPerSec: Int = sys.env("REQUESTS_PER_SEC").toInt
  val minimalRequestsPerSec: Int = sys.env.getOrElse("MIN_REQUESTS_PER_SEC", requestsPerSec.toString).toInt
  val maxErrorsAllowed: Int = sys.env.getOrElse("MAX_ERRORS_ALLOWED", "0").toInt
  val maxErrorsAllowedPercentage: Double = sys.env.getOrElse("MAX_ERRORS_ALLOWED_PERCENTAGE", "0.1").toDouble

  // Generate the OpenWhiskProtocol
  val openWhiskProtocol: OpenWhiskProtocolBuilder = openWhisk.apiHost(host)

  // Specify async
  val async = sys.env.getOrElse("ASYNC", "false").toBoolean

  val actionName = "testActionForBlockingInvokeOneAction"
  val actionfile = if (async) "/data/nodeJSAsyncAction.js" else "/data/nodeJSAction.js"

  // Define scenario
  val test: ScenarioBuilder = scenario(s"Invoke one ${if (async) "async" else "sync"} action blocking")
    .doIf(_.userId == 1) {
      exec(openWhisk("Create action")
        .authenticate(uuid, key)
        .action(actionName)
        .create(ClasspathPackagedResource(actionfile, getClass.getResource(actionfile)).string(StandardCharsets.UTF_8)))
    }
    .rendezVous(connections)
    .during(5.seconds) {
      exec(openWhisk("Warm containers up").authenticate(uuid, key).action(actionName).invoke())
    }
    .rendezVous(connections)
    .during(seconds) {
      exec(openWhisk("Invoke action").authenticate(uuid, key).action(actionName).invoke())
    }
    .rendezVous(connections)
    .doIf(_.userId == 1) {
      exec(openWhisk("Delete action").authenticate(uuid, key).action(actionName).delete())
    }

  setUp(test.inject(atOnceUsers(connections)))
    .protocols(openWhiskProtocol)
    // One failure will make the build yellow
    .assertions(details("Invoke action").requestsPerSec.gt(minimalRequestsPerSec))
    .assertions(details("Invoke action").requestsPerSec.gt(requestsPerSec))
    // Mark the build yellow, if there are failed requests. And red if both conditions fail.
    .assertions(details("Invoke action").failedRequests.count.lte(maxErrorsAllowed))
    .assertions(details("Invoke action").failedRequests.percent.lte(maxErrorsAllowedPercentage))
} 
Example 72
Source File: UserEventTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.common

import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import common._
import common.rest.WskRestOperations
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.apache.openwhisk.connector.kafka.KafkaConsumerConnector
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.connector.{Activation, EventMessage, Metric}

import scala.concurrent.duration._

@RunWith(classOf[JUnitRunner])
class UserEventTests extends FlatSpec with Matchers with WskTestHelpers with StreamLogging with BeforeAndAfterAll {

  implicit val wskprops = WskProps()
  implicit val system = ActorSystem("UserEventTestSystem")

  val wsk = new WskRestOperations

  val groupid = "kafkatest"
  val topic = "events"
  val maxPollInterval = 60.seconds

  lazy val consumer = new KafkaConsumerConnector(kafkaHosts, groupid, topic)
  val testActionsDir = WhiskProperties.getFileRelativeToWhiskHome("tests/dat/actions")
  behavior of "UserEvents"

  override def afterAll(): Unit = {
    consumer.close()
  }

  def kafkaHosts: String = new WhiskConfig(WhiskConfig.kafkaHosts).kafkaHosts

  def userEventsEnabled: Boolean = UserEvents.enabled

  if (userEventsEnabled) {
    it should "invoke an action and produce user events" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
      val file = Some(TestUtils.getTestActionFilename("hello.js"))
      val name = "testUserEvents"

      assetHelper.withCleaner(wsk.action, name, confirmDelete = true) { (action, _) =>
        action.create(name, file)
      }

      val run = wsk.action.invoke(name, blocking = true)

      withActivation(wsk.activation, run) { result =>
        withClue("invoking an action was unsuccessful") {
          result.response.status shouldBe "success"
        }
      }
      // checking for any metrics to arrive
      val received =
        consumer.peek(maxPollInterval).map {
          case (_, _, _, msg) => EventMessage.parse(new String(msg, StandardCharsets.UTF_8))
        }
      received.map(event => {
        event.get.body match {
          case a: Activation =>
            Seq(a.statusCode) should contain oneOf (0, 1, 2, 3)
            event.get.source should fullyMatch regex "(invoker|controller)\\w+".r
          case m: Metric =>
            Seq(m.metricName) should contain oneOf ("ConcurrentInvocations", "ConcurrentRateLimit", "TimedRateLimit")
            event.get.source should fullyMatch regex "controller\\w+".r
        }
      })
      // produce at least 2 events - an Activation and a 'ConcurrentInvocations' Metric
      // >= 2 is due to events that might have potentially occurred in between
      received.size should be >= 2
      consumer.commit()
    }

  }

} 
Example 73
Source File: RemoteCacheInvalidation.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.database

import java.nio.charset.StandardCharsets

import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
import scala.util.Failure
import scala.util.Success
import scala.util.Try

import akka.actor.ActorSystem
import akka.actor.Props
import spray.json._
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.connector.Message
import org.apache.openwhisk.core.connector.MessageFeed
import org.apache.openwhisk.core.connector.MessagingProvider
import org.apache.openwhisk.core.entity.CacheKey
import org.apache.openwhisk.core.entity.ControllerInstanceId
import org.apache.openwhisk.core.entity.WhiskAction
import org.apache.openwhisk.core.entity.WhiskActionMetaData
import org.apache.openwhisk.core.entity.WhiskPackage
import org.apache.openwhisk.core.entity.WhiskRule
import org.apache.openwhisk.core.entity.WhiskTrigger
import org.apache.openwhisk.spi.SpiLoader

case class CacheInvalidationMessage(key: CacheKey, instanceId: String) extends Message {
  override def serialize = CacheInvalidationMessage.serdes.write(this).compactPrint
}

object CacheInvalidationMessage extends DefaultJsonProtocol {
  def parse(msg: String) = Try(serdes.read(msg.parseJson))
  implicit val serdes = jsonFormat(CacheInvalidationMessage.apply _, "key", "instanceId")
}

class RemoteCacheInvalidation(config: WhiskConfig, component: String, instance: ControllerInstanceId)(
  implicit logging: Logging,
  as: ActorSystem) {
  import RemoteCacheInvalidation._
  implicit private val ec = as.dispatchers.lookup("dispatchers.kafka-dispatcher")

  private val instanceId = s"$component${instance.asString}"

  private val msgProvider = SpiLoader.get[MessagingProvider]
  private val cacheInvalidationConsumer =
    msgProvider.getConsumer(config, s"$cacheInvalidationTopic$instanceId", cacheInvalidationTopic, maxPeek = 128)
  private val cacheInvalidationProducer = msgProvider.getProducer(config)

  def notifyOtherInstancesAboutInvalidation(key: CacheKey): Future[Unit] = {
    cacheInvalidationProducer.send(cacheInvalidationTopic, CacheInvalidationMessage(key, instanceId)).map(_ => ())
  }

  private val invalidationFeed = as.actorOf(Props {
    new MessageFeed(
      "cacheInvalidation",
      logging,
      cacheInvalidationConsumer,
      cacheInvalidationConsumer.maxPeek,
      1.second,
      removeFromLocalCache)
  })

  def invalidateWhiskActionMetaData(key: CacheKey) =
    WhiskActionMetaData.removeId(key)

  private def removeFromLocalCache(bytes: Array[Byte]): Future[Unit] = Future {
    val raw = new String(bytes, StandardCharsets.UTF_8)

    CacheInvalidationMessage.parse(raw) match {
      case Success(msg: CacheInvalidationMessage) => {
        if (msg.instanceId != instanceId) {
          WhiskActionMetaData.removeId(msg.key)
          WhiskAction.removeId(msg.key)
          WhiskPackage.removeId(msg.key)
          WhiskRule.removeId(msg.key)
          WhiskTrigger.removeId(msg.key)
        }
      }
      case Failure(t) => logging.error(this, s"failed processing message: $raw with $t")
    }
    invalidationFeed ! MessageFeed.Processed
  }
}

object RemoteCacheInvalidation {
  val cacheInvalidationTopic = "cacheInvalidation"
} 
Example 74
Source File: IO.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.control
import java.io.{ByteArrayOutputStream, File, InputStream}
import java.net.URL
import java.nio.charset.StandardCharsets

import wvlet.airframe.control.Control.withResource


object IO {

  def readAsString(f: File): String = {
    readAsString(f.toURI.toURL)
  }

  def readAsString(url: URL): String = {
    withResource(url.openStream()) { in => readAsString(in) }
  }

  def readAsString(in: InputStream): String = {
    new String(readFully(in), StandardCharsets.UTF_8)
  }

  def readFully(in: InputStream): Array[Byte] = {
    val byteArray =
      if (in == null) {
        Array.emptyByteArray
      } else {
        withResource(new ByteArrayOutputStream) { b =>
          val buf = new Array[Byte](8192)
          withResource(in) { src =>
            var readBytes = 0
            while ({
              readBytes = src.read(buf);
              readBytes != -1
            }) {
              b.write(buf, 0, readBytes)
            }
          }
          b.toByteArray
        }
      }
    byteArray
  }

} 
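
A short usage sketch of the readers above (paths and URLs are illustrative). Every overload ends up in readFully and decodes the collected bytes as UTF-8:

val fromFile   = IO.readAsString(new File("README.md"))
val fromUrl    = IO.readAsString(new URL("https://example.com/data.txt"))
val fromStream = IO.readAsString(new java.io.ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8)))
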
Example 75
Source File: OkHttpTest.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.http.okhttp

import java.nio.charset.StandardCharsets

import okhttp3.internal.http.HttpMethod
import okhttp3.{Protocol, Request, RequestBody, Response, ResponseBody}
import wvlet.airframe.http.{HttpMultiMap, HttpStatus}
import wvlet.airspec.AirSpec

class OkHttpTest extends AirSpec {

  def `provide facade of http requests`: Unit = {
    val body = RequestBody.create(ContentTypeJson, "hello okhttp")
    Seq(
      new Request.Builder().get(),
      new Request.Builder().post(body),
      new Request.Builder().delete(body),
      new Request.Builder().put(body),
      new Request.Builder().patch(body),
      new Request.Builder().head(),
      new Request.Builder().method("OPTIONS", body),
      new Request.Builder().method("TRACE", body)
    ).foreach { builder =>
      val req = builder.url("http://localhost/hello").build()
      val r   = req.toHttpRequest
      r.method shouldBe toHttpMethod(req.method())
      r.path shouldBe "/hello"
      r.query shouldBe HttpMultiMap.empty
      if (HttpMethod.permitsRequestBody(req.method())) {
        r.contentString shouldBe "hello okhttp"
        r.contentBytes shouldBe "hello okhttp".getBytes(StandardCharsets.UTF_8)
        r.contentType shouldBe Some("application/json;charset=utf-8")
      } else {
        r.contentString shouldBe ""
        r.contentBytes shouldBe Array.empty[Byte]
        r.contentType shouldBe empty
      }
      req.toRaw shouldBeTheSameInstanceAs req
    }
  }

  def `provide facade of http responses`: Unit = {
    val res = new Response.Builder()
      .code(403)
      .body(ResponseBody.create(ContentTypeJson, "hello world"))
      .request(new Request.Builder().url("http://localhost/").get().build())
      .protocol(Protocol.HTTP_1_1)
      .message("message")
      .build()

    val r = res.toHttpResponse
    r.status shouldBe HttpStatus.Forbidden_403
    r.statusCode shouldBe 403
    r.contentType shouldBe Some("application/json;charset=utf-8")
    r.contentBytes shouldBe "hello world".getBytes(StandardCharsets.UTF_8)
    res.toRaw shouldBeTheSameInstanceAs res
  }

} 
Example 76
Source File: IOUtil.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.log.io

import java.io._
import java.net.{ServerSocket, URL}
import java.nio.charset.StandardCharsets


object IOUtil {
  def withResource[Resource <: AutoCloseable, U](resource: Resource)(body: Resource => U): U = {
    try {
      body(resource)
    } finally {
      resource.close
    }
  }

  def withTempFile[U](name: String, suffix: String = ".tmp", dir: String = "target")(body: File => U) = {
    val d = new File(dir)
    d.mkdirs()
    val f = File.createTempFile(name, suffix, d)
    try {
      body(f)
    } finally {
      f.delete()
    }
  }

  def randomPort: Int = unusedPort
  def unusedPort: Int = {
    withResource(new ServerSocket(0)) { socket => socket.getLocalPort }
  }

  def findPath(path: String): Option[File] = findPath(new File(path))

  def findPath(path: File): Option[File] = {
    if (path.exists()) {
      Some(path)
    } else {
      val defaultPath = new File(new File(System.getProperty("prog.home", "")), path.getPath)
      if (defaultPath.exists()) {
        Some(defaultPath)
      } else {
        None
      }
    }
  }

  def readAsString(f: File): String = {
    readAsString(f.toURI.toURL)
  }

  def readAsString(url: URL): String = {
    withResource(url.openStream()) { in => readAsString(in) }
  }

  def readAsString(resourcePath: String): String = {
    require(resourcePath != null, s"resourcePath is null")
    Resource
      .find(resourcePath)
      .map(readAsString(_))
      .getOrElse {
        val file = findPath(new File(resourcePath))
        if (file.isEmpty) {
          throw new FileNotFoundException(s"Not found ${resourcePath}")
        }
        readAsString(new FileInputStream(file.get))
      }
  }

  def readAsString(in: InputStream): String = {
    readFully(in) { data => new String(data, StandardCharsets.UTF_8) }
  }

  def readFully[U](in: InputStream)(f: Array[Byte] => U): U = {
    val byteArray = withResource(new ByteArrayOutputStream) { b =>
      val buf = new Array[Byte](8192)
      withResource(in) { src =>
        var readBytes = 0
        while ({
          readBytes = src.read(buf);
          readBytes != -1
        }) {
          b.write(buf, 0, readBytes)
        }
      }
      b.toByteArray
    }
    f(byteArray)
  }
} 
Example 77
Source File: ValueCodecTest.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.codec

import java.nio.charset.StandardCharsets
import java.util.Base64

import wvlet.airframe.codec.PrimitiveCodec.ValueCodec
import wvlet.airframe.msgpack.spi.Value.StringValue
import wvlet.airframe.msgpack.spi.{MessagePack, MsgPack, Value, ValueFactory}

object ValueCodecTest {
  case class ValueTest(v: Value)
  case class RawByteArrayTest(rawByteArray: Array[Byte])
  case class RawMsgpackTest2(msgpack: MsgPack)
}

import wvlet.airframe.codec.ValueCodecTest._


class ValueCodecTest extends CodecSpec {
  scalaJsSupport

  def `support MessagePack values`: Unit = {
    roundtrip(ValueCodec, ValueFactory.newInteger(1), DataType.ANY)
    roundtrip(ValueCodec, ValueFactory.newString("hello msgpack"), DataType.ANY)
    roundtrip(ValueCodec, ValueFactory.newBoolean(true), DataType.ANY)
    roundtrip(ValueCodec, ValueFactory.newFloat(0.1234d), DataType.ANY)
  }

  def `accept value`: Unit = {
    val codec = MessageCodec.of[ValueTest]
    codec.unpackJson("""{"v":"hello msgpack"}""") shouldBe Some(ValueTest(StringValue("hello msgpack")))
  }

  def `support string to Array[Byte] conversion`: Unit = {
    val codec = MessageCodec.of[RawByteArrayTest]
    codec.unpackJson("""{"rawByteArray":"hello msgpack"}""") match {
      case Some(x) =>
        x.rawByteArray shouldBe "hello msgpack".getBytes(StandardCharsets.UTF_8)
      case _ =>
        fail("failed to parse msgpack")
    }
  }

  def `support BASE64-encoded string to Array[Byte] conversion`: Unit = {
    val base64 = Base64.getEncoder.encodeToString("hello msgpack".getBytes(StandardCharsets.UTF_8))
    val codec  = MessageCodec.of[RawByteArrayTest]
    codec.unpackJson(s"""{"rawByteArray":"${base64}"}""") match {
      case Some(x) =>
        x.rawByteArray shouldBe "hello msgpack".getBytes(StandardCharsets.UTF_8)
      case _ =>
        fail("failed to parse msgpack")
    }
  }

  def `accept MsgPack type`: Unit = {
    val codec = MessageCodec.of[RawMsgpackTest2]
    codec.unpackJson("""{"msgpack":"hello msgpack"}""") match {
      case Some(x) =>
        MessagePack.newUnpacker(x.msgpack).unpackValue shouldBe StringValue("hello msgpack")
      case _ =>
        fail("failed to parse msgpack")
    }
  }
} 
Example 78
Source File: FinagleTest.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.http.finagle

import java.nio.charset.StandardCharsets

import com.twitter.finagle.http
import com.twitter.finagle.http.Status
import wvlet.airframe.http.{HttpMultiMap, HttpStatus}
import wvlet.airspec.AirSpec


class FinagleTest extends AirSpec {
  import wvlet.airframe.http.finagle._

  def `provide facade of http requests`: Unit = {
    Seq(
      http.Method.Get,
      http.Method.Post,
      http.Method.Delete,
      http.Method.Put,
      http.Method.Patch,
      http.Method.Head,
      http.Method.Options,
      http.Method.Trace
    ).foreach { m =>
      val req = http.Request(m, "/hello")
      req.setContentString("hello finagle")
      req.setContentTypeJson()
      val r = req.toHttpRequest
      r.method shouldBe toHttpMethod(m)
      r.path shouldBe "/hello"
      r.query shouldBe HttpMultiMap.empty
      r.contentString shouldBe "hello finagle"
      r.contentBytes shouldBe "hello finagle".getBytes(StandardCharsets.UTF_8)
      r.contentType shouldBe Some("application/json;charset=utf-8")
      req.toRaw shouldBeTheSameInstanceAs req
    }
  }

  def `provide facade of http responses`: Unit = {
    val resp = http.Response(Status.Forbidden)
    resp.setContentString("hello world")
    resp.setContentTypeJson()

    val r = resp.toHttpResponse

    r.status shouldBe HttpStatus.Forbidden_403
    r.statusCode shouldBe 403
    r.contentString shouldBe "hello world"
    r.contentType shouldBe Some("application/json;charset=utf-8")
    r.contentBytes shouldBe "hello world".getBytes(StandardCharsets.UTF_8)
    resp.toRaw shouldBeTheSameInstanceAs resp
  }
} 
Example 79
Source File: ProcessJobRunnerSrv.scala    From Cortex   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.thp.cortex.services

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import javax.inject.{Inject, Singleton}
import org.elastic4play.utils.RichFuture
import org.thp.cortex.models._
import play.api.Logger
import play.api.libs.json.Json

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.sys.process.{Process, ProcessLogger, _}
import scala.util.Try

@Singleton
class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) {

  lazy val logger = Logger(getClass)

  private val pythonPackageVersionRegex = "^Version: ([0-9]*)\\.([0-9]*)\\.([0-9]*)".r

  def checkCortexUtilsVersion(pythonVersion: String): Option[(Int, Int, Int)] =
    Try {
      (s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil)
        .lineStream
        .collectFirst {
          case pythonPackageVersionRegex(major, minor, patch) ⇒ (major.toInt, minor.toInt, patch.toInt)
        }
    }.getOrElse(None)

  def run(jobDirectory: Path, command: String, job: Job, timeout: Option[FiniteDuration])(implicit ec: ExecutionContext): Future[Unit] = {
    val baseDirectory = Paths.get(command).getParent.getParent
    val output        = StringBuilder.newBuilder
    logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}")
    val process = Process(Seq(command, jobDirectory.toString), baseDirectory.toFile)
      .run(ProcessLogger { s ⇒
        logger.info(s"  Job ${job.id}: $s")
        output ++= s
      })
    val execution = Future
      .apply {
        process.exitValue()
        ()
      }
      .map { _ ⇒
        val outputFile = jobDirectory.resolve("output").resolve("output.json")
        if (!Files.exists(outputFile) || Files.size(outputFile) == 0) {
          val report = Json.obj("success" → false, "errorMessage" → output.toString)
          Files.write(outputFile, report.toString.getBytes(StandardCharsets.UTF_8))
        }
        ()
      }
      .recoverWith {
        case error ⇒
          logger.error(s"Execution of command $command failed", error)
          Future.apply {
            val report = Json.obj("success" → false, "errorMessage" → s"${error.getMessage}\n$output")
            Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8))
            ()
          }
      }
    timeout.fold(execution)(t ⇒ execution.withTimeout(t, killProcess(process)))
  }

  def killProcess(process: Process): Unit = {
    logger.info("Timeout reached, killing process")
    process.destroy()
  }
} 
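
The error report above is written by turning a play-json object into a string and then into UTF-8 bytes for Files.write. A minimal hedged sketch of that step with an illustrative path:

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import play.api.libs.json.Json

val report = Json.obj("success" -> false, "errorMessage" -> "process timed out")
Files.write(Paths.get("/tmp/job/output/output.json"), report.toString.getBytes(StandardCharsets.UTF_8))
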
Example 80
Source File: L6-6PerRecord.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppB {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreach { rec =>
          {
            val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
            client.connect()
            client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8)))
            client.disconnect()
            client.close()
          }
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

} 
Example 81
Source File: L6-12StaticPool.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppF {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    val mqttSink = ssc.sparkContext.broadcast(MqttSinkLazy(outputBrokerUrl))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          par.foreach(message => mqttSink.value.client.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}

class MqttSinkLazy(brokerUrl: String) extends Serializable {
  lazy val client = {
    val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    client.connect()
    sys.addShutdownHook {
      client.disconnect()
      client.close()
    }
    client
  }
}

object MqttSinkLazy {
  val brokerUrl = "tcp://localhost:1883"
  val client = new MqttSinkLazy(brokerUrl)

  def apply(brokerUrl: String): MqttSinkLazy = {
    client
  }
} 
Example 82
Source File: L6-8Static.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppD {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          par.foreach(message => MqttSink().publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}

object MqttSink {
  val brokerUrl = "tcp://localhost:1883"
  val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
  client.connect()
  sys.addShutdownHook {
    client.disconnect()
    client.close()
  }

  def apply(): MqttClient = {
    client
  }
} 
Example 83
Source File: L6-18Cassandra.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Text
import java.nio.ByteBuffer
import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
import org.apache.cassandra.hadoop.ConfigHelper
import org.apache.cassandra.thrift.ColumnOrSuperColumn
import org.apache.cassandra.thrift.Column
import org.apache.cassandra.utils.ByteBufferUtil
import org.apache.cassandra.thrift.Mutation
import java.util.Arrays

object CassandraSinkApp {

  def main(args: Array[String]) {
    if (args.length != 6) {
      System.err.println(
        "Usage: CassandraSinkApp <appname> <cassandraHost> <cassandraPort> <keyspace> <columnFamilyName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, cassandraHost, cassandraPort, keyspace, columnFamilyName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))
      .foreachRDD(rdd => {
        val jobConf = new Configuration()
        ConfigHelper.setOutputRpcPort(jobConf, cassandraPort)
        ConfigHelper.setOutputInitialAddress(jobConf, cassandraHost)
        ConfigHelper.setOutputColumnFamily(jobConf, keyspace, columnFamilyName)
        ConfigHelper.setOutputPartitioner(jobConf, "Murmur3Partitioner")
        rdd.map(rec => {
          val c = new Column()
          c.setName(ByteBufferUtil.bytes(columnName))
          c.setValue(ByteBufferUtil.bytes(rec._2 / (windowSize / batchInterval)))
          c.setTimestamp(System.currentTimeMillis)
          val m = new Mutation()
          m.setColumn_or_supercolumn(new ColumnOrSuperColumn())
          m.column_or_supercolumn.setColumn(c)
          (ByteBufferUtil.bytes(rec._1), Arrays.asList(m))
        }).saveAsNewAPIHadoopFile(keyspace, classOf[ByteBuffer], classOf[List[Mutation]], classOf[ColumnFamilyOutputFormat], jobConf)
      })

    ssc.start()
    ssc.awaitTermination()
  }
} 
Example 84
Source File: L6-5Exception.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppA {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
        client.connect()
        rdd.foreach(rec => client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8))))
        client.disconnect()
        client.close()
      }

    ssc.start()
    ssc.awaitTermination()
  }

} 
Example 85
Source File: L6-10LazyStatic.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.commons.pool2.PooledObject
import org.apache.commons.pool2.BasePooledObjectFactory
import org.apache.commons.pool2.impl.DefaultPooledObject
import org.apache.commons.pool2.impl.GenericObjectPool
import org.apache.commons.pool2.ObjectPool

object MqttSinkAppE {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          val mqttSink = MqttSinkPool().borrowObject()
          par.foreach(message => mqttSink.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
          MqttSinkPool().returnObject(mqttSink)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}

object MqttSinkPool {
  val poolSize = 8
  val brokerUrl = "tcp://localhost:1883"
  val mqttPool = new GenericObjectPool[MqttClient](new MqttClientFactory(brokerUrl))
  mqttPool.setMaxTotal(poolSize)
  sys.addShutdownHook {
    mqttPool.close()
  }
  
  def apply(): GenericObjectPool[MqttClient] = {
    mqttPool
  }
}

class MqttClientFactory(brokerUrl: String) extends BasePooledObjectFactory[MqttClient] {
  override def create() = {
    val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    client.connect()
    client
  }
  override def wrap(client: MqttClient) = new DefaultPooledObject[MqttClient](client)
  override def validateObject(pObj: PooledObject[MqttClient]) = pObj.getObject.isConnected()
  override def destroyObject(pObj: PooledObject[MqttClient]) = {
    pObj.getObject.disconnect()
    pObj.getObject.close()
  }
  override def passivateObject(pObj: PooledObject[MqttClient]) = {}
} 
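A note on the pooled sink above: if publish throws, the borrowed client is never returned to the pool. A small sketch (reusing MqttSinkPool from the example, everything else unchanged) that returns the client in a finally block:

import java.nio.charset.StandardCharsets

import org.apache.spark.rdd.RDD
import org.eclipse.paho.client.mqttv3.MqttMessage

// Sketch only: the same per-partition publish as in MqttSinkAppE, but the borrowed
// client is returned even when publish fails.
def publishPartitions(rdd: RDD[String], topic: String): Unit =
  rdd.foreachPartition { par =>
    val client = MqttSinkPool().borrowObject()
    try par.foreach(m => client.publish(topic, new MqttMessage(m.getBytes(StandardCharsets.UTF_8))))
    finally MqttSinkPool().returnObject(client)
  }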
Example 86
Source File: L6-7PerPartition.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppC {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
          client.connect()
          par.foreach(rec => client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8))))
          client.disconnect()
          client.close()
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
} 
Example 87
Source File: TestCompileApplicationInstance.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.tools

import java.io.{OutputStream, OutputStreamWriter}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import com.amazon.milan.application.{Application, ApplicationConfiguration, ApplicationInstance}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import com.amazon.milan.{Id, SemanticVersion}
import org.junit.Assert._
import org.junit.Test


object TestCompileApplicationInstance {

  case class Record(recordId: String, i: Int)

  class Provider extends ApplicationInstanceProvider {
    override def getApplicationInstance(params: List[(String, String)]): ApplicationInstance = {
      val input = Stream.of[Record]
      val graph = new StreamGraph(input)
      val config = new ApplicationConfiguration
      config.setListSource(input, Record("1", 1))

      val instanceId = params.find(_._1 == "instanceId").get._2
      val appId = params.find(_._1 == "appId").get._2

      new ApplicationInstance(
        instanceId,
        new Application(appId, graph, SemanticVersion.ZERO),
        config)
    }
  }

  class Compiler extends ApplicationInstanceCompiler {
    override def compile(applicationInstance: ApplicationInstance,
                         params: List[(String, String)],
                         output: OutputStream): Unit = {
      val writer = new OutputStreamWriter(output)
      val testParam = params.find(_._1 == "test").get._2
      writer.write(testParam)
      writer.write(applicationInstance.toJsonString)
      writer.close()
    }
  }

}

@Test
class TestCompileApplicationInstance {
  @Test
  def test_CompileApplicationInstance_Main_SendsProviderAndCompilerParameters(): Unit = {

    val tempFile = Files.createTempFile("TestCompileApplicationInstance", ".scala")
    Files.deleteIfExists(tempFile)

    val appId = Id.newId()
    val instanceId = Id.newId()
    val testValue = Id.newId()

    try {
      val args = Array(
        "--provider",
        "com.amazon.milan.tools.TestCompileApplicationInstance.Provider",
        "--compiler",
        "com.amazon.milan.tools.TestCompileApplicationInstance.Compiler",
        "--package",
        "generated",
        "--output",
        tempFile.toString,
        s"-PinstanceId=$instanceId",
        s"-PappId=$appId",
        s"-Ctest=$testValue"
      )
      CompileApplicationInstance.main(args)

      val fileContents = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(Files.readAllBytes(tempFile))).toString
      assertTrue(fileContents.contains(appId))
      assertTrue(fileContents.contains(instanceId))
      assertTrue(fileContents.contains(testValue))
    }
    finally {
      Files.deleteIfExists(tempFile)
    }
  }
} 
Example 88
Source File: JsonDataOutputFormat.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.dataformats

import java.io.OutputStream
import java.nio.charset.StandardCharsets

import com.amazon.milan.HashUtil
import com.amazon.milan.serialization.{JavaTypeFactory, MilanObjectMapper}
import com.amazon.milan.typeutil.TypeDescriptor
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}

import scala.collection.JavaConverters._



@JsonSerialize
@JsonDeserialize
class JsonDataOutputFormat[T: TypeDescriptor] extends DataOutputFormat[T] {
  @transient private lazy val objectMapper = new MilanObjectMapper()
  @transient private lazy val javaType = new JavaTypeFactory(this.objectMapper.getTypeFactory).makeJavaType(this.recordTypeDescriptor)
  @transient private lazy val hashCodeValue = HashUtil.combineHashCodes(this.recordTypeDescriptor.hashCode())
  @transient private lazy val writer = this.objectMapper.writerFor(this.javaType)
  @transient private lazy val newLine = "\n".getBytes(StandardCharsets.UTF_8)

  private var recordTypeDescriptor = implicitly[TypeDescriptor[T]]

  override def getGenericArguments: List[TypeDescriptor[_]] =
    List(implicitly[TypeDescriptor[T]])

  override def setGenericArguments(genericArgs: List[TypeDescriptor[_]]): Unit = {
    this.recordTypeDescriptor = genericArgs.head.asInstanceOf[TypeDescriptor[T]]
  }

  override def writeValue(value: T, outputStream: OutputStream): Unit = {
    this.writer.writeValue(outputStream, value)
    outputStream.write(this.newLine)
  }

  override def writeValues(values: TraversableOnce[T], outputStream: OutputStream): Unit = {
    this.writer
      .withRootValueSeparator("\n")
      .writeValues(outputStream)
      .writeAll(values.toIterable.asJava)
    outputStream.write(this.newLine)
  }

  override def hashCode(): Int = this.hashCodeValue

  override def equals(obj: Any): Boolean = {
    obj match {
      case o: JsonDataOutputFormat[T] =>
        this.recordTypeDescriptor.equals(o.recordTypeDescriptor)

      case _ =>
        false
    }
  }
} 
Example 89
Source File: TestCsvDataInputFormat.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.dataformats

import java.nio.charset.StandardCharsets

import com.amazon.milan.serialization.{DataFormatConfiguration, DataFormatFlags, MilanObjectMapper}
import com.amazon.milan.test.IntStringRecord
import com.amazon.milan.typeutil._
import org.junit.Assert._
import org.junit.Test


object TestCsvDataInputFormat {

  class TestClass(var intValue: Int, var stringValue: String, var doubleValue: Double) {
    def this() {
      this(0, "", 0)
    }
  }

}

import com.amazon.milan.dataformats.TestCsvDataInputFormat._


@Test
class TestCsvDataInputFormat {
  @Test
  def test_CsvDataInputFormat_ReadValue_WithUtf8EncodedCsvRow_ReturnsCorrectObject(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("intValue", "stringValue", "doubleValue"), DataFormatConfiguration.default)
    val row = "1,\"foo bar\",3.14"
    val rowBytes = row.getBytes("utf-8")
    val output = format.readValue(rowBytes, 0, rowBytes.length).get

    assertEquals(1, output.intValue)
    assertEquals("foo bar", output.stringValue)
    assertEquals(3.14, output.doubleValue, 1e-10)
  }

  @Test
  def test_CsvDataInputFormat_ReadValue_WithUtf8EncodedCsvRowWithOneFieldMissing_ReturnsObjectWithDefaultValueForThatField(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("intValue", "stringValue", "doubleValue"), DataFormatConfiguration.default)
    val row = "1,\"foo bar\""
    val rowBytes = row.getBytes("utf-8")
    val output = format.readValue(rowBytes, 0, rowBytes.length).get

    assertEquals(1, output.intValue)
    assertEquals("foo bar", output.stringValue)
    assertEquals(0.0, output.doubleValue, 0)
  }

  @Test(expected = classOf[PropertyNotFoundException])
  def test_CsvDataInputFormat_ReadValue_WithFailOnUnknownPropertiesTrue_AndUnknownPropertyInSchema_ThrowsUnrecognizedPropertyException(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("unknownProperty"), DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))
    val row = "1"
    val rowBytes = row.getBytes("utf-8")
    format.readValue(rowBytes, 0, rowBytes.length)
  }

  @Test
  def test_CsvDataInputFormat_ReadValue_WithFailOnUnknownPropertiesFalse_AndUnknownPropertyInSchema_DoesNotThrow(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("unknownProperty"), DataFormatConfiguration.default)
    val row = "1"
    val rowBytes = row.getBytes("utf-8")
    format.readValue(rowBytes, 0, rowBytes.length)
  }

  @Test
  def test_CsvDataInputFormat_WithFailOnUnknownPropertiesTrue_JsonSerializeAndDeserializeAsDataFormat_YieldsEquivalentObject(): Unit = {
    val original = new CsvDataInputFormat[TestClass](
      Array("intValue", "stringValue", "doubleValue"),
      DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))

    val copy = MilanObjectMapper.copy(original.asInstanceOf[DataInputFormat[TestClass]])

    assertEquals(original, copy)
  }

  @Test
  def test_CsvDataInputFormat_WithNonStandardSeparatorAndNullIdentifier_CorrectlyParsesARecord(): Unit = {
    val format = new CsvDataInputFormat[IntStringRecord](Array("i", "s"), true, 0x01, "\\N", DataFormatConfiguration.default)
    val inputRecordString = "3\u0001\\N"
    val inputRecordBytes = inputRecordString.getBytes(StandardCharsets.UTF_8)
    val record = format.readValue(inputRecordBytes, 0, inputRecordBytes.length).get
    assertEquals(IntStringRecord(3, null), record)
  }
} 
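The tests above mix getBytes("utf-8") with getBytes(StandardCharsets.UTF_8). Both produce the same bytes; the charset overload simply avoids the checked UnsupportedEncodingException that the name-based overload can throw. A tiny sketch:

import java.nio.charset.StandardCharsets
import java.util.Arrays

val viaName    = "1,\"foo bar\",3.14".getBytes("utf-8")                // may throw UnsupportedEncodingException
val viaCharset = "1,\"foo bar\",3.14".getBytes(StandardCharsets.UTF_8) // no checked exception
assert(Arrays.equals(viaName, viaCharset))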
Example 90
Source File: FlinkGenerator.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import java.io.{ByteArrayOutputStream, OutputStream}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, StandardOpenOption}

import com.amazon.milan.application.{Application, ApplicationConfiguration, ApplicationInstance}
import com.amazon.milan.compiler.flink.internal.FlinkTypeEmitter
import com.amazon.milan.lang.StreamGraph
import com.amazon.milan.program.{Cycle, StreamExpression}
import com.amazon.milan.{Id, SemanticVersion}
import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory


case class GeneratorConfig(preventGenericTypeInformation: Boolean = false)


object FlinkGenerator {
  val default = new FlinkGenerator(GeneratorConfig())
}


class FlinkGenerator(classLoader: ClassLoader, generatorConfig: GeneratorConfig) {

  private val generatorTypeLifter = new FlinkTypeLifter(new FlinkTypeEmitter, this.generatorConfig.preventGenericTypeInformation)

  private val logger = Logger(LoggerFactory.getLogger(getClass))

  def this(generatorConfig: GeneratorConfig) {
    this(getClass.getClassLoader, generatorConfig)
  }

  def generateScala(graph: StreamGraph,
                    appConfig: ApplicationConfiguration,
                    packageName: String,
                    className: String): String = {
    val application = new Application(Id.newId(), graph, SemanticVersion.ZERO)
    val instance = new ApplicationInstance(Id.newId(), application, appConfig)
    this.generateScala(instance, packageName, className)
  }

  def generateScala(instance: ApplicationInstance,
                    outputPath: Path,
                    packageName: String,
                    className: String): Unit = {
    val scalaCode = this.generateScala(instance, packageName, className)
    val contents = scalaCode.getBytes(StandardCharsets.UTF_8)
    Files.write(outputPath, contents, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
  }

  def generateScala(instance: ApplicationInstance,
                    packageName: String,
                    className: String): String = {
    val output = new ByteArrayOutputStream()
    this.generateScala(instance, output, packageName, className)

    output.flush()
    StandardCharsets.UTF_8.decode(ByteBuffer.wrap(output.toByteArray)).toString
  }

  def generateScala(instance: ApplicationInstance,
                    output: OutputStream,
                    packageName: String,
                    className: String): Unit = {
    val finalGraph = instance.application.graph.getDereferencedGraph
    finalGraph.typeCheckGraph()

    val outputs = new GeneratorOutputs(this.generatorTypeLifter)
    val context = GeneratorContext.createEmpty(instance.instanceDefinitionId, finalGraph, instance.config, outputs, this.generatorTypeLifter)

    // Ensure that every data stream is generated.
    finalGraph
      .getStreams
      .foreach(stream => this.ensureStreamIsGenerated(context, stream))

    // Close any cycles.
    finalGraph
      .getStreams
      .filter(_.isInstanceOf[Cycle])
      .map(_.asInstanceOf[Cycle])
      .foreach(context.closeCycle)

    // Add all sinks at the end.
    instance.config.dataSinks.foreach(sink => context.generateSink(sink))

    val generated = context.output.generateScala(packageName, className)
    output.write(generated.getBytes(StandardCharsets.UTF_8))
  }

  private def ensureStreamIsGenerated(context: GeneratorContext,
                                      stream: StreamExpression): Unit = {
    context.getOrGenerateDataStream(stream)
  }
} 
Example 91
Source File: package.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler

import java.io.OutputStream
import java.nio.charset.StandardCharsets

import com.amazon.milan.typeutil.TypeDescriptor

import _root_.scala.language.implicitConversions


package object scala {

  implicit class MilanScalaCompilerStringExtensions(s: String) {
    def strip: String =
      this.s.stripMargin.stripLineEnd.stripPrefix("\n")

    def indent(level: Int): String = {
      val prefix = Seq.tabulate(level)(_ => "  ").mkString
      this.s.lines.map(line => prefix + line).mkString("\n")
    }

    def indentTail(level: Int): String = {
      val prefix = Seq.tabulate(level)(_ => "  ").mkString
      this.s.lines.zipWithIndex.map {
        case (line, index) =>
          if (index == 0) {
            line
          }
          else {
            prefix + line
          }
      }.mkString("\n")
    }

    def getUtf8Bytes: Array[Byte] =
      this.s.getBytes(StandardCharsets.UTF_8)
  }

  implicit class MilanScalaCompilerTypeDescriptorExtensions[_](t: TypeDescriptor[_]) {
    def toTerm: ClassName = {
      if (t.isTuple && t.genericArguments.isEmpty) {
        ClassName("Product")
      }
      else {
        ClassName(t.fullName)
      }
    }
  }

  implicit class MilanScalaOutputStreamExtensions(outputStream: OutputStream) {
    def writeUtf8(s: String): Unit = {
      this.outputStream.write(s.getUtf8Bytes)
    }
  }

} 
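A brief usage sketch for the string and output-stream extensions above, assuming the package object's implicits are in scope via import com.amazon.milan.compiler.scala._:

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets

val out = new ByteArrayOutputStream()
out.writeUtf8("val answer = 42\n")                         // MilanScalaOutputStreamExtensions
val bytes: Array[Byte] = "val answer = 42\n".getUtf8Bytes  // MilanScalaCompilerStringExtensions
assert(new String(out.toByteArray, StandardCharsets.UTF_8) == new String(bytes, StandardCharsets.UTF_8))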
Example 92
Source File: MapreduceDriverTest.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package org.schedoscope.scheduler.driver

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.scalatest.{FlatSpec, Matchers}
import org.schedoscope.dsl.View
import org.schedoscope.dsl.transformations.{FailingMapper, MapreduceTransformation}
import org.schedoscope.test.resources.LocalTestResources
import org.schedoscope.test.resources.TestDriverRunCompletionHandlerCallCounter._

class MapreduceDriverTest extends FlatSpec with Matchers with TestFolder {
  lazy val driver = new LocalTestResources().driverFor[MapreduceTransformation]("mapreduce")

  def invalidJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => Job.getInstance

  def failingJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => {
    writeData()
    val job = Job.getInstance
    job.setMapperClass(classOf[FailingMapper])
    FileInputFormat.setInputPaths(job, new Path(inputPath("")))
    FileOutputFormat.setOutputPath(job, new Path(outputPath(System.nanoTime.toString)))
    job
  }

  def identityJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => {
    writeData()
    val job = Job.getInstance
    FileInputFormat.setInputPaths(job, new Path(inputPath("")))
    FileOutputFormat.setOutputPath(job, new Path(outputPath(System.nanoTime.toString)))
    job
  }

  case class DummyView() extends View

  def writeData() {
    Files.write(Paths.get(s"${inputPath("")}/file.txt"), "some data".getBytes(StandardCharsets.UTF_8))
  }

  "MapreduceDriver" should "have transformation name Mapreduce" in {
    driver.transformationName shouldBe "mapreduce"
  }

  it should "execute Mapreduce transformations synchronously" in {
    val driverRunState = driver.runAndWait(MapreduceTransformation(new DummyView(), identityJob))

    driverRunState shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute another Mapreduce transformations synchronously" in {
    val driverRunState = driver.runAndWait(MapreduceTransformation(new DummyView(), identityJob))

    driverRunState shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute Mapreduce transformations asynchronously" in {
    val driverRunHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    var runWasAsynchronous = false

    while (driver.getDriverRunState(driverRunHandle).isInstanceOf[DriverRunOngoing[_]])
      runWasAsynchronous = true

    runWasAsynchronous shouldBe true
    driver.getDriverRunState(driverRunHandle) shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute Mapreduce transformations and return errors when running asynchronously" in {
    val driverRunHandle = driver.run(MapreduceTransformation(new DummyView(), failingJob))

    var runWasAsynchronous = false

    while (driver.getDriverRunState(driverRunHandle).isInstanceOf[DriverRunOngoing[_]])
      runWasAsynchronous = true

    // runWasAsynchronous shouldBe true FIXME: isn't asynchronous, why?
    driver.getDriverRunState(driverRunHandle) shouldBe a[DriverRunFailed[_]]
  }

  it should "call its DriverRunCompletitionHandlers' driverRunCompleted upon request" in {
    val runHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    while (driver.getDriverRunState(runHandle).isInstanceOf[DriverRunOngoing[_]]) {}

    driver.driverRunCompleted(runHandle)

    driverRunCompletedCalled(runHandle, driver.getDriverRunState(runHandle)) shouldBe true
  }

  it should "call its DriverRunCompletitionHandlers' driverRunStarted upon request" in {
    val runHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    driver.driverRunStarted(runHandle)

    driverRunStartedCalled(runHandle) shouldBe true
  }
} 
Example 93
Source File: BuiltInErrors.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.http4s.server

import java.nio.charset.StandardCharsets

import endpoints4s.{Invalid, algebra}
import fs2.Chunk
import org.http4s.EntityEncoder
import org.http4s.MediaType
import org.http4s.headers.`Content-Type`


trait BuiltInErrors extends algebra.BuiltInErrors {
  this: EndpointsWithCustomErrors =>

  def clientErrorsResponseEntity: ResponseEntity[Invalid] = {
    val hdr = `Content-Type`(MediaType.application.json)
    EntityEncoder.simple(hdr) { invalid =>
      val s = endpoints4s.ujson.codecs.invalidCodec.encode(invalid)
      Chunk.bytes(s.getBytes(StandardCharsets.UTF_8))
    }
  }

  def serverErrorResponseEntity: ResponseEntity[Throwable] =
    clientErrorsResponseEntity.contramap(th => Invalid(th.getMessage))
} 
Example 94
Source File: ServerTestBase.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.algebra.server

import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.`Content-Type`
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.util.ByteString
import endpoints4s.algebra
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}

import scala.concurrent.duration._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.{ExecutionContext, Future}

trait ServerTestBase[T <: algebra.Endpoints]
    extends AnyWordSpec
    with Matchers
    with ScalaFutures
    with BeforeAndAfterAll
    with BeforeAndAfter {

  override implicit def patienceConfig: PatienceConfig =
    PatienceConfig(10.seconds, 10.millisecond)

  val serverApi: T

  
  case class Malformed(errors: Seq[String]) extends DecodedUrl[Nothing]
} 
Example 95
Source File: ZkUtils.scala    From CMAK   with Apache License 2.0 5 votes vote down vote up
package kafka.manager.utils

import java.nio.charset.StandardCharsets

import org.apache.curator.framework.CuratorFramework
import org.apache.kafka.common.TopicPartition
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.KeeperException.{NoNodeException, NodeExistsException}
import org.apache.zookeeper.data.Stat

object ZkUtils {

  def replicaAssignmentZkData(map: Map[String, Seq[Int]]): String = {
    toJson(Map("version" -> 1, "partitions" -> map))
  }

  def readData(curator: CuratorFramework, path: String): (String, Stat) = {
    val stat: Stat = new Stat()
    val dataStr: String = curator.getData.storingStatIn(stat).forPath(path)
    (dataStr, stat)
  }
  
  def readDataMaybeNull(curator: CuratorFramework, path: String): (Option[String], Stat) = {
    val stat: Stat = new Stat()
    try {
      val dataStr: String = curator.getData.storingStatIn(stat).forPath(path)
      (Option(dataStr), stat)
    } catch {
      case e: NoNodeException => {
        (None, stat)
      }
      case e2: Throwable => throw e2
    }
  }


  def getPartitionReassignmentZkData(partitionsToBeReassigned: Map[TopicPartition, Seq[Int]]): String = {
    toJson(Map("version" -> 1, "partitions" -> partitionsToBeReassigned.map(e => Map("topic" -> e._1.topic, "partition" -> e._1.partition,
      "replicas" -> e._2))))
  }
} 
Example 96
Source File: package.scala    From CMAK   with Apache License 2.0 5 votes vote down vote up
package kafka.manager

import java.nio.charset.StandardCharsets
import java.text.NumberFormat


package object utils {
  import org.json4s._
  import org.json4s.jackson.JsonMethods._
  import org.json4s.jackson.Serialization.{read, write}
  implicit val formats = DefaultFormats
  private[this] val numberFormat = NumberFormat.getInstance()
  
  implicit class LongFormatted(val x: Long) {
    def formattedAsDecimal = numberFormat.format(x)  
  }

  implicit def serializeString(data: String) : Array[Byte] = {
    data.getBytes(StandardCharsets.UTF_8)
  }

  implicit def deserializeString(data: Array[Byte]) : String  = {
    new String(data, StandardCharsets.UTF_8)
  }

  def toJson(map: Map[String, Any]): String = {
    write(map)
  }
  
  def toJson(s: String) : String = {
    "\"" + s + "\""
  }

  def fromJson[T](s: String) : T = {
    read(s)
  }

  def parseJson(s: String) : JValue = {
    parse(s)
  }

  @throws[UtilException]
  def checkCondition(cond: Boolean, error: UtilError) : Unit = {
    if(!cond) {
      throw new UtilException(error)
    }
  }

  @throws[UtilException]
  def throwError [T] (error: UtilError) : T = {
    throw new UtilException(error)
  }
} 
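A short usage sketch for the implicit UTF-8 conversions above, assuming import kafka.manager.utils._ brings them into scope:

import kafka.manager.utils._

val bytes: Array[Byte] = serializeString("kafka-manager")  // String => Array[Byte] via UTF-8
val text: String       = deserializeString(bytes)          // Array[Byte] => String via UTF-8
assert(text == "kafka-manager")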
Example 97
Source File: MessageWriter.scala    From lsp4s   with Apache License 2.0 5 votes vote down vote up
package scala.meta.jsonrpc

import java.io.ByteArrayOutputStream
import java.io.OutputStream
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import scala.concurrent.Future
import io.circe.syntax._
import monix.execution.Ack
import monix.reactive.Observer
import scribe.LoggerSupport


  def write(msg: Message): Future[Ack] = lock.synchronized {
    baos.reset()
    val json = msg.asJson
    val protocol = BaseProtocolMessage.fromJson(json)
    logger.trace(s" --> $json")
    val byteBuffer = MessageWriter.write(protocol, baos, headerOut)
    out.onNext(byteBuffer)
  }
}

object MessageWriter {

  def headerWriter(out: OutputStream): PrintWriter = {
    new PrintWriter(new OutputStreamWriter(out, StandardCharsets.US_ASCII))
  }

  def write(message: BaseProtocolMessage): ByteBuffer = {
    val out = new ByteArrayOutputStream()
    val header = headerWriter(out)
    write(message, out, header)
  }

  def write(
      message: BaseProtocolMessage,
      out: ByteArrayOutputStream,
      headerOut: PrintWriter
  ): ByteBuffer = {
    message.header.foreach {
      case (key, value) =>
        headerOut.write(key)
        headerOut.write(": ")
        headerOut.write(value)
        headerOut.write("\r\n")
    }
    headerOut.write("\r\n")
    out.write(message.content)
    out.flush()
    val buffer = ByteBuffer.wrap(out.toByteArray, 0, out.size())
    buffer
  }
} 
Example 98
Source File: BaseProtocolMessage.scala    From lsp4s   with Apache License 2.0 5 votes vote down vote up
package scala.meta.jsonrpc

import java.io.InputStream
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util
import io.circe.Json
import io.circe.syntax._
import monix.reactive.Observable
import scribe.LoggerSupport

final class BaseProtocolMessage(
    val header: Map[String, String],
    val content: Array[Byte]
) {

  override def equals(obj: scala.Any): Boolean =
    this.eq(obj.asInstanceOf[Object]) || {
      obj match {
        case m: BaseProtocolMessage =>
          header.equals(m.header) &&
            util.Arrays.equals(content, m.content)
      }
    }

  override def toString: String = {
    val bytes = MessageWriter.write(this)
    StandardCharsets.UTF_8.decode(bytes).toString
  }
}

object BaseProtocolMessage {
  val ContentLen = "Content-Length"

  def apply(msg: Message): BaseProtocolMessage =
    fromJson(msg.asJson)
  def fromJson(json: Json): BaseProtocolMessage =
    fromBytes(json.noSpaces.getBytes(StandardCharsets.UTF_8))
  def fromBytes(bytes: Array[Byte]): BaseProtocolMessage =
    new BaseProtocolMessage(
      Map("Content-Length" -> bytes.length.toString),
      bytes
    )

  def fromInputStream(
      in: InputStream,
      logger: LoggerSupport
  ): Observable[BaseProtocolMessage] =
    fromBytes(Observable.fromInputStream(in), logger)

  def fromBytes(
      in: Observable[Array[Byte]],
      logger: LoggerSupport
  ): Observable[BaseProtocolMessage] =
    fromByteBuffers(in.map(ByteBuffer.wrap), logger)

  def fromByteBuffers(
      in: Observable[ByteBuffer],
      logger: LoggerSupport
  ): Observable[BaseProtocolMessage] =
    in.executeAsync.liftByOperator(new BaseProtocolMessageParser(logger))
} 
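Taken together, the two examples above define the LSP base-protocol framing: an ASCII header whose Content-Length is the UTF-8 byte length of the JSON body, a blank line, then the body. A standalone sketch of the same framing using only the JDK:

import java.nio.charset.StandardCharsets

val body    = """{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}"""
val content = body.getBytes(StandardCharsets.UTF_8)
val header  = s"Content-Length: ${content.length}\r\n\r\n".getBytes(StandardCharsets.US_ASCII)
val framed  = header ++ content   // what MessageWriter.write ultimately emits for this message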
Example 99
Source File: FileLoggerSuite.scala    From lsp4s   with Apache License 2.0 5 votes vote down vote up
package tests

import java.io.ByteArrayOutputStream
import java.io.PrintStream
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import minitest.SimpleTestSuite

object FileLoggerSuite extends SimpleTestSuite {
  test("logs don't go to stdout") {
    val path = Files.createTempFile("lsp4s", ".log")
    val baos = new ByteArrayOutputStream()
    val writer = scribe.writer.FileWriter().path(_ => path).autoFlush
    Console.withOut(new PrintStream(baos)) {
      val logger = scribe.Logger("lsp4s").orphan().withHandler(writer = writer)
      logger.info("This is info")
      logger.warn("This is warning")
      logger.error("This is error")
    }
    val obtainedOut = baos.toString()
    assert(obtainedOut.isEmpty)
    val obtainedLogs =
      new String(Files.readAllBytes(path), StandardCharsets.UTF_8)
    List("info", "warning", "error").foreach { message =>
      assert(obtainedLogs.contains(s"This is $message"), obtainedLogs)
    }
  }
} 
Example 100
Source File: SftpStoreNoChrootTest.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore.sftp

import java.nio.charset.StandardCharsets
import java.util.Properties

import blobstore.Store
import cats.effect.IO
import fs2.Stream
import com.jcraft.jsch.{ChannelSftp, JSch, Session}


class SftpStoreNoChrootTest extends AbstractSftpStoreTest {
  override val session: IO[Session] = IO {
    val jsch = new JSch()

    val session = jsch.getSession("blob", "sftp-no-chroot", 22)
    session.setTimeout(10000)
    session.setPassword("password")

    val config = new Properties
    config.put("StrictHostKeyChecking", "no")
    session.setConfig(config)
    session // Let the store connect this session
  }

  it should "honor absRoot on put" in {
    val s = session.unsafeRunSync()
    s.connect(10000)

    val store: Store[IO] =
      new SftpStore[IO](s"/home/blob/", s, blocker, mVar, None, 10000)

    val filePath = dirPath("baz/bam") / "tmp"

    val save = Stream
      .emits("foo".getBytes(StandardCharsets.UTF_8))
      .covary[IO]
      .through(store.put(filePath))
      .compile
      .toList

    val storeRead = store.get(filePath, 1024).through(fs2.text.utf8Decode).compile.toList
    val is = IO {
      @SuppressWarnings(Array("scalafix:DisableSyntax.asInstanceOf"))
      val ch = s.openChannel("sftp").asInstanceOf[ChannelSftp]
      ch.connect()
      ch.get(s"/home/blob/$filePath")
    }
    val directRead = fs2.io.readInputStream(is, 1024, blocker).through(fs2.text.utf8Decode).compile.toList

    val program = for {
      _                 <- save
      contentsFromStore <- storeRead
      contentsDirect    <- directRead
    } yield contentsDirect.mkString("\n") -> contentsFromStore.mkString("\n")

    val (fromStore, fromDirect) = program.unsafeRunSync()
    fromStore mustBe "foo"
    fromDirect mustBe "foo"
  }
} 
Example 101
Source File: DbService.scala    From sns   with Apache License 2.0 5 votes vote down vote up
package me.snov.sns.service

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths, StandardOpenOption}

import akka.event.LoggingAdapter
import me.snov.sns.model.{Configuration, Subscription, Topic}
import spray.json._

trait DbService {
  def load(): Option[Configuration]

  def save(configuration: Configuration)
}

class MemoryDbService extends DbService {
  override def load(): Option[Configuration] = {
    Some(Configuration(subscriptions= List[Subscription](), topics= List[Topic]()))
  }

  override def save(configuration: Configuration): Unit = {}
}

class FileDbService(dbFilePath: String)(implicit log: LoggingAdapter) extends DbService {

  val subscriptionsName = "subscriptions"
  val topicsName = "topics"
  
  val path = Paths.get(dbFilePath)
  
  def load(): Option[Configuration] = {
    if (Files.exists(path)) {
      log.debug("Loading DB")
      try {
        val configuration = read().parseJson.convertTo[Configuration]
        log.info("Loaded DB")
        return Some(configuration)
      } catch {
        case e: DeserializationException => log.error(e, "Unable to parse configuration")
        case e: RuntimeException => log.error(e,"Unable to load configuration")
      }
    }
    None
  }
  
  def save(configuration: Configuration) = {
    log.debug("Saving DB")
    write(configuration.toJson.prettyPrint)
  }

  private def write(contents: String) = {
    Files.write(path, contents.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
  }

  private def read(): String = {
    new String(Files.readAllBytes(path))
  }
} 
Example 102
Source File: Decode.scala    From scala-stellar-sdk   with Apache License 2.0 5 votes vote down vote up
package stellar.sdk.model.xdr

import java.io.EOFException
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.time.Instant

import cats.Eval
import cats.data.{IndexedStateT, State}
import com.typesafe.scalalogging.LazyLogging

import scala.util.Try

trait Decode extends LazyLogging {

  private def decode[T](bs: Seq[Byte], len: Int)(decoder: Seq[Byte] => T): (Seq[Byte], T) = {
    if (bs.length < len) throw new EOFException("Insufficient data remains to parse.")
    val t = decoder(bs.take(len))
    logger.trace(s"Dropping {} to make {}", len, t)
    bs.drop(len) -> t
  }

  val int: State[Seq[Byte], Int] = State[Seq[Byte], Int] { bs =>
    decode(bs, 4) { in => ByteBuffer.wrap(in.toArray).getInt }
  }

  val long: State[Seq[Byte], Long] = State[Seq[Byte], Long] { bs =>
    decode(bs, 8) { in => ByteBuffer.wrap(in.toArray).getLong }
  }

  val instant: State[Seq[Byte], Instant] = long.map(Instant.ofEpochSecond)

  val bool: State[Seq[Byte], Boolean] = int.map(_ == 1)

  def bytes(len: Int): State[Seq[Byte], Seq[Byte]] = State[Seq[Byte], Seq[Byte]] { bs =>
    decode(bs, len) { _.take(len) }
  }

  val bytes: State[Seq[Byte], Seq[Byte]] = for {
    len <- int
    bs <- bytes(len)
  } yield bs

  def padded(multipleOf: Int = 4): State[Seq[Byte], Seq[Byte]] = for {
    len <- int
    bs <- bytes(len)
    _ <- bytes((multipleOf - (len % multipleOf)) % multipleOf)
  } yield bs

  val string: State[Seq[Byte], String] = padded().map(_.toArray).map(new String(_, StandardCharsets.UTF_8))

  def switch[T](zero: State[Seq[Byte], T], others: State[Seq[Byte], T]*): IndexedStateT[Eval, Seq[Byte], Seq[Byte], T] = int.flatMap {
    case 0 => zero
    case n =>  Try(others(n - 1)).getOrElse {
      throw new IllegalArgumentException(s"No parser defined for discriminant $n")
    }
  }

  // TODO (jem) - All switches should use this instead and Discriminators should be held in the parent (switcher not switchee).
  def switchInt[T](zero: State[Seq[Byte], T], others: State[Seq[Byte], T]*): State[Seq[Byte], (T, Int)] = int.flatMap {
    case 0 => zero.map(_ -> 0)
    case n => Try(others(n - 1).map(_ -> n)).getOrElse {
      throw new IllegalArgumentException(s"No parser defined for discriminant $n")
    }
  }

  def opt[T](parseT: State[Seq[Byte], T]): State[Seq[Byte], Option[T]] = bool.flatMap {
    case true => parseT.map(Some(_))
    case false => State.pure(None)
  }

  def arr[T](parseT: State[Seq[Byte], T]): State[Seq[Byte], Seq[T]] = int.flatMap(seq(_, parseT))

  // $COVERAGE-OFF$
  // For debugging XDR only.
  def log[T](t: T): State[Seq[Byte], Unit] = State[Seq[Byte], Unit] { bs =>
    logger.debug("{}\n", t)
    bs -> ()
  }
  // $COVERAGE-ON$

  def seq[T](qty: Int, parseT: State[Seq[Byte], T]): State[Seq[Byte], Seq[T]] = {
    (0 until qty).foldLeft(State.pure[Seq[Byte], Seq[T]](Seq.empty[T])) { case (state, _) =>
      for {
        ts <- state
        t <- parseT
      } yield ts :+ t
    }
  }

  def drop[T](parse: State[Seq[Byte], _])(t: T): State[Seq[Byte], T] = for {
    _ <- parse
  } yield t

  def widen[A, W, O <: W](s: State[A, O]): State[A, W] = s.map(w => w: W)
} 
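A small sketch of feeding hand-built XDR bytes to the string parser above: an XDR string is a 4-byte big-endian length, the UTF-8 bytes, then zero padding to a 4-byte boundary. The XdrDecode object is just a local way to instantiate the Decode trait.

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

object XdrDecode extends Decode

val payload = "hi".getBytes(StandardCharsets.UTF_8)
val padding = Array.fill[Byte]((4 - payload.length % 4) % 4)(0)
val encoded: Seq[Byte] =
  ByteBuffer.allocate(4).putInt(payload.length).array().toSeq ++ payload ++ padding

val (remaining, decoded) = XdrDecode.string.run(encoded).value
assert(decoded == "hi" && remaining.isEmpty)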
Example 103
Source File: AppMasterResolver.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.experiments.yarn.client

import java.io.IOException
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import akka.actor.{ActorRef, ActorSystem}
import org.apache.commons.io.IOUtils
import org.apache.gearpump.experiments.yarn.glue.Records.{ApplicationId, ApplicationReport}
import org.apache.gearpump.experiments.yarn.glue.YarnClient
import org.apache.gearpump.util.{AkkaHelper, LogUtil}
import org.apache.hadoop.hdfs.web.URLConnectionFactory
import org.apache.hadoop.yarn.conf.YarnConfiguration
import scala.util.Try


class AppMasterResolver(yarnClient: YarnClient, system: ActorSystem) {
  val LOG = LogUtil.getLogger(getClass)
  val RETRY_INTERVAL_MS = 3000 // ms

  def resolve(appId: ApplicationId, timeoutSeconds: Int = 30): ActorRef = {
    val appMaster = retry(connect(appId), 1 + timeoutSeconds * 1000 / RETRY_INTERVAL_MS)
    appMaster
  }

  private def connect(appId: ApplicationId): ActorRef = {
    val report = yarnClient.getApplicationReport(appId)

    AppMasterResolver.resolveAppMasterAddress(report, system)
  }

  private def retry(fun: => ActorRef, times: Int): ActorRef = {
    var index = 0
    var result: ActorRef = null
    while (index < times && result == null) {
      Thread.sleep(RETRY_INTERVAL_MS)
      index += 1
      val tryConnect = Try(fun)
      if (tryConnect.isFailure) {
        LOG.error(s"Failed to connect YarnAppMaster(tried $index)... " +
          tryConnect.failed.get.getMessage)
      } else {
        result = tryConnect.get
      }
    }
    result
  }
}

object AppMasterResolver {
  val LOG = LogUtil.getLogger(getClass)

  def resolveAppMasterAddress(report: ApplicationReport, system: ActorSystem): ActorRef = {
    val appMasterPath = s"${report.getTrackingURL}/supervisor-actor-path"
    LOG.info(s"appMasterPath=$appMasterPath")

    val connectionFactory: URLConnectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(new YarnConfiguration())
    val url: URL = new URL(appMasterPath)
    val connection: HttpURLConnection = connectionFactory.openConnection(url)
      .asInstanceOf[HttpURLConnection]
    connection.setInstanceFollowRedirects(true)

    try {
      connection.connect()
    } catch {
      case e: IOException =>
        LOG.error(s"Failed to connect to AppMaster" + e.getMessage)
    }

    val status = connection.getResponseCode
    if (status == 200) {
      val stream: java.io.InputStream = connection.getInputStream
      val response = IOUtils.toString(stream, StandardCharsets.UTF_8)
      LOG.info("Successfully resolved AppMaster address: " + response)
      connection.disconnect()
      AkkaHelper.actorFor(system, response)
    } else {
      connection.disconnect()
      throw new IOException("Fail to resolve AppMaster address, please make sure " +
        s"${report.getTrackingURL} is accessible...")
    }
  }
} 
Example 104
Source File: ExtractNodes.scala    From tensorframes   with Apache License 2.0 5 votes vote down vote up
package org.tensorframes.dsl

import java.io.{BufferedReader, InputStreamReader, File}
import java.nio.file.Files
import java.nio.charset.StandardCharsets
import org.tensorframes.Logging
import org.scalatest.Matchers

import scala.collection.JavaConverters._

object ExtractNodes extends Matchers with Logging {

  def executeCommand(py: String): Map[String, String] = {
    val content =
      s"""
         |from __future__ import print_function
         |import tensorflow as tf
         |
         |$py
         |g = tf.get_default_graph().as_graph_def()
         |for n in g.node:
         |    print(">>>>>", str(n.name), "<<<<<<")
         |    print(n)
       """.stripMargin
    val f = File.createTempFile("pythonTest", ".py")
    logTrace(s"Created temp file ${f.getAbsolutePath}")
    Files.write(f.toPath, content.getBytes(StandardCharsets.UTF_8))
    // Using the standard python installation in the PATH. It needs to have TensorFlow installed.
    val p = new ProcessBuilder("python", f.getAbsolutePath).start()
    val s = p.getInputStream
    val isr = new InputStreamReader(s)
    val br = new BufferedReader(isr)
    var res: String = ""
    var str: String = ""
    while(str != null) {
      str = br.readLine()
      if (str != null) {
        res = res + "\n" + str
      }
    }

    p.waitFor()
    assert(p.exitValue() === 0, (p.exitValue(),
      {
        println(content)
        s"===========\n$content\n==========="
      }))
    res.split(">>>>>").map(_.trim).filterNot(_.isEmpty).map { b =>
      val zs = b.split("\n")
      val node = zs.head.dropRight(7)
      val rest = zs.tail
      node -> rest.mkString("\n")
    } .toMap
  }

  def compareOutput(py: String, nodes: Operation*): Unit = {
    val g = TestUtilities.buildGraph(nodes.head, nodes.tail:_*)
    val m1 = g.getNodeList.asScala.map { n =>
      n.getName -> n.toString.trim
    } .toMap
    val pym = executeCommand(py)
    logTrace(s"m1 = '$m1'")
    logTrace(s"pym = '$pym'")
    assert((m1.keySet -- pym.keySet).isEmpty, {
      val diff = (m1.keySet -- pym.keySet).toSeq.sorted
      s"Found extra nodes in scala: $diff"
    })
    assert((pym.keySet -- m1.keySet).isEmpty, {
      val diff = (pym.keySet -- m1.keySet).toSeq.sorted
      s"Found extra nodes in python: $diff"
    })
    for (k <- m1.keySet) {
      assert(m1(k) === pym(k),
        s"scala=${m1(k)}\npython=${pym(k)}")
    }
  }
} 
Example 105
Source File: BypassPySparkJob.scala    From incubator-livy   with Apache License 2.0 5 votes vote down vote up
package org.apache.livy.repl

import java.nio.charset.StandardCharsets

import org.apache.livy.{Job, JobContext}
import org.apache.livy.sessions._

class BypassPySparkJob(
    serializedJob: Array[Byte],
    pi: PythonInterpreter) extends Job[Array[Byte]] {

  override def call(jc: JobContext): Array[Byte] = {
    val resultByteArray = pi.pysparkJobProcessor.processBypassJob(serializedJob)
    val resultString = new String(resultByteArray, StandardCharsets.UTF_8)
    if (resultString.startsWith("Client job error:")) {
      throw new PythonJobException(resultString)
    }
    resultByteArray
  }
} 
Example 106
Source File: JWTUtils.scala    From introduction-to-akkahttp   with Apache License 2.0 5 votes vote down vote up
package com.shashank.akkahttp.util

import io.jsonwebtoken.Jwts
import java.nio.charset.StandardCharsets
import javax.xml.bind.DatatypeConverter


object JWTUtils {
  case class User(name:String, admin:Boolean)

  val adminToken = "eyJhbGciOiJIUzUxMiJ9.eyJuYW1lIjoiYWRtaW4iLCJhZG1pbiI6dHJ1ZX0.c6wRZ4pla6D9f_nDO6tqwyq5KFwyW2iSkKvrwGejn2IMxU_Z273cKZAW3Fu51Cwhp-4vwqOr1aWnyUIwzb_eow"
  val myToken = "eyJhbGciOiJIUzUxMiJ9.eyJuYW1lIjoiU2hhc2hhbmsiLCJhZG1pbiI6ZmFsc2V9.smlXLOZFZ14fozEwULbiSvzDEStlVjnLWSmg6MiaDDXUirCJjPpkNrzpKI31MxID0ZUV-H3tEcPmB9jJjGl9qA"

  private val secretKey = DatatypeConverter.printBase64Binary("introductiontoakkahttp".getBytes(StandardCharsets.UTF_8))


  def decodeJWTToUser(tokenString:String):Option[User] = {
    try {
      val claims = Jwts.parser().setSigningKey(secretKey).parseClaimsJws(tokenString).getBody
      Some(User(claims.get("name").asInstanceOf[String], claims.get("admin").asInstanceOf[Boolean]))
    } catch  {
      case e :  Exception => {
        println("exception in decode token: " + e.getMessage())
        println("Considering it as invalid token")
        None
      }
    }
  }


} 
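The secret above goes through javax.xml.bind.DatatypeConverter; the same UTF-8/Base64 round trip can be sketched with java.util.Base64, which needs no JAXB dependency (values are illustrative):

import java.nio.charset.StandardCharsets
import java.util.Base64

// Encode a UTF-8 secret to Base64 and decode it back.
val secretBytes = "introductiontoakkahttp".getBytes(StandardCharsets.UTF_8)
val encoded     = Base64.getEncoder.encodeToString(secretBytes)
val decoded     = new String(Base64.getDecoder.decode(encoded), StandardCharsets.UTF_8)
assert(decoded == "introductiontoakkahttp")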
Example 107
Source File: Instances.scala    From kafka-journal   with MIT License 5 votes vote down vote up
package com.evolutiongaming.kafka.journal.circe

import java.nio.charset.StandardCharsets

import cats.implicits._
import com.evolutiongaming.catshelper._
import com.evolutiongaming.kafka.journal.PayloadAndType._
import com.evolutiongaming.kafka.journal._
import com.evolutiongaming.kafka.journal.circe.Codecs._
import com.evolutiongaming.kafka.journal.conversions._
import com.evolutiongaming.kafka.journal.eventual.EventualRead
import io.circe._
import io.circe.jawn._
import io.circe.syntax._
import scodec.bits.ByteVector

object Instances {

  implicit def kafkaWrite[F[_] : MonadThrowable](
    implicit payloadJsonToBytes: ToBytes[F, PayloadJson[Json]]
  ): KafkaWrite[F, Json] =
    KafkaWrite.writeJson(EventJsonPayloadAndType(_, PayloadType.Json), payloadJsonToBytes)

  implicit def kafkaRead[F[_] : MonadThrowable](
    implicit payloadJsonFromBytes: FromBytes[F, PayloadJson[Json]]
  ): KafkaRead[F, Json] =
    KafkaRead.readJson(payloadJsonFromBytes, (json: EventJsonPayloadAndType[Json]) => json.payload.pure[F])

  implicit def eventualRead[F[_] : MonadThrowable]: EventualRead[F, Json] =
    EventualRead.readJson(str => FromCirceResult.summon[F].apply(parse(str)))

  implicit def payloadJsonToBytes[F[_]: FromTry]: ToBytes[F, PayloadJson[Json]] = fromEncoder

  implicit def payloadJsonFromBytes[F[_]: ApplicativeThrowable]: FromBytes[F, PayloadJson[Json]] = fromDecoder

  private def fromEncoder[F[_] : FromTry, A : Encoder]: ToBytes[F, A] =
    a => FromTry[F].unsafe {
      val json = a.asJson
      val byteBuffer = Printer.noSpaces.printToByteBuffer(json, StandardCharsets.UTF_8)
      ByteVector.view(byteBuffer)
    }

  private def fromDecoder[F[_] : ApplicativeThrowable, A : Decoder]: FromBytes[F, A] =
    bytes =>
      FromCirceResult.summon[F].apply(decodeByteBuffer[A](bytes.toByteBuffer))
        .adaptErr { case e => JournalError(s"Failed to parse $bytes json: $e", e) }

} 
Example 108
Source File: CustomReceiver.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself if isStopped() returns false
  }

  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 109
Source File: GraphLoaderSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.graphx

import java.io.File
import java.io.FileOutputStream
import java.io.OutputStreamWriter
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils

class GraphLoaderSuite extends SparkFunSuite with LocalSparkContext {

  test("GraphLoader.edgeListFile") {
    withSpark { sc =>
      val tmpDir = Utils.createTempDir()
      val graphFile = new File(tmpDir.getAbsolutePath, "graph.txt")
      val writer = new OutputStreamWriter(new FileOutputStream(graphFile), StandardCharsets.UTF_8)
      for (i <- (1 until 101)) writer.write(s"$i 0\n")
      writer.close()
      try {
        val graph = GraphLoader.edgeListFile(sc, tmpDir.getAbsolutePath)
        val neighborAttrSums = graph.aggregateMessages[Int](
          ctx => ctx.sendToDst(ctx.srcAttr),
          _ + _)
        assert(neighborAttrSums.collect.toSet === Set((0: VertexId, 100)))
      } finally {
        Utils.deleteRecursively(tmpDir)
      }
    }
  }
} 
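Where no incremental writing is needed, the same UTF-8 file can be produced in one call with java.nio.file.Files; a sketch (the path is illustrative):

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

// Write the whole edge list as UTF-8 in a single call.
val edges = (1 until 101).map(i => s"$i 0").mkString("", "\n", "\n")
Files.write(Paths.get("/tmp/graph.txt"), edges.getBytes(StandardCharsets.UTF_8))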
Example 110
Source File: LibSVMRelationSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.source.libsvm

import java.io.File
import java.nio.charset.StandardCharsets

import com.google.common.io.Files

import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.util.Utils


class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
  // Path for dataset
  var path: String = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val lines =
      """
        |1 1:1.0 3:2.0 5:3.0
        |0
        |0 2:4.0 4:5.0 6:6.0
      """.stripMargin
    val dir = Utils.createDirectory(tempDir.getCanonicalPath, "data")
    val file = new File(dir, "part-00000")
    Files.write(lines, file, StandardCharsets.UTF_8)
    path = dir.toURI.toString
  }

  override def afterAll(): Unit = {
    try {
      Utils.deleteRecursively(new File(path))
    } finally {
      super.afterAll()
    }
  }

  test("select as sparse vector") {
    val df = spark.read.format("libsvm").load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("select as dense vector") {
    val df = spark.read.format("libsvm").options(Map("vectorType" -> "dense"))
      .load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    assert(df.count() == 3)
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[DenseVector](1)
    assert(v == Vectors.dense(1.0, 0.0, 2.0, 0.0, 3.0, 0.0))
  }

  test("select a vector with specifying the longer dimension") {
    val df = spark.read.option("numFeatures", "100").format("libsvm")
      .load(path)
    val row1 = df.first()
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(100, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("write libsvm data and read it again") {
    val df = spark.read.format("libsvm").load(path)
    val tempDir2 = new File(tempDir, "read_write_test")
    val writepath = tempDir2.toURI.toString
    // TODO: Remove requirement to coalesce by supporting multiple reads.
    df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writepath)

    val df2 = spark.read.format("libsvm").load(writepath)
    val row1 = df2.first()
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("write libsvm data failed due to invalid schema") {
    val df = spark.read.format("text").load(path)
    intercept[SparkException] {
      df.write.format("libsvm").save(path + "_2")
    }
  }

  test("select features from libsvm relation") {
    val df = spark.read.format("libsvm").load(path)
    df.select("features").rdd.map { case Row(d: Vector) => d }.first
    df.select("features").collect
  }
} 
Example 111
Source File: KPLBasedKinesisTestUtils.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.kinesis

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

import com.amazonaws.services.kinesis.producer.{KinesisProducer => KPLProducer, KinesisProducerConfiguration, UserRecordResult}
import com.google.common.util.concurrent.{FutureCallback, Futures}

private[kinesis] class KPLBasedKinesisTestUtils extends KinesisTestUtils {
  override protected def getProducer(aggregate: Boolean): KinesisDataGenerator = {
    if (!aggregate) {
      new SimpleDataGenerator(kinesisClient)
    } else {
      new KPLDataGenerator(regionName)
    }
  }
}


private[kinesis] class KPLDataGenerator(regionName: String) extends KinesisDataGenerator {

  private lazy val producer: KPLProducer = {
    val conf = new KinesisProducerConfiguration()
      .setRecordMaxBufferedTime(1000)
      .setMaxConnections(1)
      .setRegion(regionName)
      .setMetricsLevel("none")

    new KPLProducer(conf)
  }

  override def sendData(streamName: String, data: Seq[Int]): Map[String, Seq[(Int, String)]] = {
    val shardIdToSeqNumbers = new mutable.HashMap[String, ArrayBuffer[(Int, String)]]()
    data.foreach { num =>
      val str = num.toString
      val data = ByteBuffer.wrap(str.getBytes(StandardCharsets.UTF_8))
      val future = producer.addUserRecord(streamName, str, data)
      val kinesisCallBack = new FutureCallback[UserRecordResult]() {
        override def onFailure(t: Throwable): Unit = {} // do nothing

        override def onSuccess(result: UserRecordResult): Unit = {
          val shardId = result.getShardId
          val seqNumber = result.getSequenceNumber()
          val sentSeqNumbers = shardIdToSeqNumbers.getOrElseUpdate(shardId,
            new ArrayBuffer[(Int, String)]())
          sentSeqNumbers += ((num, seqNumber))
        }
      }
      Futures.addCallback(future, kinesisCallBack)
    }
    producer.flushSync()
    shardIdToSeqNumbers.toMap
  }
} 
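The generator wraps each record as ByteBuffer.wrap(str.getBytes(StandardCharsets.UTF_8)). Decoding such a buffer back to text also goes through the charset; a small sketch:

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

// Encode a record payload and decode it back; duplicate() keeps the
// original buffer's position untouched while decoding.
val payload = ByteBuffer.wrap("42".getBytes(StandardCharsets.UTF_8))
val text    = StandardCharsets.UTF_8.decode(payload.duplicate()).toString
assert(text == "42")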
Example 112
Source File: FlumeTestUtils.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.flume

import java.net.{InetSocketAddress, ServerSocket}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import java.util.Collections

import scala.collection.JavaConverters._

import org.apache.avro.ipc.NettyTransceiver
import org.apache.avro.ipc.specific.SpecificRequestor
import org.apache.commons.lang3.RandomUtils
import org.apache.flume.source.avro
import org.apache.flume.source.avro.{AvroFlumeEvent, AvroSourceProtocol}
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.socket.SocketChannel
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.handler.codec.compression.{ZlibDecoder, ZlibEncoder}

import org.apache.spark.util.Utils
import org.apache.spark.SparkConf


  private class CompressionChannelFactory(compressionLevel: Int)
    extends NioClientSocketChannelFactory {

    override def newChannel(pipeline: ChannelPipeline): SocketChannel = {
      val encoder = new ZlibEncoder(compressionLevel)
      pipeline.addFirst("deflater", encoder)
      pipeline.addFirst("inflater", new ZlibDecoder())
      super.newChannel(pipeline)
    }
  }

} 
Example 113
Source File: StreamMetadata.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String)

object StreamMetadata extends Logging {

  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = FileSystem.get(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
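The OutputStreamWriter above is created without a charset, so the metadata file is written with the JVM default encoding. A hedged sketch of the explicit-charset variant, shown against an in-memory stream:

import java.io.{ByteArrayOutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets

// Pin the writer's charset so the serialized metadata is always UTF-8.
val out    = new ByteArrayOutputStream()
val writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)
writer.write("""{"id":"query-1"}""")
writer.close()
assert(new String(out.toByteArray, StandardCharsets.UTF_8).startsWith("{"))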
Example 114
Source File: YarnShuffleIntegrationSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.yarn

import java.io.File
import java.nio.charset.StandardCharsets

import com.google.common.io.Files
import org.apache.commons.io.FileUtils
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.scalatest.Matchers

import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.network.shuffle.ShuffleTestAccessor
import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
import org.apache.spark.tags.ExtendedYarnTest


@ExtendedYarnTest
class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite {

  override def newYarnConfig(): YarnConfiguration = {
    val yarnConfig = new YarnConfiguration()
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICES, "spark_shuffle")
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICE_FMT.format("spark_shuffle"),
      classOf[YarnShuffleService].getCanonicalName)
    yarnConfig.set("spark.shuffle.service.port", "0")
    yarnConfig
  }

  test("external shuffle service") {
    val shuffleServicePort = YarnTestAccessor.getShuffleServicePort
    val shuffleService = YarnTestAccessor.getShuffleServiceInstance

    val registeredExecFile = YarnTestAccessor.getRegisteredExecutorFile(shuffleService)

    logInfo("Shuffle service port = " + shuffleServicePort)
    val result = File.createTempFile("result", null, tempDir)
    val finalState = runSpark(
      false,
      mainClassName(YarnExternalShuffleDriver.getClass),
      appArgs = Seq(result.getAbsolutePath(), registeredExecFile.getAbsolutePath),
      extraConf = Map(
        "spark.shuffle.service.enabled" -> "true",
        "spark.shuffle.service.port" -> shuffleServicePort.toString
      )
    )
    checkResult(finalState, result)
    assert(YarnTestAccessor.getRegisteredExecutorFile(shuffleService).exists())
  }
}

private object YarnExternalShuffleDriver extends Logging with Matchers {

  val WAIT_TIMEOUT_MILLIS = 10000

  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      // scalastyle:off println
      System.err.println(
        s"""
        |Invalid command line: ${args.mkString(" ")}
        |
        |Usage: ExternalShuffleDriver [result file] [registered exec file]
        """.stripMargin)
      // scalastyle:on println
      System.exit(1)
    }

    val sc = new SparkContext(new SparkConf()
      .setAppName("External Shuffle Test"))
    val conf = sc.getConf
    val status = new File(args(0))
    val registeredExecFile = new File(args(1))
    logInfo("shuffle service executor file = " + registeredExecFile)
    var result = "failure"
    val execStateCopy = new File(registeredExecFile.getAbsolutePath + "_dup")
    try {
      val data = sc.parallelize(0 until 100, 10).map { x => (x % 10) -> x }.reduceByKey{ _ + _ }.
        collect().toSet
      sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
      data should be ((0 until 10).map{x => x -> (x * 10 + 450)}.toSet)
      result = "success"
      // only one process can open a leveldb file at a time, so we copy the files
      FileUtils.copyDirectory(registeredExecFile, execStateCopy)
      assert(!ShuffleTestAccessor.reloadRegisteredExecutors(execStateCopy).isEmpty)
    } finally {
      sc.stop()
      FileUtils.deleteDirectory(execStateCopy)
      Files.write(result, status, StandardCharsets.UTF_8)
    }
  }

} 
Example 115
Source File: SocketInputDStream.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.dstream

import java.io._
import java.net.{ConnectException, Socket}
import java.nio.charset.StandardCharsets

import scala.reflect.ClassTag
import scala.util.control.NonFatal

import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.util.NextIterator

private[streaming]
class SocketInputDStream[T: ClassTag](
    _ssc: StreamingContext,
    host: String,
    port: Int,
    bytesToObjects: InputStream => Iterator[T],
    storageLevel: StorageLevel
  ) extends ReceiverInputDStream[T](_ssc) {

  def getReceiver(): Receiver[T] = {
    new SocketReceiver(host, port, bytesToObjects, storageLevel)
  }
}

private[streaming]
class SocketReceiver[T: ClassTag](
    host: String,
    port: Int,
    bytesToObjects: InputStream => Iterator[T],
    storageLevel: StorageLevel
  ) extends Receiver[T](storageLevel) with Logging {

  private var socket: Socket = _

  def onStart() {

    logInfo(s"Connecting to $host:$port")
    try {
      socket = new Socket(host, port)
    } catch {
      case e: ConnectException =>
        restart(s"Error connecting to $host:$port", e)
        return
    }
    logInfo(s"Connected to $host:$port")

    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      setDaemon(true)
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // in case restart thread close it twice
    synchronized {
      if (socket != null) {
        socket.close()
        socket = null
        logInfo(s"Closed socket to $host:$port")
      }
    }
  }

  
  def bytesToLines(inputStream: InputStream): Iterator[String] = {
    val dataInputStream = new BufferedReader(
      new InputStreamReader(inputStream, StandardCharsets.UTF_8))
    new NextIterator[String] {
      protected override def getNext() = {
        val nextValue = dataInputStream.readLine()
        if (nextValue == null) {
          finished = true
        }
        nextValue
      }

      protected override def close() {
        dataInputStream.close()
      }
    }
  }
} 
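The bytesToLines helper builds a lazy line iterator on top of Spark's NextIterator. A minimal sketch of the same idea with only the standard library (the helper name is illustrative):

import java.io.{BufferedReader, InputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

// Lazily iterate over UTF-8 lines until the stream is exhausted.
def utf8Lines(in: InputStream): Iterator[String] = {
  val reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))
  Iterator.continually(reader.readLine()).takeWhile(_ != null)
}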
Example 116
Source File: RateLimitedOutputStreamSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.util

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit._

import org.apache.spark.SparkFunSuite

class RateLimitedOutputStreamSuite extends SparkFunSuite {

  private def benchmark[U](f: => U): Long = {
    val start = System.nanoTime
    f
    System.nanoTime - start
  }

  test("write") {
    val underlying = new ByteArrayOutputStream
    val data = "X" * 41000
    val stream = new RateLimitedOutputStream(underlying, desiredBytesPerSec = 10000)
    val elapsedNs = benchmark { stream.write(data.getBytes(StandardCharsets.UTF_8)) }

    val seconds = SECONDS.convert(elapsedNs, NANOSECONDS)
    assert(seconds >= 4, s"Seconds value ($seconds) is less than 4.")
    assert(seconds <= 30, s"Took more than 30 seconds ($seconds) to write data.")
    assert(underlying.toString("UTF-8") === data)
  }
} 
Example 117
Source File: JacksonMessageWriter.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.status.api.v1

import java.io.OutputStream
import java.lang.annotation.Annotation
import java.lang.reflect.Type
import java.nio.charset.StandardCharsets
import java.text.SimpleDateFormat
import java.util.{Calendar, Locale, SimpleTimeZone}
import javax.ws.rs.Produces
import javax.ws.rs.core.{MediaType, MultivaluedMap}
import javax.ws.rs.ext.{MessageBodyWriter, Provider}

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}


@Provider
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{

  val mapper = new ObjectMapper() {
    override def writeValueAsString(t: Any): String = {
      super.writeValueAsString(t)
    }
  }
  mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule)
  mapper.enable(SerializationFeature.INDENT_OUTPUT)
  mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
  mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)

  override def isWriteable(
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Boolean = {
      true
  }

  override def writeTo(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType,
      multivaluedMap: MultivaluedMap[String, AnyRef],
      outputStream: OutputStream): Unit = {
    t match {
      case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8))
      case _ => mapper.writeValue(outputStream, t)
    }
  }

  override def getSize(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Long = {
    -1L
  }
}

private[spark] object JacksonMessageWriter {
  def makeISODateFormat: SimpleDateFormat = {
    val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'", Locale.US)
    val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT"))
    iso8601.setCalendar(cal)
    iso8601
  }
} 
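For the error branch above, the string is turned into bytes with an explicit charset before hitting the OutputStream. A self-contained sketch of that move (the message text is illustrative):

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets

// Write a String to an OutputStream as UTF-8 bytes.
val out = new ByteArrayOutputStream()
out.write("unknown app: app-42".getBytes(StandardCharsets.UTF_8))
assert(out.toString("UTF-8") == "unknown app: app-42")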
Example 118
Source File: PythonRDDSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.api.python

import java.io.{ByteArrayOutputStream, DataOutputStream}
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkFunSuite

class PythonRDDSuite extends SparkFunSuite {

  test("Writing large strings to the worker") {
    val input: List[String] = List("a"*100000)
    val buffer = new DataOutputStream(new ByteArrayOutputStream)
    PythonRDD.writeIteratorToStream(input.iterator, buffer)
  }

  test("Handle nulls gracefully") {
    val buffer = new DataOutputStream(new ByteArrayOutputStream)
    // Should not have NPE when write an Iterator with null in it
    // The correctness will be tested in Python
    PythonRDD.writeIteratorToStream(Iterator("a", null), buffer)
    PythonRDD.writeIteratorToStream(Iterator(null, "a"), buffer)
    PythonRDD.writeIteratorToStream(Iterator("a".getBytes(StandardCharsets.UTF_8), null), buffer)
    PythonRDD.writeIteratorToStream(Iterator(null, "a".getBytes(StandardCharsets.UTF_8)), buffer)
    PythonRDD.writeIteratorToStream(Iterator((null, null), ("a", null), (null, "b")), buffer)
    PythonRDD.writeIteratorToStream(Iterator(
      (null, null),
      ("a".getBytes(StandardCharsets.UTF_8), null),
      (null, "b".getBytes(StandardCharsets.UTF_8))), buffer)
  }
} 
Example 119
Source File: MasterWebUISuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.master.ui

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.util.Date

import scala.collection.mutable.HashMap

import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, RequestKillDriver}
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}


class MasterWebUISuite extends SparkFunSuite with BeforeAndAfterAll {

  val conf = new SparkConf
  val securityMgr = new SecurityManager(conf)
  val rpcEnv = mock(classOf[RpcEnv])
  val master = mock(classOf[Master])
  val masterEndpointRef = mock(classOf[RpcEndpointRef])
  when(master.securityMgr).thenReturn(securityMgr)
  when(master.conf).thenReturn(conf)
  when(master.rpcEnv).thenReturn(rpcEnv)
  when(master.self).thenReturn(masterEndpointRef)
  val masterWebUI = new MasterWebUI(master, 0)

  override def beforeAll() {
    super.beforeAll()
    masterWebUI.bind()
  }

  override def afterAll() {
    masterWebUI.stop()
    super.afterAll()
  }

  test("kill application") {
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "app-0", appDesc, new Date(), null, Int.MaxValue)

    when(master.idToApp).thenReturn(HashMap[String, ApplicationInfo]((activeApp.id, activeApp)))

    val url = s"http://localhost:${masterWebUI.boundPort}/app/kill/"
    val body = convPostDataToString(Map(("id", activeApp.id), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify the master was called to remove the active app
    verify(master, times(1)).removeApplication(activeApp, ApplicationState.KILLED)
  }

  test("kill driver") {
    val activeDriverId = "driver-0"
    val url = s"http://localhost:${masterWebUI.boundPort}/driver/kill/"
    val body = convPostDataToString(Map(("id", activeDriverId), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify that master was asked to kill driver with the correct id
    verify(masterEndpointRef, times(1)).ask[KillDriverResponse](RequestKillDriver(activeDriverId))
  }

  private def convPostDataToString(data: Map[String, String]): String = {
    (for ((name, value) <- data) yield s"$name=$value").mkString("&")
  }

  
  private def sendHttpRequest(
      url: String,
      method: String,
      body: String = ""): HttpURLConnection = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    if (body.nonEmpty) {
      conn.setDoOutput(true)
      conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
      conn.setRequestProperty("Content-Length", Integer.toString(body.length))
      val out = new DataOutputStream(conn.getOutputStream)
      out.write(body.getBytes(StandardCharsets.UTF_8))
      out.close()
    }
    conn
  }
} 
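One subtlety in sendHttpRequest: Content-Length is derived from body.length, the number of characters, while the bytes written are UTF-8 encoded. The two only coincide for ASCII-only bodies; a hedged sketch of the byte-based variant (the commented call is hypothetical usage, not part of the suite):

import java.nio.charset.StandardCharsets

// Compute the header from the encoded bytes rather than the character count.
val body  = "id=app-0&terminate=true"
val bytes = body.getBytes(StandardCharsets.UTF_8)
// conn.setRequestProperty("Content-Length", bytes.length.toString)
assert(bytes.length == body.length) // equal here only because the body is ASCII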
Example 120
Source File: GameManager.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
package com.bot4s.telegram.api

import java.net.URLDecoder
import java.nio.charset.StandardCharsets
import java.util.Base64

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive1, Route}
import com.bot4s.telegram.marshalling
import com.bot4s.telegram.methods.{GetGameHighScores, SetGameScore}
import com.bot4s.telegram.models.{CallbackQuery, ChatId, User}
import com.bot4s.telegram.future.BotExecutionContext
import io.circe.generic.extras.semiauto._
import io.circe.generic.semiauto.deriveDecoder
import io.circe.{Decoder, Encoder}

import scala.concurrent.Future
import scala.util.{Failure, Success}


case class Payload(
                    user            : User,
                    chatId          : Option[ChatId] = None,
                    messageId       : Option[Int] = None,
                    inlineMessageId : Option[String] = None,
                    gameManagerHost : String,
                    gameShortName   : String) {

  def toGetGameHighScores = GetGameHighScores(user.id, chatId, messageId, inlineMessageId)

  def base64Encode: String = {
    val payloadJson = marshalling.toJson[Payload](this)
    val encodedPayload = Base64.getEncoder.encodeToString(
      payloadJson.getBytes(StandardCharsets.UTF_8))

    encodedPayload
  }
}

object Payload {

  def base64Decode(encodedPayload: String): Payload = {
    val base64payload = URLDecoder.decode(encodedPayload, "UTF-8")
    val jsonPayload = new String(Base64.getDecoder.decode(base64payload),
      StandardCharsets.UTF_8)
    val payload = marshalling.fromJson[Payload](jsonPayload)

    payload
  }

  def forCallbackQuery(gameManagerHost: String)(implicit cbq: CallbackQuery): Payload = {
    Payload(
      cbq.from,
      cbq.message.map(_.source),
      cbq.message.map(_.messageId),
      cbq.inlineMessageId,
      gameManagerHost,
      cbq.gameShortName.get) // throws if not a game callback
  }

  import marshalling._
  implicit val payloadEncoder: Encoder[Payload] = deriveEncoder[Payload]
  implicit val payloadDecoder: Decoder[Payload] = deriveDecoder[Payload]
} 
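The decoder above URL-decodes the payload before Base64-decoding it. A small sketch of the URL-encode/decode round trip with an explicit charset name (the token value is illustrative):

import java.net.{URLDecoder, URLEncoder}
import java.nio.charset.StandardCharsets

// URL-encode and decode a Base64-like token with UTF-8 spelled out.
val token   = "eyJ1c2VyIjoxfQ=="
val encoded = URLEncoder.encode(token, StandardCharsets.UTF_8.name())
val decoded = URLDecoder.decode(encoded, StandardCharsets.UTF_8.name())
assert(decoded == token)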
Example 121
Source File: DemoFileDownloadServlet.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.demos.files.jetty

import java.io.File
import java.net.URLDecoder
import java.nio.charset.StandardCharsets
import javax.servlet.http.HttpServletRequest

import io.udash.demos.files.services.FilesStorage
import io.udash.rpc.utils.FileDownloadServlet

class DemoFileDownloadServlet(filesDir: String, contextPrefix: String) extends FileDownloadServlet {
  override protected def resolveFile(request: HttpServletRequest): File = {
    val name = URLDecoder.decode(request.getRequestURI.stripPrefix(contextPrefix + "/"), StandardCharsets.UTF_8.name())
    new File(filesDir, name)
  }

  override protected def presentedFileName(name: String): String =
    FilesStorage.allFiles
      .find(_.serverFileName == name)
      .map(_.name)
      .getOrElse(name)
} 
Example 122
Source File: Sourcer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.wmexchanger.utils

import java.io.{File, FileNotFoundException}
import java.nio.charset.StandardCharsets

import org.slf4j.{Logger, LoggerFactory}

import scala.io.BufferedSource
import scala.io.Source

object Sourcer {
  protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)
  val utf8: String = StandardCharsets.UTF_8.toString
  
  def sourceFromResource(path: String): BufferedSource = {
    val url = Option(Sourcer.getClass.getResource(path))
        .getOrElse(throw newFileNotFoundException(path))

    logger.info("Sourcing resource " + url.getPath)
    Source.fromURL(url, utf8)
  }
  
  def sourceFromFile(file: File): BufferedSource = {
    logger.info("Sourcing file " + file.getPath)
    Source.fromFile(file, utf8)
  }

  def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path))

  def newFileNotFoundException(path: String): FileNotFoundException = {
    val message1 = path + " (The system cannot find the path specified"
    val message2 = message1 + (if (path.startsWith("~")) ".  Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "")
    val message3 = message2 + ")"

    new FileNotFoundException(message3)
  }
} 
Example 123
Source File: Sourcer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.eidos.utils

import java.io.{File, FileNotFoundException}
import java.nio.charset.StandardCharsets

import org.slf4j.{Logger, LoggerFactory}

import scala.io.BufferedSource
import scala.io.Source

object Sourcer {
  protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)
  val utf8: String = StandardCharsets.UTF_8.toString
  
  def sourceFromResource(path: String): BufferedSource = {
    val url = Option(Sourcer.getClass.getResource(path))
        .getOrElse(throw newFileNotFoundException(path))

    logger.info("Sourcing resource " + url.getPath)
    Source.fromURL(url, utf8)
  }
  
  def sourceFromFile(file: File): BufferedSource = {
    logger.info("Sourcing file " + file.getPath)
    Source.fromFile(file, utf8)
  }

  def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path))

  def newFileNotFoundException(path: String): FileNotFoundException = {
    val message1 = path + " (The system cannot find the path specified"
    val message2 = message1 + (if (path.startsWith("~")) ".  Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "")
    val message3 = message2 + ")"

    new FileNotFoundException(message3)
  }
} 
Example 124
Source File: TestDiskFull.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.eidos.utils

import java.io.BufferedOutputStream
import java.io.FileOutputStream
import java.io.IOException
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.io.SyncFailedException
import java.nio.charset.StandardCharsets

import org.clulab.wm.eidos.test.TestUtils._
import org.clulab.wm.eidos.utils.Closer.AutoCloser

class TestDiskFull extends Test {

  def test1 = {
    val file = "/E:/full.dat"
    var i = 0

    try {
      val text1 = "The quick brown fox jumped over the lazy dog."
      val text = text1 + text1

      for (limit <- 1 until 400) {
        val fos = new FileOutputStream(file)
        val osw = new OutputStreamWriter(new BufferedOutputStream(fos), StandardCharsets.UTF_8.toString)
        i = 0

        new PrintWriter(osw).autoClose { pw =>
          while (i < limit) {
            pw.print(text)
            i += 1
            //          pw.flush()
            //          osw.flush()
            //          fos.flush()
            fos.getFD.sync()
          }
        }
      }
    }
    catch {
      case exception: SyncFailedException =>
        println(s"Synchronization failed for file $file at $i")
        println("Exiting with code -2 on assumption that the disk is full")
        System.exit(-2)
      case exception: IOException =>
        println(s"IO failed for file $file at $i")
        println("Exiting with code -2 on assumption that the disk is full")
        System.exit(-2)
      case exception: Exception =>
        println(s"Exception for file $file at $i")
        exception.printStackTrace()
      case throwable: Throwable =>
        println(s"Throwable for file $file at $i")
        throwable.printStackTrace()
    }
  }

//  test1
} 
Example 125
Source File: Sourcer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.elasticsearch.utils

import java.io.{File, FileNotFoundException}
import java.nio.charset.StandardCharsets

import org.slf4j.{Logger, LoggerFactory}

import scala.io.BufferedSource
import scala.io.Source

object Sourcer {
  protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)
  val utf8: String = StandardCharsets.UTF_8.toString
  
  def sourceFromResource(path: String): BufferedSource = {
    val url = Option(Sourcer.getClass.getResource(path))
        .getOrElse(throw newFileNotFoundException(path))

    logger.info("Sourcing resource " + url.getPath)
    Source.fromURL(url, utf8)
  }
  
  def sourceFromFile(file: File): BufferedSource = {
    logger.info("Sourcing file " + file.getPath)
    Source.fromFile(file, utf8)
  }

  def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path))

  def newFileNotFoundException(path: String): FileNotFoundException = {
    val message1 = path + " (The system cannot find the path specified"
    val message2 = message1 + (if (path.startsWith("~")) ".  Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "")
    val message3 = message2 + ")"

    new FileNotFoundException(message3)
  }
} 
Example 126
Source File: InfluxUDPClient.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.udp

import java.io.File
import java.net._
import java.nio.charset.{Charset, StandardCharsets}

import com.github.fsanaulla.chronicler.core.components.BodyBuilder
import com.github.fsanaulla.chronicler.core.model.{InfluxWriter, Point}

import scala.io.Source
import scala.util.{Failure, Try}


final class InfluxUDPClient(host: String, port: Int) extends AutoCloseable {
  private[this] val socket = new DatagramSocket()
  private[this] def buildAndSend(msg: Array[Byte]): Try[Unit] =
    Try(
      socket.send(
        new DatagramPacket(
          msg,
          msg.length,
          new InetSocketAddress(host, port)
        )
      )
    )

  def writeNative(point: String, charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(point.getBytes(charset))

  def bulkWriteNative(points: Seq[String], charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(points.mkString("\n").getBytes(charset))

  def write[T](
      measurement: String,
      entity: T,
      charset: Charset = StandardCharsets.UTF_8
    )(implicit writer: InfluxWriter[T]
    ): Try[Unit] = {
    BodyBuilder.stringBodyBuilder.fromT(measurement, entity) match {
      case Left(ex) => scala.util.Failure(ex)
      case Right(r) =>
        buildAndSend(r.getBytes(charset))
    }
  }

  def bulkWrite[T](
      measurement: String,
      entities: Seq[T],
      charset: Charset = StandardCharsets.UTF_8
    )(implicit writer: InfluxWriter[T]
    ): Try[Unit] = {
    BodyBuilder.stringBodyBuilder.fromSeqT(measurement, entities) match {
      case Left(ex) => Failure(ex)
      case Right(r) =>
        buildAndSend(r.getBytes(charset))
    }
  }

  def writeFromFile(file: File, charset: Charset = StandardCharsets.UTF_8): Try[Unit] = {
    val sendData = Source
      .fromFile(file)
      .getLines()
      .mkString("\n")
      .getBytes(charset)

    buildAndSend(sendData)
  }

  def writePoint(point: Point, charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(point.serialize.getBytes(charset))

  def bulkWritePoints(points: Seq[Point], charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(
      points
        .map(_.serialize)
        .mkString("\n")
        .getBytes(charset)
    )

  def close(): Unit = socket.close()
} 
Example 127
Source File: AhcJsonHandler.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.shared.handlers

import java.nio.charset.{Charset, StandardCharsets}

import com.github.fsanaulla.chronicler.core.alias.{ErrorOr, Id}
import com.github.fsanaulla.chronicler.core.components.JsonHandler
import com.github.fsanaulla.chronicler.core.encoding.encodingFromContentType
import com.github.fsanaulla.chronicler.core.implicits._
import com.github.fsanaulla.chronicler.core.jawn.RichJParser
import org.asynchttpclient.Response
import org.typelevel.jawn.ast.{JParser, JValue}

import scala.collection.JavaConverters._

private[ahc] final class AhcJsonHandler(compress: Boolean) extends JsonHandler[Id, Response] {

  
  def responseBody(response: Response): ErrorOr[JValue] = {
    val bodyBts = response.getResponseBodyAsBytes
    val encoding: Charset = Option(response.getContentType)
      .flatMap(encodingFromContentType)
      .map(Charset.forName)
      .getOrElse(StandardCharsets.UTF_8)

    val bodyStr = new String(bodyBts, encoding)

    JParser.parseFromStringEither(bodyStr)
  }

  def responseHeader(response: Response): List[(String, String)] =
    response.getHeaders
      .entries()
      .asScala
      .toList
      .map(e => e.getKey -> e.getValue)

  def responseCode(response: Response): Int =
    response.getStatusCode
} 
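The charset resolution above falls back to UTF-8 whenever the response does not advertise one. A hedged sketch of that fallback written against a plain Content-Type string (charsetOrUtf8 is a hypothetical helper, not part of chronicler):

import java.nio.charset.{Charset, StandardCharsets}
import scala.util.Try

// Take the charset named in a Content-Type header, defaulting to UTF-8.
def charsetOrUtf8(contentType: Option[String]): Charset =
  contentType
    .flatMap(_.split(";").map(_.trim).collectFirst {
      case p if p.toLowerCase.startsWith("charset=") => p.drop("charset=".length)
    })
    .flatMap(name => Try(Charset.forName(name)).toOption)
    .getOrElse(StandardCharsets.UTF_8)

assert(charsetOrUtf8(Some("application/json; charset=ISO-8859-1")) == StandardCharsets.ISO_8859_1)
assert(charsetOrUtf8(None) == StandardCharsets.UTF_8)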
Example 128
Source File: ZkWatcher.scala    From Adenium   with Apache License 2.0 5 votes vote down vote up
package com.adenium.externals.zookeeper

import java.nio.charset.StandardCharsets

import com.adenium.externals.zookeeper.ZkUtil.setPersistent
import com.adenium.utils.Logger
import com.adenium.utils.May._
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.Watcher.Event.EventType
import org.apache.zookeeper.data.Stat
import org.apache.zookeeper.{KeeperException, WatchedEvent, Watcher}

import scala.language.reflectiveCalls



object ZkWatcher {

  def onZkChange(cur: CuratorFramework, path: String)(handler: (String, Stat) => Unit) {

    Logger.logInfo("[  watchNodeOrChidlrenChange ] == zknode : " + path)

    def watcher = new Watcher {
      def process(event: WatchedEvent) {
        Logger.logDebug("[ watchNodeOrChidlrenChange ] == callback invoked " + path + "\ttype: " + event.getType)
        event.getType match {
          case EventType.NodeDataChanged | EventType.NodeChildrenChanged => updated()
          case _ => reset()
        }
      }
    }

    def updated() {
      try {
        val stat = new Stat()
        val msg = cur.getData.storingStatIn(stat).forPath(path)

        setPersistent(cur, path, "")

        val str = new String(msg, StandardCharsets.UTF_8)

        if (str.nonEmpty) {
          state("[ Watching ] == arrived msg: " + new String(msg, StandardCharsets.UTF_8))
          handler(str, stat)
        }

        if (str.startsWith("stop zkctrl")) {
          Logger.logWarning("[ Watching ] == stopped by 'stop zkctrl' message : path =" + path)
        } else {
          /// create and attach next msg watcher
          cur.checkExists.usingWatcher(watcher).forPath(path)
        }

      } catch {
        case e: KeeperException =>
          Logger.logWarning("[ watchNodeOrChidlrenChange ] == read node: " + path + "\te: " + e)
          reset()
      }
    }

    def reset() {
      setPersistent(cur, path, "")
      updated()
    }

    reset()
  }

} 
Example 129
Source File: S3Util.scala    From redshift-fake-driver   with Apache License 2.0 5 votes vote down vote up
package jp.ne.opt.redshiftfake

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.charset.StandardCharsets
import java.util.zip.GZIPOutputStream

import com.amazonaws.services.s3.AmazonS3
import com.amazonaws.services.s3.model.{ObjectMetadata, PutObjectRequest}
import jp.ne.opt.redshiftfake.util.Loan.using
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream

object S3Util {

   def loadGzippedDataToS3(s3Client: AmazonS3, data: String, bucket: String, key: String): Unit = {
    val arrayOutputStream = new ByteArrayOutputStream()
    using(new GZIPOutputStream(arrayOutputStream)) (gzipOutStream => {
      gzipOutStream.write(data.getBytes(StandardCharsets.UTF_8))
    })
    val buf = arrayOutputStream.toByteArray
    val metadata = new ObjectMetadata
    metadata.setContentLength(buf.length)
    val request = new PutObjectRequest(bucket, key, new ByteArrayInputStream(buf), metadata)

    s3Client.putObject(request)
  }

   def loadBzipped2DataToS3(s3Client: AmazonS3, data: String, bucket: String, key: String): Unit = {
    val arrayOutputStream = new ByteArrayOutputStream()
    using(new BZip2CompressorOutputStream(arrayOutputStream)) (bzip2OutStream => {
      bzip2OutStream.write(data.getBytes(StandardCharsets.UTF_8))
    })
    val buf = arrayOutputStream.toByteArray
    val metadata = new ObjectMetadata
    metadata.setContentLength(buf.length)
    val request = new PutObjectRequest(bucket, key, new ByteArrayInputStream(buf), metadata)

    s3Client.putObject(request)
  }

   def loadDataToS3(s3Client: AmazonS3, data: String, bucket: String, key: String): Unit = {
    val buf = data.getBytes
    val metadata = new ObjectMetadata
    metadata.setContentLength(buf.length)
    val request = new PutObjectRequest(bucket, key, new ByteArrayInputStream(buf), metadata)

    s3Client.putObject(request)
  }
} 
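Unlike the gzip and bzip2 variants, loadDataToS3 calls data.getBytes with no charset, so the uploaded bytes depend on the JVM default. A hedged one-line sketch of the explicit form (sample data is illustrative):

import java.nio.charset.StandardCharsets

// Encode explicitly so the uploaded object is UTF-8 regardless of JVM defaults.
val buf = "a,b,c\n1,2,3".getBytes(StandardCharsets.UTF_8)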
Example 130
Source File: EnvelopeDecoderSpec.scala    From fs2-rabbit   with Apache License 2.0 5 votes vote down vote up
package dev.profunktor.fs2rabbit.effects

import java.nio.charset.StandardCharsets

import cats.data.EitherT
import cats.effect.{IO, SyncIO}
import cats.instances.either._
import cats.instances.try_._
import dev.profunktor.fs2rabbit.model.{AmqpEnvelope, AmqpProperties, DeliveryTag, ExchangeName, RoutingKey}
import org.scalatest.funsuite.AsyncFunSuite

import scala.util.Try

class EnvelopeDecoderSpec extends AsyncFunSuite {

  // Available instances of EnvelopeDecoder for any ApplicativeError[F, Throwable]
  EnvelopeDecoder[Either[Throwable, ?], String]
  EnvelopeDecoder[SyncIO, String]
  EnvelopeDecoder[EitherT[IO, String, ?], String]
  EnvelopeDecoder[Try, String]

  test("should decode a UTF-8 string") {
    val msg = "hello world!"
    val raw = msg.getBytes(StandardCharsets.UTF_8)

    EnvelopeDecoder[IO, String]
      .run(
        AmqpEnvelope(DeliveryTag(0L), raw, AmqpProperties.empty, ExchangeName("test"), RoutingKey("test.route"), false))
      .flatMap { result =>
        IO(assert(result == msg))
      }
      .unsafeToFuture()
  }

  test("should decode payload with the given content encoding") {
    val msg = "hello world!"
    val raw = msg.getBytes(StandardCharsets.UTF_8)

    EnvelopeDecoder[IO, String]
      .run(
        AmqpEnvelope(DeliveryTag(0L),
                     raw,
                     AmqpProperties.empty.copy(contentEncoding = Some("UTF-16")),
                     ExchangeName("test"),
                     RoutingKey("test.route"),
                     false))
      .flatMap { result =>
        IO(assert(result != msg))
      }
      .unsafeToFuture()
  }

  test("should decode a UTF-16 string into a UTF-8 (default)") {
    val msg = "hello world!"
    val raw = msg.getBytes(StandardCharsets.UTF_16)

    EnvelopeDecoder[IO, String]
      .run(
        AmqpEnvelope(DeliveryTag(0L), raw, AmqpProperties.empty, ExchangeName("test"), RoutingKey("test.route"), false))
      .flatMap { result =>
        IO(assert(result != msg))
      }
      .unsafeToFuture()
  }

} 
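The last two tests rely on the fact that UTF-16 and UTF-8 encodings of the same text are not interchangeable. A compact sketch of that mismatch:

import java.nio.charset.StandardCharsets

// Bytes produced as UTF-16 do not decode back to the same string under UTF-8.
val msg   = "hello world!"
val utf16 = msg.getBytes(StandardCharsets.UTF_16)
assert(new String(utf16, StandardCharsets.UTF_8) != msg)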
Example 131
Source File: MiscSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer.derivation

import java.nio.charset.StandardCharsets

import io.bullet.borer._
import utest._

object MiscSpec extends AbstractBorerSpec {

  def encode[T: Encoder](value: T): String   = Json.encode(value).toUtf8String
  def decode[T: Decoder](encoded: String): T = Json.decode(encoded getBytes StandardCharsets.UTF_8).to[T].value

  final case class CaseClass3(abc: Int, d: String, efghi: Boolean)

  final case class CaseClassT[T](key: String, value: T)

  final case class CaseClass1(flag: Boolean)

  object CaseClass1 {
    def apply(): CaseClass1 = new CaseClass1(false)
  }

  final case class CaseClass1T[T](value: T)

  val tests = Tests {

    "Case Class with 3 members" - {
      implicit val codec = ArrayBasedCodecs.deriveCodec[CaseClass3]
      roundTrip("""[42,"",true]""", CaseClass3(42, "", efghi = true))
    }

    "Generic Case Class with fixed codec" - {
      implicit val codec = ArrayBasedCodecs.deriveCodec[CaseClassT[Double]]
      roundTrip("""["foo",18.1]""", CaseClassT("foo", 18.1))
    }

    "Generic Case Class with generic codec" - {
      implicit def codec[T: Encoder: Decoder]: Codec[CaseClassT[T]] = ArrayBasedCodecs.deriveCodec[CaseClassT[T]]
      roundTrip("""["foo",18.1]""", CaseClassT("foo", 18.1))
    }

    "Unary Case Class with custom apply" - {
      implicit val codec = ArrayBasedCodecs.deriveCodec[CaseClass1]
      roundTrip("false", CaseClass1(false))
    }

    "Generic unary Case Class" - {
      implicit def codec[T: Encoder: Decoder]: Codec[CaseClass1T[T]] = ArrayBasedCodecs.deriveCodec[CaseClass1T[T]]
      roundTrip(""""foo"""", CaseClass1T("foo"))
    }

    "Local Case Class" - {
      case class Box(id: String)
      implicit val boxCodec = ArrayBasedCodecs.deriveCodec[Box]
      roundTrip(""""abc"""", Box("abc"))
    }

    "Recursive Case Class" - {
      case class Box(x: Option[Box] = None)
      implicit lazy val codec: Codec[Box] = ArrayBasedCodecs.deriveCodec[Box]
      roundTrip("""[[[]]]""", Box(Some(Box(Some(Box())))))
    }

    "CompactMapBasedCodecs" - {

      "unary" - {
        implicit val codec = CompactMapBasedCodecs.deriveCodec[CaseClass1]
        roundTrip("false", CaseClass1(false))
      }

      "non-unary" - {
        implicit val codec = CompactMapBasedCodecs.deriveCodec[CaseClass3]
        roundTrip("""{"abc":42,"d":"","efghi":true}""", CaseClass3(42, "", efghi = true))
      }
    }
  }
} 
Example 132
Source File: AkkaJsonSuiteSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer.compat

import java.nio.charset.StandardCharsets

import _root_.akka.util.ByteString
import io.bullet.borer._

object AkkaJsonSuiteSpec extends AbstractJsonSuiteSpec {
  import akka._

  def encode[T: Encoder](value: T): String =
    Json.encode(value).to[ByteString].result.utf8String

  def decode[T: Decoder](encoded: String): T =
    Json
      .decode(ByteString(encoded getBytes StandardCharsets.UTF_8))
      .withConfig(Json.DecodingConfig.default.copy(maxNumberAbsExponent = 300))
      .to[T]
      .value
} 
Example 133
Source File: ScodecJsonSuiteSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer.compat

import java.nio.charset.StandardCharsets
import _root_.scodec.bits.ByteVector
import io.bullet.borer._

object ScodecJsonSuiteSpec extends AbstractJsonSuiteSpec {
  import scodec._

  def encode[T: Encoder](value: T): String =
    Json.encode(value).to[ByteVector].result.decodeUtf8.getOrElse("")

  def decode[T: Decoder](encoded: String): T =
    Json
      .decode(ByteVector(encoded getBytes StandardCharsets.UTF_8))
      .withConfig(Json.DecodingConfig.default.copy(maxNumberAbsExponent = 300))
      .to[T]
      .value
} 
Example 134
Source File: ByteBufferJsonSuiteSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer

import java.nio.charset.StandardCharsets
import java.nio.ByteBuffer

object ByteBufferJsonSuiteSpec extends AbstractJsonSuiteSpec {

  def encode[T: Encoder](value: T): String = {
    val byteBuffer = Json.encode(value).withConfig(Json.EncodingConfig(bufferSize = 8)).toByteBuffer
    new String(ByteAccess.ForByteBuffer.toByteArray(byteBuffer), StandardCharsets.UTF_8)
  }

  def decode[T: Decoder](encoded: String): T =
    Json
      .decode(ByteBuffer.wrap(encoded getBytes StandardCharsets.UTF_8))
      .withConfig(Json.DecodingConfig.default.copy(maxNumberAbsExponent = 300))
      .to[T]
      .value
} 
Example 135
Source File: FileSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import utest._

import scala.io.Source

object FileSpec extends TestSuite {

  final case class Foo(
      string: String = "This is a really long text for testing writing to a file",
      int: Int = 42,
      double: Double = 0.0)

  implicit val fooCodec = Codec(Encoder.from(Foo.unapply _), Decoder.from(Foo.apply _))

  val tests = Tests {

    "small file" - {
      val tempFile = File.createTempFile("borer", ".json")
      try {
        Json.encode(Foo()).to(tempFile).result ==> tempFile

        new String(Files.readAllBytes(tempFile.toPath), "UTF8") ==>
        """["This is a really long text for testing writing to a file",42,0.0]"""

        Json.decode(tempFile).to[Foo].value ==> Foo()

      } finally tempFile.delete()
    }

    "large file" - {
      val testFileBytes = Source.fromResource("large.json").mkString.getBytes(StandardCharsets.UTF_8)
      val config = Json.DecodingConfig.default
        .copy(maxNumberMantissaDigits = 99, maxNumberAbsExponent = 300, initialCharbufferSize = 8)
      val dom = Json.decode(testFileBytes).withConfig(config).to[Dom.Element].value

      val tempFile = File.createTempFile("borer", ".json")
      try {
        Json.encode(dom).to(tempFile).result ==> tempFile

        Json
          .decode(Input.fromFile(tempFile, bufferSize = 256))
          .withConfig(config)
          .to[Dom.Element]
          .value ==> dom

      } finally tempFile.delete()
    }
  }
} 
Example 136
Source File: FromInputIteratorFileSpec.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer

import java.nio.charset.StandardCharsets

import utest._

import scala.io.Source

object FromInputIteratorFileSpec extends TestSuite with TestUtils {

  val testFileBytes = Source.fromResource("large.json").mkString.getBytes(StandardCharsets.UTF_8)

  val config = Json.DecodingConfig.default
    .copy(maxNumberMantissaDigits = 99, maxNumberAbsExponent = 300, initialCharbufferSize = 8)
  val dom = Json.decode(testFileBytes).withConfig(config).to[Dom.Element].value

  val tests = Tests {

    "test file" - {
      Json
        .decode(chunkedInput(3, 2, 1, 0, 100, 71))
        .withConfig(config)
        .to[Dom.Element]
        .value ==> dom
    }
  }

  def chunkedInput(chunkSizes: Int*): Iterator[Array[Byte]] =
    chunkIterator(testFileBytes, Iterator.continually(0).flatMap(_ => chunkSizes))

  def chunkIterator(remainingBytes: Array[Byte], chunkSizes: Iterator[Int]): Iterator[Array[Byte]] = {
    val len = chunkSizes.next()
    if (remainingBytes.length <= len) Iterator.single(remainingBytes)
    else Iterator.single(remainingBytes.take(len)) ++ chunkIterator(remainingBytes.drop(len), chunkSizes)
  }

  final class FFPadder(input: Input[Array[Byte]]) extends Input.PaddingProvider[Array[Byte]] {

    def padByte(): Byte = -1

    def padDoubleByte(remaining: Int): Char =
      if (remaining < 1) '\uffff' else ((input.readByte() << 8) | 0xFF).toChar

    def padQuadByte(remaining: Int): Int = {
      import input.{readByte => byte, readDoubleByteBigEndian => doub}
      // format: OFF
      remaining match {
        case 0 =>                                            0xFFFFFFFF
        case 1 =>                         (byte()   << 24) | 0xFFFFFF
        case 2 => (doub() << 16)                           | 0xFFFF
        case 3 => (doub() << 16) | ((byte() & 0xFF) <<  8) | 0xFF
        case _ => throw new IllegalStateException
      }
      // format: ON
    }

    def padOctaByte(remaining: Int): Long = {
      import input.{readByte => byte, readDoubleByteBigEndian => doub, readQuadByteBigEndian => quad}
      // format: OFF
      remaining match {
        case 0 =>                                                                                 0XFFFFFFFFFFFFFFFFL
        case 1 =>                                                      (byte().toLong    << 56) | 0XFFFFFFFFFFFFFFL
        case 2 =>                         (doub().toLong      << 48)                            | 0XFFFFFFFFFFFFL
        case 3 =>                         (doub().toLong      << 48) | ((byte() & 0XFFL) << 40) | 0XFFFFFFFFFFL
        case 4 => (quad().toLong << 32) |                                                         0XFFFFFFFFL
        case 5 => (quad().toLong << 32) |                              ((byte() & 0XFFL) << 24) | 0XFFFFFFL
        case 6 => (quad().toLong << 32) | ((doub() & 0XFFFFL) << 16) |                            0XFFFFL
        case 7 => (quad().toLong << 32) | ((doub() & 0XFFFFL) << 16) | ((byte() & 0XFFL) <<  8) | 0XFFL
        case _ => throw new IllegalStateException
      }
      // format: ON
    }

    def padBytes(rest: Array[Byte], missing: Long) =
      ByteAccess.ForByteArray.concat(rest, Array.fill[Byte](missing.toInt)(-1))
  }
} 
Example 137
Source File: FileChecks.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.convert

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}

import cats.data.Kleisli
import cats.effect.IO
import fs2.{Pipe, Stream}
import docspell.common.MimeType
import docspell.convert.ConversionResult.Handler
import docspell.files.TikaMimetype

trait FileChecks {

  implicit class FileCheckOps(p: Path) {

    def isNonEmpty: Boolean =
      Files.exists(p) && Files.size(p) > 0

    def isType(mime: MimeType): Boolean =
      TikaMimetype.detect[IO](p).map(_ == mime).unsafeRunSync

    def isPDF: Boolean =
      isType(MimeType.pdf)

    def isPlainText: Boolean =
      isType(MimeType.text("plain"))
  }

  def storeFile(file: Path): Pipe[IO, Byte, Path] =
    in => Stream.eval(in.compile.to(Array).flatMap(bytes => IO(Files.write(file, bytes))))

  def storePdfHandler(file: Path): Handler[IO, Path] =
    storePdfTxtHandler(file, file.resolveSibling("unexpected.txt")).map(_._1)

  def storePdfTxtHandler(filePdf: Path, fileTxt: Path): Handler[IO, (Path, Path)] =
    Kleisli({
      case ConversionResult.SuccessPdfTxt(pdf, txt) =>
        for {
          pout <- pdf.through(storeFile(filePdf)).compile.lastOrError
          str  <- txt
          tout <- IO(Files.write(fileTxt, str.getBytes(StandardCharsets.UTF_8)))
        } yield (pout, tout)

      case ConversionResult.SuccessPdf(pdf) =>
        pdf.through(storeFile(filePdf)).compile.lastOrError.map(p => (p, fileTxt))

      case ConversionResult.Failure(ex) =>
        throw new Exception(s"Unexpected result (failure: ${ex.getMessage})", ex)

      case cr =>
        throw new Exception(s"Unexpected result: $cr")
    })

  def commandExists(cmd: String): Boolean =
    Runtime.getRuntime.exec(Array("which", cmd)).waitFor() == 0

} 
Example 138
Source File: Binary.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.common

import java.nio.charset.Charset
import java.nio.charset.StandardCharsets

import cats.effect._
import fs2.{Chunk, Pipe, Stream}

import scodec.bits.ByteVector

final case class Binary[F[_]](name: String, mime: MimeType, data: Stream[F, Byte]) {

  def withMime(mime: MimeType): Binary[F] =
    copy(mime = mime)
}

object Binary {

  def apply[F[_]](name: String, data: Stream[F, Byte]): Binary[F] =
    Binary[F](name, MimeType.octetStream, data)

  def utf8[F[_]](name: String, content: String): Binary[F] =
    Binary[F](
      name,
      MimeType.octetStream,
      Stream.emit(content).through(fs2.text.utf8Encode)
    )

  def text[F[_]](name: String, content: String): Binary[F] =
    utf8(name, content).withMime(MimeType.plain.withUtf8Charset)

  def text[F[_]](name: String, content: ByteVector, cs: Charset): Binary[F] =
    Binary(name, MimeType.plain.withCharset(cs), Stream.chunk(Chunk.byteVector(content)))

  def html[F[_]](name: String, content: String): Binary[F] =
    utf8(name, content).withMime(MimeType.html.withUtf8Charset)

  def html[F[_]](name: String, content: ByteVector, cs: Charset): Binary[F] =
    Binary(name, MimeType.html.withCharset(cs), Stream.chunk(Chunk.byteVector(content)))

  def decode[F[_]](cs: Charset): Pipe[F, Byte, String] =
    if (cs == StandardCharsets.UTF_8)
      fs2.text.utf8Decode
    else
      util.decode[F](cs)

  def loadAllBytes[F[_]: Sync](data: Stream[F, Byte]): F[ByteVector] =
    data.chunks.map(_.toByteVector).compile.fold(ByteVector.empty)((r, e) => r ++ e)

  // This is a copy from org.http4s.util
  // Http4s is licensed under the Apache License 2.0
  private object util {
    import fs2._
    import java.nio._

    private val utf8Bom: Chunk[Byte] = Chunk(0xef.toByte, 0xbb.toByte, 0xbf.toByte)

    def decode[F[_]](charset: Charset): Pipe[F, Byte, String] = {
      val decoder         = charset.newDecoder
      val maxCharsPerByte = math.ceil(decoder.maxCharsPerByte().toDouble).toInt
      val avgBytesPerChar = math.ceil(1.0 / decoder.averageCharsPerByte().toDouble).toInt
      val charBufferSize  = 128

      _.repeatPull[String] {
        _.unconsN(charBufferSize * avgBytesPerChar, allowFewer = true).flatMap {
          case None =>
            val charBuffer = CharBuffer.allocate(1)
            decoder.decode(ByteBuffer.allocate(0), charBuffer, true)
            decoder.flush(charBuffer)
            val outputString = charBuffer.flip().toString
            if (outputString.isEmpty) Pull.done.as(None)
            else Pull.output1(outputString).as(None)
          case Some((chunk, stream)) =>
            if (chunk.nonEmpty) {
              val chunkWithoutBom = skipByteOrderMark(chunk)
              val bytes           = chunkWithoutBom.toArray
              val byteBuffer      = ByteBuffer.wrap(bytes)
              val charBuffer      = CharBuffer.allocate(bytes.length * maxCharsPerByte)
              decoder.decode(byteBuffer, charBuffer, false)
              val nextStream = stream.consChunk(Chunk.byteBuffer(byteBuffer.slice()))
              Pull.output1(charBuffer.flip().toString).as(Some(nextStream))
            } else
              Pull.output(Chunk.empty[String]).as(Some(stream))
        }
      }
    }

    private def skipByteOrderMark[F[_]](chunk: Chunk[Byte]): Chunk[Byte] =
      if (chunk.size >= 3 && chunk.take(3) == utf8Bom)
        chunk.drop(3)
      else chunk

  }
} 
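The util.decode pipe above drives a stateful java.nio CharsetDecoder so that multi-byte characters split across chunk boundaries survive decoding. A minimal standalone sketch of that idea with only JDK classes (the sample text and the carry-over loop are illustrative, not the docspell code):

import java.nio.charset.StandardCharsets
import java.nio.{ByteBuffer, CharBuffer}

object IncrementalDecodeSketch extends App {
  val decoder = StandardCharsets.UTF_8.newDecoder
  // "héllo" in UTF-8: the 'é' takes two bytes, so a fixed-size split can cut it in half
  val bytes  = "héllo".getBytes(StandardCharsets.UTF_8)
  val chunks = bytes.grouped(2).toList

  val out      = new StringBuilder
  var leftover = Array.emptyByteArray
  chunks.zipWithIndex.foreach { case (chunk, i) =>
    val in  = ByteBuffer.wrap(leftover ++ chunk)
    val buf = CharBuffer.allocate(16)
    decoder.decode(in, buf, i == chunks.size - 1) // endOfInput only on the last chunk
    buf.flip()
    out.append(buf.toString)
    // Bytes of an incomplete character remain in the input buffer; carry them into the
    // next round, much as the fs2 pipe re-emits the unconsumed slice onto the stream
    leftover = new Array[Byte](in.remaining())
    in.get(leftover)
  }
  val tail = CharBuffer.allocate(16)
  decoder.flush(tail)
  tail.flip()
  out.append(tail.toString)

  println(out.toString) // héllo
}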
Example 139
Source File: SearchFilters.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.database

import cats.implicits._
import com.azavea.franklin.api.schemas.bboxToString
import com.azavea.franklin.datamodel.{PaginationToken, Query}
import com.azavea.stac4s.{Bbox, TemporalExtent}
import eu.timepit.refined.types.numeric.NonNegInt
import geotrellis.vector.Geometry
import geotrellis.vector.{io => _, _}
import io.circe.generic.semiauto._
import io.circe.refined._
import io.circe.{Decoder, HCursor}

import java.net.URLEncoder
import java.nio.charset.StandardCharsets

final case class SearchFilters(
    bbox: Option[Bbox],
    datetime: Option[TemporalExtent],
    intersects: Option[Geometry],
    collections: List[String],
    items: List[String],
    limit: Option[NonNegInt],
    query: Map[String, List[Query]],
    next: Option[PaginationToken]
) {

  def asQueryParameters: String = {

    val bboxQP =
      bbox map { box => s"bbox=${bboxToString(box)}" }
    val datetimeQP =
      datetime map { tempExtent =>
        s"datetime=${SearchFilters.encodeString(temporalExtentToString(tempExtent))}"
      }
    val collectionsQP = collections.toNel map { _ =>
      s"""collections=${SearchFilters.encodeString(collections.mkString(","))}"""
    }
    val itemsQP = items.toNel map { _ =>
      s"""ids=${SearchFilters.encodeString(items.mkString(","))}"""
    }

    List(bboxQP, datetimeQP, collectionsQP, itemsQP).flatten.mkString("&")
  }

}

object SearchFilters {

  def encodeString(s: String): String = URLEncoder.encode(s, StandardCharsets.UTF_8.toString)

  implicit val searchFilterDecoder = new Decoder[SearchFilters] {

    final def apply(c: HCursor): Decoder.Result[SearchFilters] =
      for {
        bbox              <- c.downField("bbox").as[Option[Bbox]]
        datetime          <- c.downField("datetime").as[Option[TemporalExtent]]
        intersects        <- c.downField("intersects").as[Option[Geometry]]
        collectionsOption <- c.downField("collections").as[Option[List[String]]]
        itemsOption       <- c.downField("items").as[Option[List[String]]]
        limit             <- c.downField("limit").as[Option[NonNegInt]]
        query             <- c.get[Option[Map[String, List[Query]]]]("query")
        paginationToken   <- c.get[Option[PaginationToken]]("next")
      } yield {
        SearchFilters(
          bbox,
          datetime,
          intersects,
          collectionsOption.getOrElse(List.empty),
          itemsOption.getOrElse(List.empty),
          limit,
          query getOrElse Map.empty,
          paginationToken
        )
      }
  }
  implicit val searchFilterEncoder = deriveEncoder[SearchFilters]
} 
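encodeString wraps URLEncoder.encode with the UTF-8 charset name, the same pattern the other franklin examples on this page use for collection and item ids. A standalone round-trip sketch with those JDK calls (the sample value is made up):

import java.net.{URLDecoder, URLEncoder}
import java.nio.charset.StandardCharsets

object UrlEncodingSketch extends App {
  val raw = "landsat 8/collection,2020"

  // URLEncoder and URLDecoder take the charset *name*, hence UTF_8.toString
  val encoded = URLEncoder.encode(raw, StandardCharsets.UTF_8.toString)
  val decoded = URLDecoder.decode(encoded, StandardCharsets.UTF_8.toString)

  println(encoded)        // landsat+8%2Fcollection%2C2020
  println(decoded == raw) // true
}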
Example 140
Source File: TileInfo.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.datamodel

import cats.implicits._
import com.azavea.stac4s._
import io.circe.generic.JsonCodec

import java.net.URLEncoder
import java.nio.charset.StandardCharsets

@JsonCodec
case class TileInfo(
    extent: StacExtent,
    title: Option[String],
    description: Option[String],
    tileMatrixSetLinks: List[TileMatrixSetLink],
    links: List[TileSetLink]
)

object TileInfo {

  val webMercatorQuadLink = TileMatrixSetLink(
    "WebMercatorQuad",
    "http://schemas.opengis.net/tms/1.0/json/examples/WebMercatorQuad.json"
  )

  def fromStacItem(host: String, collectionId: String, item: StacItem): Option[TileInfo] = {
    val spatialExtent = SpatialExtent(List(item.bbox))
    val stacExtent    = StacExtent(spatialExtent, Interval(List.empty))

    val cogTileLinks = item.assets.collect {
      case (key, asset) if asset._type === Some(`image/cog`) =>
        val encodedItemId = URLEncoder.encode(item.id, StandardCharsets.UTF_8.toString)
        val encodedKey    = URLEncoder.encode(key, StandardCharsets.UTF_8.toString)
        val href =
          s"$host/tiles/collections/$collectionId/items/$encodedItemId/{tileMatrixSetId}/{tileMatrix}/{tileCol}/{tileRow}/?asset=$encodedKey"
        val mediaType = Some(`image/png`)
        TileSetLink(href, StacLinkType.Item, mediaType, None, Some(true))
    }

    cogTileLinks.isEmpty match {
      case false =>
        Some(TileInfo(stacExtent, None, None, List(webMercatorQuadLink), cogTileLinks.toList))
      case _ => None
    }
  }

  def fromStacCollection(host: String, collection: StacCollection): TileInfo = {
    val mvtHref =
      s"$host/tiles/collections/${collection.id}/footprint/{tileMatrixSetId}/{tileMatrix}/{tileCol}/{tileRow}"
    val tileEndpointLink = TileSetLink(
      mvtHref,
      StacLinkType.VendorLinkType("tiles"),
      Some(VendorMediaType("application/vnd.mapbox-vector-tile")),
      Some(s"${collection.id} -- Footprints"),
      Some(true)
    )

    val tileJsonHref =
      s"$host/tiles/collections/${collection.id}/footprint/tile-json"

    val tileJsonLink = TileSetLink(
      tileJsonHref,
      StacLinkType.VendorLinkType("tile-json"),
      Some(`application/json`),
      Some(s"${collection.id} -- Footprints TileJSON"),
      Some(false)
    )

    TileInfo(
      collection.extent,
      collection.title map { title => s"$title - MVT" },
      Some("Mapbox Vector Tile representation of item footprints for this collection"),
      List(webMercatorQuadLink),
      List(
        tileEndpointLink,
        tileJsonLink
      )
    )
  }
} 
Example 141
Source File: TileRequest.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.datamodel

import eu.timepit.refined.types.numeric.NonNegInt
import eu.timepit.refined.types.string.NonEmptyString

import java.net.URLDecoder
import java.nio.charset.StandardCharsets

sealed trait TileMatrixRequest {
  val z: Int
  val x: Int
  val y: Int
  val collection: String

  def urlDecode(rawString: String): String =
    URLDecoder.decode(rawString, StandardCharsets.UTF_8.toString)
}

case class ItemRasterTileRequest(
    collectionRaw: String,
    itemRaw: String,
    z: Int,
    x: Int,
    y: Int,
    asset: String,
    redBandOption: Option[Int],
    greenBandOption: Option[Int],
    blueBandOption: Option[Int],
    upperQuantileOption: Option[Quantile],
    lowerQuantileOption: Option[Quantile],
    singleBand: Option[NonNegInt]
) extends TileMatrixRequest {

  val collection = urlDecode(collectionRaw)
  val item       = urlDecode(itemRaw)

  val redBand   = redBandOption.getOrElse(0)
  val greenBand = greenBandOption.getOrElse(1)
  val blueBand  = blueBandOption.getOrElse(2)

  val bands = Seq(redBand, greenBand, blueBand)

  // Because lists are 0 indexed and humans are 1 indexed we need to adjust
  val upperQuantile = upperQuantileOption.map(_.value).getOrElse(100) - 1
  val lowerQuantile = lowerQuantileOption.map(_.value).getOrElse(-1) + 1

  val zxy = (z, x, y)

}

case class MapboxVectorTileFootprintRequest(
    collectionRaw: String,
    z: Int,
    x: Int,
    y: Int,
    colorField: NonEmptyString
) extends TileMatrixRequest {
  val collection = urlDecode(collectionRaw)
} 
Example 142
Source File: FeatureExtractor.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.crawler

import com.azavea.stac4s.StacLink
import com.azavea.stac4s.TwoDimBbox
import com.azavea.stac4s._
import geotrellis.vector.methods.Implicits._
import geotrellis.vector.{Feature, Geometry}
import io.circe.JsonObject

import java.net.URLEncoder
import java.nio.charset.StandardCharsets
import java.util.UUID

object FeatureExtractor {

  def toItem(
      feature: Feature[Geometry, JsonObject],
      forItem: StacItem,
      forItemCollection: String,
      inCollection: StacCollection
  ): StacItem = {
    val collectionHref =
      s"/collections/${URLEncoder.encode(inCollection.id, StandardCharsets.UTF_8.toString)}"
    val encodedSourceItemCollectionId =
      URLEncoder.encode(forItemCollection, StandardCharsets.UTF_8.toString)
    val sourceItemHref =
      s"/collections/$encodedSourceItemCollectionId/items/${URLEncoder.encode(forItem.id, StandardCharsets.UTF_8.toString)}"

    val collectionLink = StacLink(
      collectionHref,
      StacLinkType.Collection,
      Some(`application/json`),
      title = Some("Source item's original collection")
    )

    val sourceItemLink = StacLink(
      sourceItemHref,
      StacLinkType.VendorLinkType("derived_from"),
      Some(`application/json`),
      None
    )

    val featureExtent = feature.geom.extent

    StacItem(
      s"${UUID.randomUUID}",
      "0.9.0",
      Nil,
      "Feature",
      feature.geom,
      TwoDimBbox(featureExtent.xmin, featureExtent.ymin, featureExtent.xmax, featureExtent.ymax),
      links = List(collectionLink, sourceItemLink),
      assets = Map.empty,
      collection = Some(inCollection.id),
      properties = feature.data
    )
  }

} 
Example 143
Source File: CollectionsServiceSpec.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.api.services

import cats.data.OptionT
import cats.effect.IO
import cats.implicits._
import com.azavea.franklin.Generators
import com.azavea.franklin.api.{TestClient, TestServices}
import com.azavea.franklin.database.TestDatabaseSpec
import com.azavea.franklin.datamodel.CollectionsResponse
import com.azavea.stac4s.StacCollection
import com.azavea.stac4s.testing._
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.{Method, Request, Uri}
import org.specs2.{ScalaCheck, Specification}

import java.net.URLEncoder
import java.nio.charset.StandardCharsets

class CollectionsServiceSpec
    extends Specification
    with ScalaCheck
    with TestDatabaseSpec
    with Generators {
  def is = s2"""
  This specification verifies that the collections service can run without crashing

  The collections service should:
    - create and delete collections $createDeleteCollectionExpectation
    - list collections              $listCollectionsExpectation
    - get collections by id         $getCollectionsExpectation
"""

  val testServices: TestServices[IO] = new TestServices[IO](transactor)

  val testClient: TestClient[IO] =
    new TestClient[IO](testServices.collectionsService, testServices.collectionItemsService)

  def listCollectionsExpectation = prop {
    (stacCollectionA: StacCollection, stacCollectionB: StacCollection) =>
      {
        val listIO = (
          testClient.getCollectionResource(stacCollectionA),
          testClient.getCollectionResource(stacCollectionB)
        ).tupled use { _ =>
          val request = Request[IO](method = Method.GET, Uri.unsafeFromString(s"/collections"))
          (for {
            resp    <- testServices.collectionsService.routes.run(request)
            decoded <- OptionT.liftF { resp.as[CollectionsResponse] }
          } yield decoded).value
        }

        val result = listIO.unsafeRunSync.get.collections map { _.id }

        (result must contain(stacCollectionA.id)) and (result must contain(stacCollectionB.id))
      }
  }

  def getCollectionsExpectation = prop { (stacCollection: StacCollection) =>
    val fetchIO =
      testClient.getCollectionResource(stacCollection) use { collection =>
        val encodedId = URLEncoder.encode(collection.id, StandardCharsets.UTF_8.toString)
        val request =
          Request[IO](method = Method.GET, Uri.unsafeFromString(s"/collections/$encodedId"))
        (for {
          resp    <- testServices.collectionsService.routes.run(request)
          decoded <- OptionT.liftF { resp.as[StacCollection] }
        } yield (decoded, collection)).value
      }

    val (fetched, inserted) = fetchIO.unsafeRunSync.get

    fetched must beTypedEqualTo(inserted)
  }

  // since creation / deletion is a part of the collection resource, and accurate creation is checked
  // in getCollectionsExpectation, this test just makes sure that if other tests are failing, it's
  // not because create/delete are broken
  def createDeleteCollectionExpectation = prop { (stacCollection: StacCollection) =>
    (testClient
      .getCollectionResource(stacCollection) use { _ => IO.unit }).unsafeRunSync must beTypedEqualTo(
      ()
    )
  }

} 
Example 144
Source File: TestClient.scala    From franklin   with Apache License 2.0 5 votes vote down vote up
package com.azavea.franklin.api

import cats.effect.Resource
import cats.effect.Sync
import cats.implicits._
import com.azavea.franklin.api.services.{CollectionItemsService, CollectionsService}
import com.azavea.stac4s.{StacCollection, StacItem}
import eu.timepit.refined.auto._
import io.circe.syntax._
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.implicits._
import org.http4s.{Method, Request, Uri}

import java.net.URLEncoder
import java.nio.charset.StandardCharsets


class TestClient[F[_]: Sync](
    collectionsService: CollectionsService[F],
    collectionItemsService: CollectionItemsService[F]
) {

  private def createCollection(collection: StacCollection): F[StacCollection] =
    collectionsService.routes.orNotFound.run(
      Request(
        method = Method.POST,
        uri = Uri.unsafeFromString("/collections")
      ).withEntity(collection.asJson)
    ) flatMap { _.as[StacCollection] }

  private def deleteCollection(collection: StacCollection): F[Unit] = {
    val encodedCollectionId = URLEncoder.encode(collection.id, StandardCharsets.UTF_8.toString)
    collectionsService.routes.orNotFound
      .run(
        Request(
          method = Method.DELETE,
          uri = Uri.unsafeFromString(s"/collections/$encodedCollectionId")
        )
      )
      .void
  }

  private def createItemInCollection(collection: StacCollection, item: StacItem): F[StacItem] = {
    val encodedCollectionId = URLEncoder.encode(collection.id, StandardCharsets.UTF_8.toString)
    collectionItemsService.routes.orNotFound.run(
      Request(
        method = Method.POST,
        uri = Uri.unsafeFromString(s"/collections/$encodedCollectionId/items")
      ).withEntity(item)
    ) flatMap { _.as[StacItem] }
  }

  private def deleteItemInCollection(collection: StacCollection, item: StacItem): F[Unit] = {
    val encodedCollectionId = URLEncoder.encode(collection.id, StandardCharsets.UTF_8.toString)
    val encodedItemId       = URLEncoder.encode(item.id, StandardCharsets.UTF_8.toString)
    collectionItemsService.routes.orNotFound
      .run(
        Request(
          method = Method.DELETE,
          uri = Uri.unsafeFromString(s"/collections/$encodedCollectionId/items/$encodedItemId")
        )
      )
      .void
  }

  def getItemResource(collection: StacCollection, item: StacItem): Resource[F, StacItem] =
    Resource.make(createItemInCollection(collection, item.copy(collection = Some(collection.id))))(
      item => deleteItemInCollection(collection, item)
    )

  def getCollectionResource(collection: StacCollection): Resource[F, StacCollection] =
    Resource.make(createCollection(collection))(collection => deleteCollection(collection))

  def getCollectionItemResource(
      item: StacItem,
      collection: StacCollection
  ): Resource[F, (StacItem, StacCollection)] =
    (getItemResource(collection, item), getCollectionResource(collection)).tupled
} 
Example 145
Source File: MustacheTemplates.scala    From fintrospect   with Apache License 2.0 5 votes vote down vote up
package io.fintrospect.templating

import java.io.{ByteArrayOutputStream, File, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import com.github.mustachejava.resolver.{DefaultResolver, FileSystemResolver}
import com.github.mustachejava.{DefaultMustacheFactory, Mustache}
import com.twitter.io.Buf

object MustacheTemplates extends Templates {

  private def render(view: View, mustache: Mustache): Buf = {
    val outputStream = new ByteArrayOutputStream(4096)
    val writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)
    try {
      mustache.execute(writer, view)
    } finally {
      writer.close()
    }

    Buf.ByteArray.Owned(outputStream.toByteArray)
  }

  def CachingClasspath(baseClasspathPackage: String = "."): TemplateRenderer = new TemplateRenderer {

    private val factory = new DefaultMustacheFactory(new DefaultResolver(baseClasspathPackage)) {
      setObjectHandler(new ScalaObjectHandler)
    }

    def toBuf(view: View): Buf = render(view, factory.compile(view.template + ".mustache"))
  }

  def Caching(baseTemplateDir: String): TemplateRenderer = new TemplateRenderer {

    private val factory = new DefaultMustacheFactory(new FileSystemResolver(new File(baseTemplateDir))) {
      setObjectHandler(new ScalaObjectHandler)
    }

    def toBuf(view: View): Buf = render(view, factory.compile(view.template + ".mustache"))
  }

  def HotReload(baseTemplateDir: String = "."): TemplateRenderer = new TemplateRenderer {

    class WipeableMustacheFactory extends DefaultMustacheFactory(new FileSystemResolver(new File(baseTemplateDir))) {
      setObjectHandler(new ScalaObjectHandler)
    }

    def toBuf(view: View): Buf = render(view, new WipeableMustacheFactory().compile(view.template + ".mustache"))
  }
} 
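render above pushes the template output through an OutputStreamWriter pinned to UTF-8 before wrapping the bytes in a Buf. The byte-level part of that pattern is plain JDK; a minimal sketch without mustache (names are illustrative):

import java.io.{ByteArrayOutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets

object WriterSketch extends App {
  val out    = new ByteArrayOutputStream(64)
  val writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)
  try writer.write("größer <b>than</b> ASCII")
  finally writer.close() // flushes the encoder so every byte reaches the stream

  // Decoding with the same charset recovers the original text
  println(new String(out.toByteArray, StandardCharsets.UTF_8))
}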
Example 146
Source File: IntegrationTests.scala    From scala-typed-holes   with Apache License 2.0 5 votes vote down vote up
package holes

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterAll, FunSpec}

import scala.sys.process._

class IntegrationTests extends FunSpec with BeforeAndAfterAll {

  private val pluginJar = sys.props("plugin.jar")
  private val scalacClasspath = sys.props("scalac.classpath")
  private val targetDir = Paths.get("target/integration-tests")

  private def runScalac(args: String*): String = {
    val buf = new StringBuffer
    val logger = new ProcessLogger {
      override def out(s: => String): Unit = { buf.append(s); buf.append('\n') }
      override def err(s: => String): Unit = { buf.append(s); buf.append('\n') }
      override def buffer[T](f: => T): T = f
    }

    Process(
      "java"
        :: "-Dscala.usejavacp=true"
        :: "-cp" :: scalacClasspath
        :: "scala.tools.nsc.Main"
        :: args.toList
    ).!(logger)

    buf.toString
  }

  private def compileFile(path: Path): String =
    runScalac(
      s"-Xplugin:$pluginJar",
      "-P:typed-holes:log-level:info",
      "-d", targetDir.toString,
      path.toString
    )

  override def beforeAll(): Unit = {
    println(runScalac("-version"))

    FileUtils.deleteQuietly(targetDir.toFile)
    Files.createDirectories(targetDir)
  }

  describe("produces the expected output") {
    for (scenario <- Paths.get("src/test/resources").toFile.listFiles().toList.map(_.toPath)) {
      it(scenario.getFileName.toString) {
        val expected =
          new String(Files.readAllBytes(scenario.resolve("expected.txt")), StandardCharsets.UTF_8).trim
        val actual =
          compileFile(scenario.resolve("input.scala")).trim

        if (actual != expected) {
          println("Compiler output:")
          println("=====")
          println(actual)
          println("=====")
        }
        assert(actual === expected)
      }
    }
  }

} 
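The test compares compiler output against expected.txt, read by decoding Files.readAllBytes with an explicit UTF-8 charset instead of the platform default. A self-contained sketch of that read-and-trim step (the temp file stands in for a scenario directory):

import java.nio.charset.StandardCharsets
import java.nio.file.Files

object ReadExpectedSketch extends App {
  // Write a throwaway file so the sketch runs on its own
  val path = Files.createTempFile("expected", ".txt")
  Files.write(path, "plugin output\n".getBytes(StandardCharsets.UTF_8))

  // Same pattern as the test: read the raw bytes, decode explicitly, trim trailing whitespace
  val expected = new String(Files.readAllBytes(path), StandardCharsets.UTF_8).trim
  println(expected == "plugin output") // true

  Files.deleteIfExists(path)
}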
Example 147
Source File: WordCountTest.scala    From spark-example-project   with Apache License 2.0 5 votes vote down vote up
package me.soulmachine.spark

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Paths, Files}

import org.scalatest._

import scala.io.Source


class WordCountTest extends FlatSpec with Matchers {
  "A WordCount job" should  "count words correctly" in {
    val tempDir = Files.createTempDirectory(null)
    println(tempDir.toAbsolutePath)

    val inputFile = new File(tempDir.toAbsolutePath.toString, "input.txt")
    Files.write(Paths.get(inputFile.getAbsolutePath),
      "hack hack hack and hack".getBytes(StandardCharsets.UTF_8))
    inputFile.deleteOnExit()

    val outputDir = new File(tempDir.toAbsolutePath.toString, "output").getAbsolutePath

    WordCount.execute(
      master = Some("local"),
      input   = inputFile.getAbsolutePath,
      output  = outputDir
    )

    val outputFile = new File(outputDir, "part-00000")
    val actual = Source.fromFile(outputFile, "UTF-8").mkString
    // delete the temporary folder
    new ProcessBuilder("rm","-rf", tempDir.toAbsolutePath.toString).start().waitFor()

    assert(actual === "(hack,4)\n(and,1)\n")
  }
} 
Example 148
Source File: PlantumlHelpers.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek.plantuml

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import ba.sake.hepek.scalatags.all._
import ba.sake.hepek._
import net.sourceforge.plantuml.SourceStringReader
import net.sourceforge.plantuml.FileFormatOption
import net.sourceforge.plantuml.FileFormat

object PlantumlHelpers extends PlantumlHelpers

trait PlantumlHelpers {

  def plantSvg(str: String): Frag = {
    val reader    = new SourceStringReader(str)
    val os        = new ByteArrayOutputStream()
    val desc      = reader.generateImage(os, new FileFormatOption(FileFormat.SVG))
    val resultSvg = new String(os.toByteArray(), StandardCharsets.UTF_8.name())
    os.close()
    raw(resultSvg)
  }
} 
Example 149
Source File: Bytes.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package misc

import java.nio.charset.StandardCharsets

import com.avsystem.commons.serialization.Base64


final case class Bytes(bytes: Array[Byte]) {
  def hex: String = bytes.iterator.map(b => f"${b & 0xFF}%02X").mkString

  def base64: String = base64()
  def base64(withoutPadding: Boolean = false, urlSafe: Boolean = false): String =
    Base64.encode(bytes, withoutPadding, urlSafe)

  override def hashCode(): Int = java.util.Arrays.hashCode(bytes)
  override def equals(obj: Any): Boolean = obj match {
    case Bytes(obytes) => java.util.Arrays.equals(bytes, obytes)
    case _ => false
  }
  override def toString: String = hex
}
object Bytes {
  def apply(str: String): Bytes = Bytes(str.getBytes(StandardCharsets.UTF_8))
  def fromHex(hex: String): Bytes = Bytes(hex.grouped(2).map(Integer.parseInt(_, 16).toByte).toArray)
  def fromBase64(base64: String, urlSafe: Boolean = false): Bytes = Bytes(Base64.decode(base64, urlSafe))
} 
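Bytes renders its array either as uppercase hex or as Base64. Both views can be reproduced with JDK classes alone; in the sketch below java.util.Base64 stands in for the project's own Base64 helper:

import java.nio.charset.StandardCharsets
import java.util.Base64

object BytesSketch extends App {
  val bytes = "snowman ☃".getBytes(StandardCharsets.UTF_8)

  // Same hex formatting as Bytes.hex: unsigned value of each byte, two uppercase digits
  val hex = bytes.iterator.map(b => f"${b & 0xFF}%02X").mkString
  val b64 = Base64.getEncoder.encodeToString(bytes)

  println(hex)
  println(b64)
  // Decoding the Base64 text restores the original UTF-8 string
  println(new String(Base64.getDecoder.decode(b64), StandardCharsets.UTF_8))
}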
Example 150
Source File: ScastieFileUtil.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
package com.olegych.scastie.util

import java.nio.file._
import java.lang.management.ManagementFactory
import java.nio.charset.StandardCharsets

object ScastieFileUtil {
  def slurp(src: Path): Option[String] = {
    if (Files.exists(src)) Some(Files.readAllLines(src).toArray.mkString("\n"))
    else None
  }

  def write(dst: Path, content: String, truncate: Boolean = false, append: Boolean = false): Unit = {
    if (!Files.exists(dst)) {
      Files.write(dst, content.getBytes, StandardOpenOption.CREATE_NEW)
      ()
    } else if (truncate) {
      Files.write(dst, content.getBytes, StandardOpenOption.TRUNCATE_EXISTING)
      ()
    } else if (append) {
      Files.write(dst, content.getBytes, StandardOpenOption.APPEND)
      ()
    }
  }

  def writeRunningPid(): String = {
    val pid = ManagementFactory.getRuntimeMXBean.getName.split("@").head
    val pidFile = Paths.get("RUNNING_PID")
    Files.write(pidFile, pid.getBytes(StandardCharsets.UTF_8))
    sys.addShutdownHook {
      Files.delete(pidFile)
    }
    pid
  }
} 
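write above chooses a StandardOpenOption depending on whether the target exists and whether the caller asked to truncate or append. The three modes behave as in this standalone sketch (the temp file is throwaway):

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, StandardOpenOption}

object OpenOptionSketch extends App {
  val path = Files.createTempFile("open-option", ".txt")
  Files.delete(path) // start from a missing file so CREATE_NEW succeeds

  def utf8(s: String) = s.getBytes(StandardCharsets.UTF_8)

  Files.write(path, utf8("first\n"), StandardOpenOption.CREATE_NEW)            // fails if the file already exists
  Files.write(path, utf8("appended\n"), StandardOpenOption.APPEND)             // keeps the existing content
  Files.write(path, utf8("replaced\n"), StandardOpenOption.TRUNCATE_EXISTING)  // discards the existing content

  println(new String(Files.readAllBytes(path), StandardCharsets.UTF_8)) // replaced
  Files.deleteIfExists(path)
}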
Example 151
Source File: MQTTTestUtils.scala    From bahir   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.mqtt

import java.net.{ServerSocket, URI}
import java.nio.charset.StandardCharsets

import scala.language.postfixOps

import org.apache.activemq.broker.{BrokerService, TransportConnector}
import org.apache.activemq.usage.SystemUsage
import org.apache.commons.lang3.RandomUtils
import org.eclipse.paho.client.mqttv3._
import org.eclipse.paho.client.mqttv3.persist.MqttDefaultFilePersistence

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils


private[mqtt] class MQTTTestUtils extends Logging {

  private val persistenceDir = Utils.createTempDir()
  private val brokerHost = "localhost"
  private val brokerPort = findFreePort()

  private var broker: BrokerService = _
  private var systemUsage: SystemUsage = _
  private var connector: TransportConnector = _

  def brokerUri: String = {
    s"$brokerHost:$brokerPort"
  }

  def setup(): Unit = {
    broker = new BrokerService()
    broker.setDataDirectoryFile(Utils.createTempDir())
    broker.getSystemUsage().setSendFailIfNoSpace(false)
    systemUsage = broker.getSystemUsage()
    systemUsage.getStoreUsage().setLimit(1024L * 1024 * 256);  // 256 MB (default: 100 GB)
    systemUsage.getTempUsage().setLimit(1024L * 1024 * 128);   // 128 MB (default: 50 GB)
    connector = new TransportConnector()
    connector.setName("mqtt")
    connector.setUri(new URI("mqtt://" + brokerUri))
    broker.addConnector(connector)
    broker.start()
  }

  def teardown(): Unit = {
    if (broker != null) {
      broker.stop()
      broker = null
    }
    if (connector != null) {
      connector.stop()
      connector = null
    }
    Utils.deleteRecursively(persistenceDir)
  }

  private def findFreePort(): Int = {
    val candidatePort = RandomUtils.nextInt(1024, 65536)
    Utils.startServiceOnPort(candidatePort, (trialPort: Int) => {
      val socket = new ServerSocket(trialPort)
      socket.close()
      (null, trialPort)
    }, new SparkConf())._2
  }

  def publishData(topic: String, data: String): Unit = {
    var client: MqttClient = null
    try {
      val persistence = new MqttDefaultFilePersistence(persistenceDir.getAbsolutePath)
      client = new MqttClient("tcp://" + brokerUri, MqttClient.generateClientId(), persistence)
      client.connect()
      if (client.isConnected) {
        val msgTopic = client.getTopic(topic)
        val message = new MqttMessage(data.getBytes(StandardCharsets.UTF_8))
        message.setQos(1)
        message.setRetained(true)

        for (i <- 0 to 10) {
          try {
            msgTopic.publish(message)
          } catch {
            case e: MqttException if e.getReasonCode == MqttException.REASON_CODE_MAX_INFLIGHT =>
              // wait for Spark streaming to consume something from the message queue
              Thread.sleep(50)
          }
        }
      }
    } finally {
      if (client != null) {
        client.disconnect()
        client.close()
        client = null
      }
    }
  }

} 
Example 152
Source File: DocHelper.scala    From ScalaClean   with Apache License 2.0 5 votes vote down vote up
package scalafix.scalaclean.cli

import java.nio.charset.StandardCharsets

import scalafix.scalaclean.FixUtils
import scalafix.v1
import scalafix.v1.SyntacticDocument

import scala.meta.{AbsolutePath, RelativePath}
import scala.meta.inputs.Input
import scala.meta.internal.io.FileIO
import scala.meta.internal.symtab.SymbolTable

object DocHelper {
  def readSemanticDoc(
                       classLoader: ClassLoader,
                       symtab: SymbolTable,
                       absSourcePath: AbsolutePath,
                       buildBase: AbsolutePath,
                       targetFile: RelativePath
                     ): v1.SemanticDocument = {

    val input = Input.VirtualFile(targetFile.toString, FileIO.slurp(absSourcePath, StandardCharsets.UTF_8))
    val doc = SyntacticDocument.fromInput(input)

    val semanticDBPath = absSourcePath.toRelative(buildBase)

    FixUtils.fromPath(
      doc,
      semanticDBPath,
      classLoader,
      symtab)

  }
} 
Example 153
Source File: TestAISStreams.scala    From incubator-daffodil   with Apache License 2.0 5 votes vote down vote up
package org.apache.daffodil.layers

import org.junit.Assert._
import java.io._
import org.junit.Test
import org.apache.daffodil.io.RegexLimitingStream
import java.nio.charset.StandardCharsets
import org.apache.daffodil.util.Misc
import org.apache.commons.io.IOUtils
import org.apache.daffodil.io.LayerBoundaryMarkInsertingJavaOutputStream


class TestAISStreams {

  // assumed: the scraped snippet omits the class header and this charset constant,
  // which both tests below reference as `iso8859`
  val iso8859 = StandardCharsets.ISO_8859_1

  @Test def testAISPayloadArmoringDecode() = {
    val dataString = "14eGL:@000o8oQ'LMjOchmG@08HK,"
    val bba = new ByteArrayInputStream(dataString.getBytes(iso8859))
    //
    // regex is ",0*"
    //
    val rls = new RegexLimitingStream(bba, ",", ",", iso8859)
    val aas = new AISPayloadArmoringInputStream(rls)

    val baos = new ByteArrayOutputStream()
    var c: Int = -1
    while ({
      c = aas.read()
      c != -1
    }) {
      baos.write(c)
    }
    baos.close()
    val result = baos.toByteArray()
    val expected = Misc.bits2Bytes("000001 000100 101101 010111 011100 001010 010000 000000 000000 000000 110111 001000 110111 100001 101000 011100 011101 110010 011111 101011 110000 110101 010111 010000 000000 001000 011000 011011 ")
    assertEquals(expected.length, result.length)
    val pairs = expected zip result
    pairs.foreach {
      case (exp, act) =>
        assertEquals(exp, act)
    }
  }

  @Test def testAISPayloadArmoringEncode() = {
    val dataBytes = Misc.bits2Bytes("000001 000100 101101 010111 011100 001010 010000 000000 000000 000000 110111 001000 110111 100001 101000 011100 011101 110010 011111 101011 110000 110101 010111 010000 000000 001000 011000 011011 ")
    val bais = new ByteArrayInputStream(dataBytes)
    val baos = new ByteArrayOutputStream()
    val lbmijos = new LayerBoundaryMarkInsertingJavaOutputStream(baos, ",", iso8859)
    val aas = new AISPayloadArmoringOutputStream(lbmijos)
    IOUtils.copy(bais, aas)
    aas.close()
    val result = baos.toByteArray()
    val expected = "14eGL:@000o8oQ'LMjOchmG@08HK,".getBytes(iso8859)
    assertEquals(expected.length, result.length)
    (expected zip result).foreach {
      case (exp, act) =>
        assertEquals(exp, act)
    }
  }

} 
Example 154
Source File: TestByteSwapStream.scala    From incubator-daffodil   with Apache License 2.0 5 votes vote down vote up
package org.apache.daffodil.layers

import org.junit.Assert._
import java.io._
import org.junit.Test
import java.nio.charset.StandardCharsets

class TestByteSwapStreams {

  val iso8859 = StandardCharsets.ISO_8859_1

  val unswapped32BitData = Array[Byte](0x76, 0x54, 0x32, 0x10, 0xBA.toByte, 0x98.toByte)

  val swapped32BitData = Array[Byte](0x10, 0x32, 0x54, 0x76, 0x98.toByte, 0xBA.toByte)

  @Test def testFourByteSwapInputStream() = {
    val data = unswapped32BitData
    val bba = new ByteArrayInputStream(data)
    val bss = new ByteSwapInputStream(4, bba)

    val baos = new ByteArrayOutputStream()
    var c: Int = -1
    while ({
      c = bss.read()
      c != -1
    }) {
      baos.write(c)
    }
    baos.close()
    val result = new String(baos.toByteArray(), iso8859)
    val expected = new String(swapped32BitData, iso8859)
    assertEquals(expected, result)
  }

  @Test def testFourByteSwapOutputStream() = {
    val data = swapped32BitData
    val bba = new ByteArrayInputStream(data)

    val baos = new ByteArrayOutputStream()
    val bsos = new ByteSwapOutputStream(4, baos)
    var c: Int = -1
    while ({
      c = bba.read()
      c != -1
    }) {
      bsos.write(c)
    }
    bsos.close()
    baos.close()

    val result = new String(baos.toByteArray(), iso8859)
    val expected = new String(unswapped32BitData, iso8859)
    assertEquals(expected, result)
  }

} 
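Both streams under test reverse the byte order of every 4-byte word, so the expected fixture is just the input data with each group reversed. That transformation is easy to check by hand with a standalone sketch (this is not the project's stream implementation):

object ByteSwapSketch extends App {
  val unswapped = Array[Byte](0x76, 0x54, 0x32, 0x10, 0xBA.toByte, 0x98.toByte)

  // Reverse each complete group of `wordSize` bytes; the trailing partial group is
  // reversed as-is, which matches the last two bytes of the fixture above
  def swap(bytes: Array[Byte], wordSize: Int): Array[Byte] =
    bytes.grouped(wordSize).flatMap(_.reverse).toArray

  val swapped = swap(unswapped, 4)
  println(swapped.map(b => f"${b & 0xFF}%02X").mkString(" ")) // 10 32 54 76 98 BA
}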
Example 155
Source File: InfluxPoint.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.clients
import java.nio.charset.StandardCharsets
import java.time.Instant
import java.util.concurrent.TimeUnit.SECONDS

import ch.epfl.bluebrain.nexus.cli.config.influx.TypeConfig
import fs2.Chunk
import org.http4s.headers.`Content-Type`
import org.http4s.{EntityEncoder, MediaType}

import scala.util.Try


// assumed reconstruction: the scraped snippet omits the InfluxPoint case class and the
// opening of its companion object; the field types below are inferred from the
// constructor call inside fromSparqlResults
final case class InfluxPoint(
    measurement: String,
    tags: Map[String, String],
    values: Map[String, String],
    timestamp: Option[Instant] = None
)

object InfluxPoint {

  def fromSparqlResults(
      results: SparqlResults,
      tc: TypeConfig
  ): List[InfluxPoint] =
    results.results.bindings.flatMap { bindings =>
      val values = tc.values.flatMap(value => bindings.get(value).map(value -> _.value)).toMap
      Option.when(values.nonEmpty) {
        val tags      = bindings.view
          .filterKeys(key => !tc.values(key) && key != tc.timestamp)
          .mapValues(_.value)
        val timestamp = bindings.get(tc.timestamp).flatMap(binding => Try(Instant.parse(binding.value)).toOption)
        InfluxPoint(tc.measurement, tags.toMap, values, timestamp)
      }
    }
} 
Example 156
Source File: AttributesComputationSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage.attributes

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import akka.actor.ActorSystem
import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)`
import akka.testkit.TestKit
import cats.effect.IO
import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes}
import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError
import ch.epfl.bluebrain.nexus.storage.utils.IOValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContextExecutor

class AttributesComputationSpec
    extends TestKit(ActorSystem("AttributesComputationSpec"))
    with AnyWordSpecLike
    with Matchers
    with IOValues {

  implicit private val ec: ExecutionContextExecutor = system.dispatcher

  private trait Ctx {
    val path           = Files.createTempFile("storage-test", ".txt")
    val (text, digest) = "something" -> "3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb"
  }

  "Attributes computation computation" should {
    val computation = AttributesComputation.akkaAttributes[IO]
    val alg         = "SHA-256"

    "succeed" in new Ctx {
      Files.write(path, text.getBytes(StandardCharsets.UTF_8))
      computation(path, alg).ioValue shouldEqual FileAttributes(
        s"file://$path",
        Files.size(path),
        Digest(alg, digest),
        `text/plain(UTF-8)`
      )
      Files.deleteIfExists(path)
    }

    "fail when algorithm is wrong" in new Ctx {
      Files.write(path, text.getBytes(StandardCharsets.UTF_8))
      computation(path, "wrong-alg").failed[InternalError]
    }

    "fail when file does not exists" in new Ctx {
      computation(Paths.get("/tmp/non/existing"), alg).failed[InternalError]
    }
  }
} 
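The digest hard-coded in the fixture is the SHA-256 of the UTF-8 bytes of "something", written as lowercase hex. That constant can be recomputed with MessageDigest alone; a minimal sketch:

import java.nio.charset.StandardCharsets
import java.security.MessageDigest

object DigestSketch extends App {
  val text = "something"

  // SHA-256 over the UTF-8 bytes, printed as lowercase hex -- the value the spec expects
  val digest = MessageDigest
    .getInstance("SHA-256")
    .digest(text.getBytes(StandardCharsets.UTF_8))
    .map(b => f"${b & 0xFF}%02x")
    .mkString

  println(digest) // 3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb
}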
Example 157
Source File: InceptionModel.scala    From flink-tensorflow   with Apache License 2.0 5 votes vote down vote up
package org.apache.flink.contrib.tensorflow.examples.inception

import java.net.URI
import java.nio.charset.StandardCharsets

import org.apache.flink.contrib.tensorflow.examples.inception.LabelMethod._
import org.apache.flink.contrib.tensorflow.graphs.{DefaultGraphLoader, GraphLoader, GraphMethod}
import org.apache.flink.contrib.tensorflow.models.generic.GenericModel
import org.apache.flink.contrib.tensorflow.models.ModelFunction
import org.apache.flink.contrib.tensorflow.util.GraphUtils
import org.apache.flink.core.fs.Path
import org.slf4j.{Logger, LoggerFactory}
import org.tensorflow.Tensor
import org.tensorflow.contrib.scala._
import org.tensorflow.framework.{SignatureDef, TensorInfo}

import scala.collection.JavaConverters._

sealed trait LabelMethod extends GraphMethod {
  def name = LABEL_METHOD_NAME
  override type Input = ImageTensor
  override type Output = LabelTensor
}

@SerialVersionUID(1L)
object LabelMethod {
  val LABEL_METHOD_NAME = "inception/label"
  val LABEL_INPUTS = "inputs"
  val LABEL_OUTPUTS = "outputs"

  
  // assumed wrapper: the scraped snippet omits the implicit class that supplies the
  // label tensor `t`; the name LabelTensorOps is hypothetical
  implicit class LabelTensorOps(t: LabelTensor) {

    def toTextLabels(take: Int = 3)(implicit model: InceptionModel): Array[LabeledImage] = {
      // the tensor consists of a row per image, with columns representing label probabilities
      require(t.numDimensions() == 2, "expected a [M N] shaped tensor")
      val matrix = Array.ofDim[Float](t.shape()(0).toInt,t.shape()(1).toInt)
      t.copyTo(matrix)
      matrix.map { row =>
        LabeledImage(row.toList.zip(model.labels).sortWith(_._1 > _._1).take(take))
      }
    }
  }

} 
Example 158
Source File: FileUtils.scala    From subsearch   with GNU General Public License v2.0 5 votes vote down vote up
package com.gilazaria.subsearch.utils

import java.nio.charset.StandardCharsets
import java.nio.file._
import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.Try

class File(val path: Path) {
  def exists: Boolean =
    FileUtils.exists(path)

  def isFile: Boolean =
    FileUtils.isFile(path)

  def isReadable: Boolean =
    FileUtils.isReadable(path)

  def linesIterator: Iterator[String] =
    FileUtils.linesIterator(path)

  lazy val getLines: List[String] =
    FileUtils.getLines(path)

  lazy val numberOfLines: Int =
    FileUtils.numberOfLines(path)

  lazy val getSource: String =
    getLines.mkString("\n")

  def isWriteable: Boolean =
    FileUtils.isWriteable(path)

  def write(data: String) =
    FileUtils.writeToPath(data, path)
}

object File {
  def fromFilename(filename: String): File =
    new File(Paths.get(filename))
}

object FileUtils {
  def exists(path: Path): Boolean =
    Files.exists(path)

  def isFile(path: Path): Boolean =
    Files.isRegularFile(path)

  def isReadable(path: Path): Boolean =
    Files.isReadable(path)

  def isWriteable(path: Path): Boolean = {
    if (!FileUtils.exists(path)) {
      val createFileAttempt =
        Try {
          Files.createFile(path)
          Files.delete(path)
        }

      createFileAttempt.isSuccess
    } else {
      Files.isWritable(path)
    }
  }

  def writeToPath(data: String, path: Path) =
    Files.write(path, data.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.APPEND)

  def linesIterator(path: Path): Iterator[String] =
    io.Source.fromFile(path.toUri).getLines

  def getLines(path: Path): List[String] =
    Files.readAllLines(path).asScala.toList

  def numberOfLines(path: Path): Int =
    io.Source.fromFile(path.toUri).getLines.size

  
  def getResourceSource(filename: String): String = {
    val reader =
      Source
        .fromInputStream(getClass.getResourceAsStream("/" + filename))
        .bufferedReader()

    Stream
      .continually(reader.readLine())
      .takeWhile(_ != null)
      .mkString("\n")
  }
} 
Example 159
Source File: ProgressLoggerTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.util

import java.io.{ByteArrayOutputStream, PrintStream}
import java.nio.charset.StandardCharsets

import com.fulcrumgenomics.bam.api.SamRecord
import com.fulcrumgenomics.commons.util.Logger
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.vcf.api.Variant
import org.scalatest.concurrent.PatienceConfiguration.Interval

class ProgressLoggerTest extends UnitSpec {

  private class LoggerHelper extends Logger(this.getClass) {
    private val baos = new ByteArrayOutputStream()
    out = Some(new PrintStream(baos, true, "UTF-8"))
    def lines: IndexedSeq[String] = new String(baos.toByteArray, StandardCharsets.UTF_8).split('\n').toIndexedSeq
  }
  
  // For Scala 2.12 compatibility
  private def emptyIterator[T]: Iterator[T] = Iterator.empty

  private val progressLogger = ProgressLogger(new LoggerHelper())

  "ProgressLoggingIterator" should "wrap a SamRecord, (String, Int), Variant, and Interval" in {
    import com.fulcrumgenomics.util.ProgressLogger.ProgressLoggingIterator

    // Check typing
    emptyIterator[SamRecord].progress(progressLogger)
    emptyIterator[(String, Int)].progress(progressLogger)
    emptyIterator[Variant].progress(progressLogger)
    emptyIterator[Interval].progress(progressLogger)

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator(("chr1", 1), ("chr2", 2), ("chr3", 3)).progress(progress).foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("chr2:2")
    lines(1) should include("chr3:3")
  }

  it should "wrap unsupported types" in {
    import com.fulcrumgenomics.util.ProgressLogger.ProgressLoggingIterator

    // Check typing
    emptyIterator[Double].progress(progressLogger)
    emptyIterator[String].progress(progressLogger)

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator("foo", "bar", "car").progress(progress).foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("**")
  }

  "TransformedProgressLoggingIterator" should "convert items to a supported type" in {
    import com.fulcrumgenomics.util.ProgressLogger.TransformedProgressLoggingIterator
    emptyIterator[(String, String)].progress(progressLogger, (item: (String, String)) => (item._1, item._2.toInt))

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator(("chr1", "1"), ("chr2", "2"), ("chr3", "3")).progress(progress, (item: (String, String)) => (item._1, item._2.toInt)).foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("chr2:2")
    lines(1) should include("chr3:3")
  }
} 
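LoggerHelper captures log output by pointing a PrintStream at an in-memory buffer and later decoding the bytes as UTF-8. The capture-and-split pattern on its own is plain JDK, as in this standalone sketch:

import java.io.{ByteArrayOutputStream, PrintStream}
import java.nio.charset.StandardCharsets

object CaptureSketch extends App {
  val buffer = new ByteArrayOutputStream()
  val out    = new PrintStream(buffer, true, "UTF-8") // autoflush, explicit charset name

  out.println("chr2:2 processed")
  out.println("chr3:3 processed")

  // Decode the captured bytes with the same charset and split into lines,
  // just as LoggerHelper.lines does above
  val lines = new String(buffer.toByteArray, StandardCharsets.UTF_8).split('\n').toIndexedSeq
  println(lines.length)                          // 2
  println(lines.forall(_.contains("processed"))) // true
}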
Example 160
Source File: DictionaryBasedNormalizer.scala    From scalastringcourseday7   with Apache License 2.0 5 votes vote down vote up
package text.normalizer

import java.nio.charset.{CodingErrorAction, StandardCharsets}
import java.nio.file.Path

import text.{StringNone, StringOption}
import util.Config

import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.sys.process.Process
import scala.util.matching.Regex


class DictionaryBasedNormalizer(dictionaryNameOpt: StringOption) {
  private def ascii2native(inputPath: Path): Iterator[String] = {
    import util.ProcessBuilderUtils._
    Process(Seq[String](
      s"${System.getProperty("java.home")}/../bin/native2ascii",
      "-reverse",
      "-encoding", "UTF-8",
      inputPath.toAbsolutePath.toString)).lineStream(
        StandardCharsets.UTF_8,
        CodingErrorAction.REPORT,
        CodingErrorAction.REPORT,
        StringNone)
  }
  private val regex: Regex = """([^#:][^:]*):\[([^#]+)\](#.*)?""".r
  private val terms: Seq[(String, String)] = initialize()

  private def initialize(): Seq[(String, String)] = {
    if (dictionaryNameOpt.isEmpty) {
      return Nil
    }
    val dictionaryName: String = dictionaryNameOpt.get
    val map = mutable.Map.empty[String, List[String]]
    val buffer = ListBuffer.empty[(String, String)]
    val filePath: Path = Config.resourceFile("normalizer", dictionaryName)
    ascii2native(filePath) foreach {
      case regex(representation, notationalVariants, _) =>
        val trimmedRepresentation: String = representation.trim match {
          case "\"\"" => ""
          case otherwise => otherwise
        }
        val sortedNotationalVariants: List[String] = sortNotationVariants(notationalVariants.split(',').toList)
        map(trimmedRepresentation) = if (map.contains(trimmedRepresentation)) {
          sortNotationVariants(map(trimmedRepresentation) ++ sortedNotationalVariants)
        } else {
          sortedNotationalVariants
        }
      case _ =>
        //Do nothing
    }
    sortRepresentations(map.keySet.toList) foreach {
      representation =>
        map(representation) foreach {
          notationalVariant =>
            buffer += ((notationalVariant, representation))
        }
    }
    buffer.result
  }

  protected def sortNotationVariants(notationVariants: List[String]): List[String] = {
    notationVariants.sorted//alphabetical order
  }

  protected def sortRepresentations(representations: List[String]): List[String] = {
    representations.sorted//alphabetical order
  }

  def normalize(text: StringOption): StringOption = {
    text map {
      t: String =>
        var result: String = t
        if (terms.nonEmpty) {
          terms foreach {
            case (term, replacement) =>
              result = replaceAll(result, term, replacement)
            case _ =>
              //Do nothing
          }
        }
        result
    }
  }

  protected def replaceAll(input: String, term: String, replacement: String): String = {
    import util.primitive._
    input.replaceAllLiteratim(term, replacement)
  }
} 
Example 161
Source File: Meta.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package data

import java.nio.charset.StandardCharsets
import java.nio.file.Files

import ch.epfl.scala.index.model.Descending
import com.github.nscala_time.time.Imports._
import jawn.support.json4s.Parser
import org.joda.time.DateTime
import org.json4s._
import org.json4s.native.Serialization.{write => swrite}

case class Meta(
    sha1: String,
    path: String,
    created: DateTime
)

object Meta {

  
  object MetaSerializer
      extends CustomSerializer[Meta](
        format =>
          (
            {
              case in: JValue => {
                implicit val formats = DefaultFormats ++ Seq(
                  DateTimeSerializer
                )
                in.extract[Meta]
              }
            }, {
              case meta: Meta => {
                implicit val formats = DefaultFormats ++ Seq(
                  DateTimeSerializer
                )
                JObject(
                  JField("created", Extraction.decompose(meta.created)),
                  JField("path", Extraction.decompose(meta.path)),
                  JField("sha1", Extraction.decompose(meta.sha1))
                )
              }
            }
        )
      )

  implicit val formats = DefaultFormats ++ Seq(DateTimeSerializer,
                                               MetaSerializer)
  implicit val serialization = native.Serialization

  def load(paths: DataPaths, repository: LocalPomRepository): List[Meta] = {
    assert(
      repository == LocalPomRepository.MavenCentral ||
        repository == LocalPomRepository.UserProvided
    )

    val metaPath = paths.meta(repository)
    val metaRaw = new String(Files.readAllBytes(metaPath))

    metaRaw
      .split('\n')
      .toList
      .filter(_ != "")
      .map(json => Parser.parseUnsafe(json).extract[Meta])
      .sortBy(_.created)(Descending)
  }

  def append(paths: DataPaths,
             meta: Meta,
             repository: LocalPomRepository): Unit = {
    val all = load(paths, repository)
    write(paths, meta :: all, repository)
  }

  def write(paths: DataPaths,
            metas: List[Meta],
            repository: LocalPomRepository): Unit = {
    val sorted = metas.sortBy(_.created)(Descending)
    val jsonPerLine =
      sorted
        .map(s => swrite(s))
        .mkString("", "\n", "\n")

    val metaPath = paths.meta(repository)

    if (Files.exists(metaPath)) {
      Files.delete(metaPath)
    }

    Files.write(metaPath, jsonPerLine.getBytes(StandardCharsets.UTF_8))
    ()
  }
} 
Example 162
Source File: BintrayDownloadPoms.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index.data
package bintray

import download.PlayWsDownloader
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}

import play.api.libs.ws.{WSClient, WSRequest, WSResponse}
import play.api.libs.ws.ahc.AhcWSClient
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.slf4j.LoggerFactory

class BintrayDownloadPoms(paths: DataPaths)(
    implicit val system: ActorSystem,
    implicit val materializer: ActorMaterializer
) extends PlayWsDownloader {

  private val log = LoggerFactory.getLogger(getClass)

  private val bintrayPomBase = paths.poms(LocalPomRepository.Bintray)

  
  def run(): Unit = {

    download[BintraySearch, Unit]("Downloading POMs",
                                  searchesBySha1,
                                  downloadRequest,
                                  processPomDownload,
                                  parallelism = 32)
    ()
  }
} 
Example 163
Source File: SaveLiveData.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package data
package elastic

import model._
import project._
import org.json4s._
import org.json4s.native.Serialization.{read, write, writePretty}
import org.json4s.native.parseJson
import java.nio.file._
import java.nio.charset.StandardCharsets
import org.slf4j.LoggerFactory

import scala.concurrent.{ExecutionContext, Future}

// this allows us to save project as json object sorted by keys
case class LiveProjects(projects: Map[Project.Reference, ProjectForm])
object LiveProjectsSerializer
    extends CustomSerializer[LiveProjects](
      format =>
        (
          {
            case JObject(obj) => {
              implicit val formats = DefaultFormats
              LiveProjects(
                obj.map {
                  case (k, v) =>
                    val List(organization, repository) = k.split('/').toList

                    (Project.Reference(organization, repository),
                     v.extract[ProjectForm])
                }.toMap
              )
            }
          }, {
            case l: LiveProjects =>
              JObject(
                l.projects.toList
                  .sortBy {
                    case (Project.Reference(organization, repository), _) =>
                      (organization, repository)
                  }
                  .map {
                    case (Project.Reference(organization, repository), v) =>
                      import ch.epfl.scala.index.search.SearchProtocol._
                      JField(s"$organization/$repository", parseJson(write(v)))
                  }
              )
          }
      )
    )

trait LiveProjectsProtocol {
  implicit val formats: Formats = DefaultFormats ++ Seq(LiveProjectsSerializer)
  implicit val serialization: Serialization = native.Serialization
}

object SaveLiveData extends LiveProjectsProtocol {

  val logger = LoggerFactory.getLogger(getClass)

  def storedProjects(paths: DataPaths): Map[Project.Reference, ProjectForm] =
    read[LiveProjects](
      Files
        .readAllLines(paths.liveProjects)
        .toArray
        .mkString("")
    ).projects

  def saveProjects(paths: DataPaths,
                   live: Map[Project.Reference, ProjectForm]): Unit = {
    val projects = LiveProjects(live)

    val liveDir = paths.liveProjects.getParent
    if (!Files.isDirectory(liveDir)) {
      Files.createDirectory(liveDir)
    }

    Files.write(
      paths.liveProjects,
      writePretty(projects).getBytes(StandardCharsets.UTF_8)
    )
  }

  // Note: we use a future here just to catch exceptions. Our code is blocking, though.
  def saveProject(project: Project,
                  paths: DataPaths)(implicit ec: ExecutionContext): Future[_] =
    Future {
      concurrent.blocking {
        val stored = SaveLiveData.storedProjects(paths)
        val newProject = (project.reference -> ProjectForm(project))

        logger.info(s"Writing projects at ${paths.liveProjects}")
        saveProjects(paths, stored + newProject)
      }
    }

} 
Example 164
Source File: PidLock.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package data
package util

import java.lang.management.ManagementFactory

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

object PidLock {
  def create(prefix: String): Unit = {
    val pid = ManagementFactory.getRuntimeMXBean().getName().split("@").head
    val pidFile = Paths.get(s"$prefix-PID")
    Files.write(pidFile, pid.getBytes(StandardCharsets.UTF_8))
    sys.addShutdownHook {
      Files.delete(pidFile)
    }

    ()
  }
} 
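A minimal usage sketch of the object above, assuming the scaladex data module is on the classpath; the "demo" prefix is just an example value:

import ch.epfl.scala.index.data.util.PidLock

object PidLockDemo extends App {
  // Writes ./demo-PID containing the current process id (UTF-8 encoded)
  // and registers a shutdown hook that deletes it again.
  PidLock.create("demo")
  println("PID file written; it will be removed on JVM shutdown")
}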
Example 165
Source File: PublishData.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package server
package routes
package api
package impl

import data.{DataPaths, LocalPomRepository}
import data.github
import ch.epfl.scala.index.model.misc.Sha1
import org.joda.time.DateTime
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import org.slf4j.LoggerFactory

import scala.util.control.NonFatal


  private def tmpPath(sha1: String): Path = {
    val tmpDir =
      Files.createTempDirectory(Paths.get(Server.config.tempDirPath), sha1)
    Files.createTempFile(tmpDir, "", "")
  }
} 
Example 166
Source File: Main.scala    From scaldy   with Apache License 2.0 5 votes vote down vote up
package com.paytrue.scaldy

import java.io._
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }
import scala.collection.JavaConversions._

case class Config(inputPath: String = ".", outputPath: File = new File("scaldy.dot"), groupSelector: String = "") {
  // allows generating only one subgraph by specifying the name of a class in that subgraph; see def subGraph
  val selectGroup = groupSelector != ""

  val selectClasses = groupSelector.split(",")
}

object Main extends App {
  val parser = new scopt.OptionParser[Config]("scaldy") {
    head("scaldy", "0.1.x")
    opt[String]('i', "in") valueName "<directory>" action {
      (x, c) ⇒ c.copy(inputPath = x)
    } text "in is an optional input directory path, by default the current directory"
    opt[File]('o', "out") valueName "<file>" action {
      (x, c) ⇒ c.copy(outputPath = x)
    } text "out is an optional output file, scaldy.dot by default"
    opt[String]('g', "group") valueName "<class name>" action {
      (x, c) ⇒ c.copy(groupSelector = x)
    } text "group is an optional subgraph selector, list classes (comma separated class names) within the group to generate only their subgraph"
  }

  parser.parse(args, Config()) match {
    case Some(config) ⇒
      val output = exportGraph(config)
      val charOutput: OutputStreamWriter = new OutputStreamWriter(
        new FileOutputStream(
          config.outputPath
        ),
        StandardCharsets.UTF_8
      )

      print(output)
      charOutput.write(output)
      charOutput.close()

    case None ⇒ // bad arguments, error already printed
  }

  def exportGraph(c: Config) = {
    val sourceFiles = FileFinder.listFiles(Paths.get(c.inputPath), ".scala")
    val allClasses = sourceFiles.flatMap(FileClassFinder.getClassesFromFile).filterNot(_.name == "Validated")
    val allNames = allClasses.map(_.name)
    val allRelationships = allClasses.flatMap(_.relationships).filter(rel ⇒ allNames.contains(rel.to))
    val allConnectedClasses =
      allClasses.filter(c ⇒ allRelationships.exists(rel ⇒ rel.from == c.name || rel.to == c.name || c.properties.nonEmpty))
        .groupBy(_.sourceFile)
        .zip(GraphColors.cycledColors)
        .map { case ((file, clazz), color) ⇒ (file, clazz, color) }

    def subGraph(sourceFile: Path, classes: Traversable[BeanClass], color: Color) = {
      val classList = classes.toList
      if (!c.selectGroup || classList.map(_.name).exists(c.selectClasses.contains(_))) {
        val (innerRels, outerRels) = classList.flatMap(_.relationships).filter(rel ⇒ allNames.contains(rel.to)).partition(rel ⇒ classList.map(_.name).contains(rel.to))
        s"""subgraph "cluster_${sourceFile.toString}" {
           |style=invis
           |margin=30
           |${classes.map(_.node(color)).mkString("\n")}
           |${innerRels.map(_.edge).mkString("\n")}
           |}
           |${outerRels.map(_.edge).mkString("\n")}
           |""".stripMargin
      } else {
        ""
      }
    }

    s"""digraph "Class diagram" {
       |graph[splines=true dpi=55]
       |node[shape=none width=0 height=0 margin=0 fontname=Verdana fontsize=14]
       |edge[fontname=Verdana fontsize=12 arrowsize=1.5 minlen=2.5]
       |
       |${allConnectedClasses.map((subGraph _).tupled).mkString("\n")}
       |}
       |""".stripMargin
  }
}

object FileFinder {
  def listFiles(root: Path, fileSuffix: String): List[Path] = {
    var files = List.empty[Path]

    Files.newDirectoryStream(root).foreach(path ⇒ {
      if (Files.isDirectory(path)) {
        files = files ++ listFiles(path, fileSuffix)
      } else if (path.toString.endsWith(fileSuffix)) {
        files = files ++ List(path.toAbsolutePath)
      }
    })

    files
  }
} 
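A small sketch of the FileFinder helper defined above, assuming the scaldy classes are on the classpath; the start directory is just an example:

import java.nio.file.Paths
import com.paytrue.scaldy.FileFinder

object FileFinderDemo extends App {
  // Recursively list every .scala file under the current directory
  FileFinder.listFiles(Paths.get("."), ".scala").foreach(println)
}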
Example 167
Source File: Traverser.scala    From scaldy   with Apache License 2.0 5 votes vote down vote up
package com.paytrue.scaldy

import java.nio.charset.StandardCharsets
import java.nio.file.Path
import scala.io.Source
import scala.reflect.runtime.currentMirror
import scala.reflect.runtime.universe.Flag._
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

class ClassDefTraverser(file: Path) extends Traverser {
  var classes: List[BeanClass] = List.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ClassDef(mods, name, _, impl) ⇒
        val valTraverser = new ValDefTraverser
        valTraverser.traverse(tree)
        val parents = impl.parents.map(_.toString())
        classes = classes :+ BeanClass(name.toString, valTraverser.properties, parents,
          isAbstract = mods.hasFlag(ABSTRACT), isTrait = mods.hasFlag(TRAIT), sourceFile = file)

      case _ ⇒
    }
    super.traverse(tree)
  }
}

class ValDefTraverser extends Traverser {
  var properties: List[Property] = List.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ValDef(Modifiers(_, _, annotations), valName, tpt, _) if hasBeanProperty(annotations) ⇒
        val isRequired = hasRequired(annotations)
        tpt match {
          case AppliedTypeTree(Select(qualifier, typeName), args) ⇒
            val typeTraverser = new TypeArgsTraverser
            typeTraverser.traverseTrees(args)
            properties :+= Property(valName.toString, tpt.toString(), typeTraverser.refTypes, isRequired)

          case _ ⇒
            properties :+= Property(valName.toString, tpt.toString(), Set.empty, isRequired)
        }

      case _ ⇒
    }
    super.traverse(tree)
  }

  private def hasBeanProperty(annotations: List[Tree]) = annotations.exists {
    case Apply(Select(New(Ident(TypeName("BeanProperty"))), _), _) ⇒ true
    case _ ⇒ false
  }

  private def hasRequired(annotations: List[Tree]) = annotations.exists {
    case Apply(Select(New(Annotated(_, Ident(TypeName("Required")))), _), _) ⇒ true
    case _ ⇒ false
  }
}

class TypeArgsTraverser extends Traverser {
  var refTypes: Set[String] = Set.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ident @ Ident(identName) if ident.isType ⇒
        refTypes += identName.toString
      case _ ⇒
    }
    super.traverse(tree)
  }
}

object FileClassFinder {
  def getClassesFromFile(file: Path): List[BeanClass] = {
    val toolbox = currentMirror.mkToolBox()
    val fileContents = Source.fromFile(file.toString, StandardCharsets.UTF_8.name()).getLines().drop(1).mkString("\n")
    val tree = toolbox.parse(fileContents)
    val traverser = new ClassDefTraverser(file)
    traverser.traverse(tree)
    traverser.classes
  }
} 
Example 168
Source File: PathUtils.scala    From pureconfig   with Mozilla Public License 2.0 5 votes vote down vote up
package pureconfig

import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }

object PathUtils {

  def createTempFile(content: String): Path = {
    val path = Files.createTempFile("pureconfig", "conf")
    path.toFile.deleteOnExit()
    val writer = Files.newBufferedWriter(path, StandardCharsets.UTF_8)
    writer.write(content)
    writer.close()
    path
  }

  lazy val nonExistingPath: Path = {
    val path = Files.createTempFile("pureconfig", "conf")
    Files.delete(path)
    path
  }

  def resourceFromName(name: String): Path = {
    Paths.get(getClass.getResource(name).getPath)
  }

  def listResourcesFromNames(names: String*): Seq[Path] = names.map(resourceFromName)
} 
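A usage sketch of PathUtils, assuming the pureconfig test helpers above are on the classpath; the file content is arbitrary:

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import pureconfig.PathUtils

object PathUtilsDemo extends App {
  // Write a small config snippet as UTF-8 and read it back with the same charset
  val path = PathUtils.createTempFile("answer = 42")
  val text = new String(Files.readAllBytes(path), StandardCharsets.UTF_8)
  assert(text == "answer = 42")
  println(s"Read back: $text")
}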
Example 169
Source File: ParseTests.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.benchmark

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit

import coursier.maven.MavenRepository
import coursier.moduleString
import org.apache.maven.model.io.xpp3.MavenXpp3Reader
import org.openjdk.jmh.annotations._

import scala.concurrent.Await
import scala.concurrent.duration.Duration

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class ParseTests {

  @Benchmark
  def parseSparkParent(state: TestState): Unit = {
    val t = state.repositories.head.find(
      mod"org.apache.spark:spark-parent_2.12",
      "2.4.0",
      state.fetcher
    ).run
    val e = Await.result(t.future()(state.ec), Duration.Inf)
    assert(e.isRight)
  }

  @Benchmark
  def parseSparkParentXmlDom(state: TestState): Unit = {
    val content = state.inMemoryCache.fromCache("https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom")
    val res = MavenRepository.parseRawPomDom(content)
    assert(res.isRight)
  }

  @Benchmark
  def parseSparkParentXmlSax(state: TestState): Unit = {
    val content = state.inMemoryCache.fromCache("https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom")
    val res = MavenRepository.parseRawPomSax(content)
    assert(res.isRight)
  }

  @Benchmark
  def parseApacheParent(state: TestState): Unit = {
    val t = state.repositories.head.find(
      mod"org.apache:apache",
      "18",
      state.fetcher
    ).run
    val e = Await.result(t.future()(state.ec), Duration.Inf)
    assert(e.isRight)
  }

  @Benchmark
  def parseSparkParentMavenModel(state: TestState): Unit = {
    val b = state
      .inMemoryCache
      .fromCache("https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom")
      .getBytes(StandardCharsets.UTF_8)
    val reader = new MavenXpp3Reader
    val model = reader.read(new ByteArrayInputStream(b))
  }

} 
Example 170
Source File: PowershellRunner.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.env

import java.nio.charset.StandardCharsets
import java.util.Base64
import dataclass.data
import java.io.InputStream
import java.io.ByteArrayOutputStream

@data class PowershellRunner(
  powershellExePath: String = "powershell.exe",
  options: Seq[String] = PowershellRunner.defaultOptions,
  encodeProgram: Boolean = true
) {

  def runScript(script: String): String = {

    // inspired by https://github.com/soc/directories-jvm/blob/1f344ef0087e8422f6c7334317e73b8763d9e483/src/main/java/io/github/soc/directories/Util.java#L147
    val fullScript = "& {\n" +
      "[Console]::OutputEncoding = [System.Text.Encoding]::UTF8\n" +
      script +
      "\n}"

    val scriptArgs =
      if (encodeProgram) {
        val base64 = Base64.getEncoder()
        val encodedScript = base64.encodeToString(fullScript.getBytes(StandardCharsets.UTF_16LE))
        Seq("-EncodedCommand", encodedScript)
      } else
        Seq("-Command", fullScript)

    val command = Seq(powershellExePath) ++ options ++ scriptArgs

    val b = new ProcessBuilder(command: _*)
      .redirectInput(ProcessBuilder.Redirect.PIPE)
      .redirectOutput(ProcessBuilder.Redirect.PIPE)
      .redirectError(ProcessBuilder.Redirect.INHERIT)
    val p: Process = b.start()
    p.getOutputStream.close()
    val outputBytes = PowershellRunner.readFully(p.getInputStream)
    val retCode = p.waitFor()
    if (retCode == 0)
      new String(outputBytes, StandardCharsets.UTF_8)
    else
      throw new Exception(s"Error running powershell script (exit code: $retCode)")
  }

}

object PowershellRunner {

  def defaultOptions: Seq[String] =
    Seq("-NoProfile", "-NonInteractive")

  private def readFully(is: InputStream): Array[Byte] = {
    val buffer = new ByteArrayOutputStream
    val data = Array.ofDim[Byte](16384)

    var nRead = 0
    while ({
      nRead = is.read(data, 0, data.length)
      nRead != -1
    })
      buffer.write(data, 0, nRead)

    buffer.flush()
    buffer.toByteArray
  }


} 
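The charset detail worth noting above is that PowerShell's -EncodedCommand flag expects the script Base64-encoded from UTF-16LE bytes rather than UTF-8. A standalone sketch of just that encoding step (the script text is an arbitrary example):

import java.nio.charset.StandardCharsets
import java.util.Base64

object EncodedCommandDemo extends App {
  val script = "[Console]::OutputEncoding = [System.Text.Encoding]::UTF8\nGet-Date"
  // PowerShell decodes -EncodedCommand as Base64 over UTF-16LE code units
  val encoded = Base64.getEncoder.encodeToString(script.getBytes(StandardCharsets.UTF_16LE))
  println(s"powershell.exe -NoProfile -NonInteractive -EncodedCommand $encoded")
}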
Example 171
Source File: CacheChecksum.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cache

import java.math.BigInteger
import java.nio.charset.StandardCharsets
import java.util.regex.Pattern

object CacheChecksum {

  private val checksumLength = Set(
    32, // md5
    40, // sha-1
    64, // sha-256
    128 // sha-512
  )

  private def ifHexString(s: String) =
    s.forall(c => c.isDigit || c >= 'a' && c <= 'z')

  private def findChecksum(elems: Seq[String]): Option[BigInteger] =
    elems.collectFirst {
      case rawSum if ifHexString(rawSum) && checksumLength.contains(rawSum.length) =>
        new BigInteger(rawSum, 16)
    }

  private def parseChecksumLine(lines: Seq[String]): Option[BigInteger] =
    findChecksum(lines.map(_.toLowerCase.replaceAll("\\s", "")))

  private def parseChecksumAlternative(lines: Seq[String]): Option[BigInteger] =
    findChecksum(lines.flatMap(_.toLowerCase.split("\\s+"))).orElse {
      findChecksum(
        lines.map { line =>
          line
            .toLowerCase
            .split("\\s+")
            .filter(ifHexString)
            .mkString
        }
      )
    }

  def parseChecksum(content: String): Option[BigInteger] = {
    val lines = Predef.augmentString(content)
      .lines
      .toVector

    parseChecksumLine(lines).orElse(parseChecksumAlternative(lines))
  }

  def parseRawChecksum(content: Array[Byte]): Option[BigInteger] =
    if (content.length == 16 || content.length == 20)
      Some(new BigInteger(content))
    else {
      val s = new String(content, StandardCharsets.UTF_8)
      val lines = Predef.augmentString(s)
        .lines
        .toVector

      parseChecksumLine(lines) orElse parseChecksumAlternative(lines)
    }

} 
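A usage sketch, assuming coursier's cache module is on the classpath; the hex value is the MD5 of "foobar" (it also appears in the HasherTest example further down), and the file name is made up:

import coursier.cache.CacheChecksum

object ChecksumDemo extends App {
  // Typical content of a Maven-style ".md5" file: "<hex digest>  <file name>"
  val content = "3858f62230ac3c915f300c664312c63f  some-artifact.jar"
  println(CacheChecksum.parseChecksum(content)) // Some(<BigInteger of the digest>)
}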
Example 172
Source File: FetchCache.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.internal

import java.io.File
import java.math.BigInteger
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths, StandardCopyOption}
import java.security.MessageDigest

import coursier.cache.CacheLocks
import coursier.core.{Classifier, Dependency, Repository, Type}
import coursier.params.ResolutionParams
import coursier.paths.CachePath
import dataclass.data

@data class FetchCache(base: Path) {

  def dir(key: FetchCache.Key): Path =
    base.resolve(s"${key.sha1.take(2)}/${key.sha1.drop(2)}")
  def resultFile(key: FetchCache.Key): Path =
    dir(key).resolve("artifacts")
  def lockFile(key: FetchCache.Key): Path =
    dir(key).resolve("lock")

  def read(key: FetchCache.Key): Option[Seq[File]] = {
    val resultFile0 = resultFile(key)
    if (Files.isRegularFile(resultFile0)) {
      val artifacts = Predef.augmentString(new String(Files.readAllBytes(resultFile0), StandardCharsets.UTF_8))
        .lines
        .map(_.trim)
        .filter(_.nonEmpty)
        .map(Paths.get(_))
        .toVector

      if (artifacts.forall(Files.isRegularFile(_)))
        Some(artifacts.map(_.toFile))
      else
        None
    } else
      None
  }

  def write(key: FetchCache.Key, artifacts: Seq[File]): Boolean = {
    val resultFile0 = resultFile(key)
    val tmpFile = CachePath.temporaryFile(resultFile0.toFile).toPath

    def doWrite(): Unit = {
      Files.write(tmpFile, artifacts.map(_.getAbsolutePath).mkString("\n").getBytes(StandardCharsets.UTF_8))
      Files.move(tmpFile, resultFile0, StandardCopyOption.ATOMIC_MOVE)
    }

    CacheLocks.withLockOr(
      base.toFile,
      resultFile0.toFile
    )(
      { doWrite(); true },
      Some(false)
    )
  }

}

object FetchCache {

  private[coursier] final case class Key(
    dependencies: Seq[Dependency],
    repositories: Seq[Repository],
    resolutionParams: ResolutionParams,

    // these 4 come from ResolutionParams, but are ordered here
    forceVersion: Seq[(coursier.core.Module, String)],
    properties: Seq[(String, String)],
    forcedProperties: Seq[(String, String)],
    profiles: Seq[String],

    cacheLocation: String,
    classifiers: Seq[Classifier],
    mainArtifacts: Option[Boolean],
    artifactTypesOpt: Option[Seq[Type]]
  ) {
    lazy val repr: String =
      productIterator.mkString("(", ", ", ")")
    lazy val sha1: String = {
      val md = MessageDigest.getInstance("SHA-1")
      val b = md.digest(repr.getBytes(StandardCharsets.UTF_8))
      val s = new BigInteger(1, b).toString(16)
      ("0" * (40 - s.length)) + s
    }
  }

} 
Example 173
Source File: PlatformTestHelpers.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier

import java.math.BigInteger
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.security.MessageDigest
import java.util.Locale

import coursier.cache.{Cache, MockCache}
import coursier.paths.Util
import coursier.util.{Sync, Task}

import scala.concurrent.{ExecutionContext, Future}

abstract class PlatformTestHelpers {

  private lazy val pool = Sync.fixedThreadPool(6)

  private val mockDataLocation = {
    val dir = Paths.get("modules/tests/metadata")
    assert(Files.isDirectory(dir))
    dir
  }

  val handmadeMetadataLocation = {
    val dir = Paths.get("modules/tests/handmade-metadata/data")
    assert(Files.isDirectory(dir))
    dir
  }

  val handmadeMetadataBase = handmadeMetadataLocation
    .toAbsolutePath
    .toFile // .toFile.toURI gives file:/ URIs, whereas .toUri gives file:/// (the former appears in some test fixtures now)
    .toURI
    .toASCIIString
    .stripSuffix("/") + "/"

  val writeMockData = Option(System.getenv("FETCH_MOCK_DATA"))
    .exists(s => s == "1" || s.toLowerCase(Locale.ROOT) == "true")

  val cache: Cache[Task] =
    MockCache.create[Task](mockDataLocation, pool = pool, writeMissing = writeMockData)
      .withDummyArtifact(_.url.endsWith(".jar"))

  val handmadeMetadataCache: Cache[Task] =
    MockCache.create[Task](handmadeMetadataLocation, pool = pool)

  val cacheWithHandmadeMetadata: Cache[Task] =
    MockCache.create[Task](mockDataLocation, pool = pool, Seq(handmadeMetadataLocation), writeMissing = writeMockData)
      .withDummyArtifact(_.url.endsWith(".jar"))

  def textResource(path: String)(implicit ec: ExecutionContext): Future[String] =
    Future {
      val p = Paths.get(path)
      val b = Files.readAllBytes(p)
      new String(b, StandardCharsets.UTF_8)
    }

  def maybeWriteTextResource(path: String, content: String): Unit = {
    val p = Paths.get(path)
    Util.createDirectories(p.getParent)
    Files.write(p, content.getBytes(StandardCharsets.UTF_8))
  }

  def sha1(s: String): String = {
    val md = MessageDigest.getInstance("SHA-1")
    val b = md.digest(s.getBytes(StandardCharsets.UTF_8))
    new BigInteger(1, b).toString(16)
  }

} 
Example 174
Source File: Authentication.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.core

import java.nio.charset.StandardCharsets
import java.util.Base64

import dataclass.data

@data class Authentication(
  user: String,
  passwordOpt: Option[String],
  httpHeaders: Seq[(String, String)],
  optional: Boolean,
  realmOpt: Option[String],
  httpsOnly: Boolean,
  passOnRedirect: Boolean
) {

  override def toString: String =
    s"Authentication($user, ****, ${httpHeaders.map { case (k, v) => (k, "****") }}, $optional, $realmOpt, $httpsOnly, $passOnRedirect)"


  def withPassword(password: String): Authentication =
    withPasswordOpt(Some(password))
  def withRealm(realm: String): Authentication =
    withRealmOpt(Some(realm))

  def userOnly: Boolean =
    this == Authentication(user)

  def allHttpHeaders: Seq[(String, String)] = {
    val basicAuthHeader = passwordOpt.toSeq.map { p =>
      ("Authorization", "Basic " + Authentication.basicAuthenticationEncode(user, p))
    }
    basicAuthHeader ++ httpHeaders
  }

}

object Authentication {

  def apply(user: String): Authentication =
    Authentication(user, None, Nil, optional = false, None, httpsOnly = true, passOnRedirect = false)
  def apply(user: String, password: String): Authentication =
    Authentication(user, Some(password), Nil, optional = false, None, httpsOnly = true, passOnRedirect = false)

  def apply(
    user: String,
    passwordOpt: Option[String],
    optional: Boolean,
    realmOpt: Option[String],
    httpsOnly: Boolean,
    passOnRedirect: Boolean
  ): Authentication =
    new Authentication(user, passwordOpt, Nil, optional, realmOpt, httpsOnly, passOnRedirect)

  def apply(
    user: String,
    password: String,
    optional: Boolean,
    realmOpt: Option[String],
    httpsOnly: Boolean,
    passOnRedirect: Boolean
  ): Authentication =
    Authentication(user, Some(password), Nil, optional, realmOpt, httpsOnly, passOnRedirect)

  def apply(httpHeaders: Seq[(String, String)]): Authentication =
    Authentication("", None, httpHeaders, optional = false, None, httpsOnly = true, passOnRedirect = false)

  def apply(
    httpHeaders: Seq[(String, String)],
    optional: Boolean,
    realmOpt: Option[String],
    httpsOnly: Boolean,
    passOnRedirect: Boolean
  ): Authentication =
    Authentication("", None, httpHeaders, optional, realmOpt, httpsOnly, passOnRedirect)


  private[coursier] def basicAuthenticationEncode(user: String, password: String): String =
    Base64.getEncoder.encodeToString(
      s"$user:$password".getBytes(StandardCharsets.UTF_8)
    )

} 
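The same UTF-8 + Base64 encoding used by basicAuthenticationEncode above, as a self-contained sketch; the credentials are made up:

import java.nio.charset.StandardCharsets
import java.util.Base64

object BasicAuthDemo extends App {
  val user = "alice"
  val password = "s3cret"
  // HTTP Basic auth: Base64 over the UTF-8 bytes of "user:password"
  val token = Base64.getEncoder.encodeToString(s"$user:$password".getBytes(StandardCharsets.UTF_8))
  println(s"Authorization: Basic $token") // Authorization: Basic YWxpY2U6czNjcmV0
}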
Example 175
Source File: RepositoryParams.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cli.params

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import cats.data.{NonEmptyList, Validated, ValidatedNel}
import cats.implicits._
import coursier.{Repositories, moduleString}
import coursier.cli.install.SharedChannelParams
import coursier.cli.options.RepositoryOptions
import coursier.core.Repository
import coursier.install.Channel
import coursier.ivy.IvyRepository
import coursier.maven.MavenRepository
import coursier.parse.RepositoryParser

final case class RepositoryParams(
  repositories: Seq[Repository],
  channels: SharedChannelParams
)

object RepositoryParams {

  def apply(options: RepositoryOptions, hasSbtPlugins: Boolean = false): ValidatedNel[String, RepositoryParams] = {

    val repositoriesV = Validated.fromEither(
      RepositoryParser.repositories(options.repository)
        .either
        .left
        .map {
          case h :: t => NonEmptyList(h, t)
        }
    )

    val channelsV = SharedChannelParams(options.channelOptions)

    (repositoriesV, channelsV).mapN {
      (repos0, channels) =>

        // prepend defaults
        val defaults =
          if (options.noDefault) Nil
          else {
            val extra =
              if (hasSbtPlugins) Seq(Repositories.sbtPlugin("releases"))
              else Nil
            coursier.Resolve.defaultRepositories ++ extra
          }
        var repos = defaults ++ repos0

        // take sbtPluginHack into account
        repos = repos.map {
          case m: MavenRepository => m.withSbtAttrStub(options.sbtPluginHack)
          case other => other
        }

        // take dropInfoAttr into account
        if (options.dropInfoAttr)
          repos = repos.map {
            case m: IvyRepository => m.withDropInfoAttributes(true)
            case other => other
          }

        RepositoryParams(
          repos,
          channels
        )
    }
  }
} 
Example 176
Source File: InstallParams.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cli.install

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import cats.data.{NonEmptyList, Validated, ValidatedNel}
import cats.implicits._
import coursier.cli.jvm.SharedJavaParams
import coursier.cli.params.{CacheParams, EnvParams, OutputParams}
import coursier.install.Channel

final case class InstallParams(
  cache: CacheParams,
  output: OutputParams,
  shared: SharedInstallParams,
  sharedChannel: SharedChannelParams,
  sharedJava: SharedJavaParams,
  env: EnvParams,
  addChannels: Seq[Channel],
  installChannels: Seq[String],
  force: Boolean
) {
  lazy val channels: Seq[Channel] =
    (sharedChannel.channels ++ addChannels).distinct
}

object InstallParams {

  def apply(options: InstallOptions, anyArg: Boolean): ValidatedNel[String, InstallParams] = {

    val cacheParamsV = options.cacheOptions.params(None)
    val outputV = OutputParams(options.outputOptions)

    val sharedV = SharedInstallParams(options.sharedInstallOptions)

    val sharedChannelV = SharedChannelParams(options.sharedChannelOptions)
    val sharedJavaV = SharedJavaParams(options.sharedJavaOptions)

    val envV = EnvParams(options.envOptions)

    val addChannelsV = options.addChannel.traverse { s =>
      val e = Channel.parse(s)
        .left.map(NonEmptyList.one)
        .map(c => (s, c))
      Validated.fromEither(e)
    }

    val force = options.force

    val checkNeedsChannelsV =
      if (anyArg && sharedChannelV.toOption.exists(_.channels.isEmpty) && addChannelsV.toOption.exists(_.isEmpty))
        Validated.invalidNel(s"Error: no channels specified")
      else
        Validated.validNel(())

    val flags = Seq(
      options.addChannel.nonEmpty,
      envV.toOption.fold(false)(_.anyFlag)
    )
    val flagsV =
      if (flags.count(identity) > 1)
        Validated.invalidNel("Error: can only specify one of --add-channel, --env, --setup.")
      else
        Validated.validNel(())

    val checkArgsV =
      if (anyArg && flags.exists(identity))
        Validated.invalidNel(s"Error: unexpected arguments passed along --add-channel, --env, or --setup.")
      else
        Validated.validNel(())

    (cacheParamsV, outputV, sharedV, sharedChannelV, sharedJavaV, envV, addChannelsV, checkNeedsChannelsV, flagsV, checkArgsV).mapN {
      (cacheParams, output, shared, sharedChannel, sharedJava, env, addChannels, _, _, _) =>
        InstallParams(
          cacheParams,
          output,
          shared,
          sharedChannel,
          sharedJava,
          env,
          addChannels.map(_._2),
          addChannels.map(_._1),
          force
        )
    }
  }
} 
Example 177
Source File: SharedChannelParams.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cli.install

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import cats.data.{NonEmptyList, Validated, ValidatedNel}
import cats.implicits._
import coursier.install.Channel
import coursier.install.Channels

final case class SharedChannelParams(
  channels: Seq[Channel]
)

object SharedChannelParams {
  def apply(options: SharedChannelOptions): ValidatedNel[String, SharedChannelParams] = {

    val channelsV = options
      .channel
      .traverse { s =>
        val e = Channel.parse(s)
          .left.map(NonEmptyList.one)
        Validated.fromEither(e)
      }

    val defaultChannels =
      if (options.defaultChannels) Channels.defaultChannels
      else Nil

    val contribChannels =
      if (options.contrib) Channels.contribChannels
      else Nil

    val fileChannelsV =
      if (options.fileChannels) {
        val configDir = coursier.paths.CoursierPaths.configDirectory()
        val channelDir = new File(configDir, "channels")
        val files = Option(channelDir.listFiles())
          .getOrElse(Array.empty[File])
          .filter(f => !f.getName.startsWith("."))
        val rawChannels = files.toList.flatMap { f =>
          val b = Files.readAllBytes(f.toPath)
          val s = new String(b, StandardCharsets.UTF_8)
          s.linesIterator.map(_.trim).filter(_.nonEmpty).toSeq
        }
        rawChannels.traverse { s =>
          val e = Channel.parse(s)
            .left.map(NonEmptyList.one)
          Validated.fromEither(e)
        }
      } else
        Validated.validNel(Nil)

    (channelsV, fileChannelsV).mapN {
      (channels, fileChannels) =>
        SharedChannelParams(
          (channels ++ fileChannels ++ defaultChannels ++ contribChannels).distinct
        )
    }
  }
} 
Example 178
Source File: IOStreamOps.scala    From scala-server-lambda   with MIT License 5 votes vote down vote up
package io.github.howardjohn.lambda

import java.io.{InputStream, OutputStream}
import java.nio.charset.StandardCharsets

import scala.io.Source

object StreamOps {
  implicit class InputStreamOps(val is: InputStream) extends AnyVal {
    def consume(): String = {
      val contents = Source.fromInputStream(is).mkString
      is.close()
      contents
    }
  }

  implicit class OutputStreamOps(val os: OutputStream) extends AnyVal {
    def writeAndClose(contents: String): Unit = {
      os.write(contents.getBytes(StandardCharsets.UTF_8))
      os.close()
    }
  }
} 
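A round-trip sketch of the StreamOps syntax above, assuming it is on the classpath; the payloads are arbitrary:

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.charset.StandardCharsets
import io.github.howardjohn.lambda.StreamOps._

object StreamOpsDemo extends App {
  // consume() reads the stream to a String and closes it
  val in = new ByteArrayInputStream("request body".getBytes(StandardCharsets.UTF_8))
  println(in.consume())

  // writeAndClose() writes the UTF-8 bytes of a String and closes the stream
  val out = new ByteArrayOutputStream()
  out.writeAndClose("response body")
  println(out.toString(StandardCharsets.UTF_8.name()))
}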
Example 179
Source File: DotRenderer.scala    From reftree   with GNU General Public License v3.0 5 votes vote down vote up
package reftree.render

import reftree.dot.Graph

import java.io.StringWriter
import java.nio.charset.StandardCharsets
import java.nio.file.Path

import scala.sys.process.{Process, BasicIO}

object DotRenderer {
  case class RenderingException(message: String) extends Exception(message)

  def render(
    graph: Graph, output: Path, options: RenderingOptions, format: String
  ): Unit = {
    val args = Seq(
      "-K", "dot",
      "-T", format,
      s"-Gdpi=${options.density}",
      "-o", output.toString
    )
    val process = Process("dot", args)
    val error = new StringWriter
    val io = BasicIO.standard { stream ⇒
      stream.write(graph.encode.getBytes(StandardCharsets.UTF_8))
      stream.close()
    }.withError(BasicIO.processFully(error))
    (process run io).exitValue()
    if (error.toString.nonEmpty) throw RenderingException(error.toString)
    ()
  }
} 
Example 180
Source File: ExportModel.scala    From cdsw-simple-serving   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.datascience.cdsw.acme

import java.nio.charset.StandardCharsets
import java.nio.file.StandardOpenOption._
import java.nio.file.{Files, Paths}
import javax.xml.transform.stream.StreamResult

import org.dmg.pmml.Application
import org.jpmml.model.JAXBUtil
import org.jpmml.sparkml.ConverterUtil

import acme.ACMEModel

object ExportModel {
  
  def main(args: Array[String]): Unit = {
    val training = ACMEData.readData()
    val pipeline = ACMEModel.buildModel()
    
    val pmml = ConverterUtil.toPMML(training.schema, pipeline)
    pmml.getHeader.setApplication(new Application("ACME Occupancy Detection"))
    
    val modelPath = Paths.get("src", "main", "resources")
    if (!Files.exists(modelPath)) {
      Files.createDirectory(modelPath)
    }
    val pmmlFile = modelPath.resolve("model.pmml")
    val writer = Files.newBufferedWriter(pmmlFile, StandardCharsets.UTF_8, WRITE, CREATE, TRUNCATE_EXISTING)
    try {
      JAXBUtil.marshalPMML(pmml, new StreamResult(writer))
    } finally {
      writer.close()
    }
  }

} 
Example 181
Source File: PMMLModelSuite.scala    From cdsw-simple-serving   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.datascience.cdsw.acme

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.nio.file.StandardOpenOption._
import scala.collection.JavaConverters._

import org.apache.commons.csv.CSVFormat
import org.dmg.pmml.FieldName
import org.jpmml.evaluator.{ModelEvaluatorFactory, ProbabilityDistribution}
import org.jpmml.model.PMMLUtil
import org.scalatest.{FlatSpec, Matchers}

class PMMLModelSuite extends FlatSpec with Matchers {
  
  "model" should "be 90+% accurate" in {
    val modelPath = Paths.get("src", "main", "resources", "model.pmml")
    val stream = Files.newInputStream(modelPath, READ)
    val pmml = 
      try {
        PMMLUtil.unmarshal(stream)
      } finally {
        stream.close()
      }
    
    val evaluator = ModelEvaluatorFactory.newInstance().newModelEvaluator(pmml)
    evaluator.verify()
    
    var correct = 0
    var total = 0
    
    val testPath = Paths.get("src", "test", "resources", "datatest.csv")
    val testReader = Files.newBufferedReader(testPath, StandardCharsets.UTF_8)
    try {
      CSVFormat.RFC4180.withFirstRecordAsHeader().parse(testReader).asScala.foreach { record =>
        val inputMap = record.toMap.asScala.
          filterKeys(_ != "Occupancy").
          map { case (field, fieldValue) => (new FieldName(field), fieldValue) }.asJava
        val outputMap = evaluator.evaluate(inputMap)
        
        val expected = record.get("Occupancy").toInt
        val actual = outputMap.get(new FieldName("Occupancy")).
          asInstanceOf[ProbabilityDistribution].getResult.toString.toInt
        
        if (expected == actual) {
          correct += 1
        }
        total += 1
      }
    } finally {
      testReader.close()
    }
    
    val accuracy = correct.toDouble / total
    println(s"Accuracy: $accuracy")
    assert(accuracy >= 0.9)
  }

} 
Example 182
Source File: S3Utils.scala    From osmesa   with Apache License 2.0 5 votes vote down vote up
package osmesa.analytics

import java.nio.charset.StandardCharsets

import com.amazonaws.services.s3.AmazonS3URI
import geotrellis.spark.io.s3.S3Client

import scala.io.Source

object S3Utils {
  def readText(uri: String): String = {
    val s3Uri = new AmazonS3URI(uri)
    val is = S3Client.DEFAULT.getObject(s3Uri.getBucket, s3Uri.getKey).getObjectContent
    try {
      Source.fromInputStream(is)(StandardCharsets.UTF_8).mkString
    } finally {
      is.close()
    }
  }
} 
Example 183
Source File: CountriesTest.scala    From osmesa   with Apache License 2.0 5 votes vote down vote up
package osmesa.analytics

import org.locationtech.jts.geom.Coordinate
import geotrellis.vector._
import geotrellis.vector.io._
import org.scalatest._
import spray.json._

import geotrellis.spark.util._

class CountriesTest extends FunSuite with Matchers {
  def time[T](msg: String)(f: => T) = {
    val start = System.currentTimeMillis
    val v = f
    val end = System.currentTimeMillis
    println(s"[TIMING] ${msg}: ${java.text.NumberFormat.getIntegerInstance.format(end - start)} ms")
    v
  }

  def write(path: String, txt: String): Unit = {
    import java.nio.file.{Paths, Files}
    import java.nio.charset.StandardCharsets

    Files.write(Paths.get(path), txt.getBytes(StandardCharsets.UTF_8))
  }

  test("Generate some random points and see if they make sense") {
    val countries = Countries.all
    val rand = new scala.util.Random
    val points =
      countries.flatMap { mpf =>
        val env = mpf.geom.envelope

        for(i <- 0 until 10) yield {
          val x = env.xmin + (rand.nextDouble * env.width)
          val y = env.ymin + (rand.nextDouble * env.height)
          new Coordinate(x, y)
        }
      }

    val l = {
      // Ensure that we can serialize the Lookup.
      val x =
        time("Creating CountryLookup") { new CountryLookup() }
      val s = KryoSerializer.serialize(x)
      KryoSerializer.deserialize[CountryLookup](s)
    }

    val pcs =
      Countries.all.map { mpf =>
        (mpf.geom.prepare, mpf.data)
      }

    // Brute force lookup, without spatial index
    def bfLookup(coord: Coordinate): Option[CountryId] =
      pcs.find { case (pg, _) => pg.contains(Point(coord.x, coord.y)) }.
        map { case (_, data) => data }

    val actual =
      time("LOOKUP") {
        points.
          map { p => l.lookup(p).map { cid => PointFeature(Point(p.x, p.y), cid) } }
      }

    val expected =
      time("BRUTE FORCE LOOKUP") {
        points.
          map { p =>
            bfLookup(p).map { cid => PointFeature(Point(p.x, p.y), cid) }
          }
      }

    val nodeIndex =
      time("Creating nodeIndex") {
        SpatialIndex(points) { p => (p.x, p.y) }
      }

    val nodeIndexed =
      time("NODE INDEX LOOKUP") {
        // Another way to do the spatial index, indexing the nodes instead of the countries.
        // This turns out to be slower than the lookup for large point sets.
        val result: Vector[Option[PointFeature[CountryId]]] =
          Countries.all.
            flatMap { mpf =>
              val pg = mpf.geom.prepare
              nodeIndex.traversePointsInExtent(mpf.geom.envelope).
                map { p =>
                  if(pg.covers(p)) { Some(PointFeature(Point(p.x, p.y), mpf.data)) }
                  else { None }
                }
            }
        result
      }

    actual.flatten.length should be (expected.flatten.length)
    actual.flatten.length should be (nodeIndexed.flatten.length)
  }
} 
Example 184
Source File: Base64.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import java.nio.charset.StandardCharsets

object Base64 {
  private val encoder = java.util.Base64.getEncoder
  private val decoder = java.util.Base64.getDecoder

  def encodeString(s: String): String = {
    val bytes = s.getBytes(StandardCharsets.UTF_8)
    encoder.encodeToString(bytes)
  }

  def decodeString(s: String): String = {
    val bytes = decoder.decode(s)
    new String(bytes, StandardCharsets.UTF_8)
  }
} 
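A round-trip sketch of the wrapper above, assuming the ncdbg infra package is on the classpath; the input string is arbitrary and exercises the UTF-8 path with a non-ASCII character:

import com.programmaticallyspeaking.ncd.infra.Base64

object Base64Demo extends App {
  val encoded = Base64.encodeString("héllo") // UTF-8 bytes, then Base64: aMOpbGxv
  println(encoded)
  println(Base64.decodeString(encoded))      // héllo again
}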
Example 185
Source File: ScriptImpl.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import java.nio.charset.StandardCharsets

import com.programmaticallyspeaking.ncd.host.{Script, ScriptVersion}
import com.programmaticallyspeaking.ncd.infra.{Hasher, ScriptURL}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

class ScriptImpl(val url: ScriptURL, scriptData: Array[Byte], val id: String, val version: ScriptVersion) extends Script {
  import ScriptImpl._

  val contents = new String(scriptData, UTF8)

  val lines: Seq[String] = contents.split("\r?\n")

  val lineCount = lines.length
  val lastLineLength = lines.lastOption.map(_.length).getOrElse(0)

  private var cachedHash: String = _
  private object hashLock
  override def contentsHash(): String = {
    if (cachedHash == null) {
      hashLock.synchronized {
        if (cachedHash == null) {
          cachedHash = Hasher.md5(scriptData)
        }
      }
    }
    cachedHash
  }

  private val sourceMapUrlBegin = "//# sourceMappingURL="
  private val sourceUrlBegin = "//# sourceURL="

  private var _sourceMapUrl: Option[ScriptURL] = None
  private var _sourceUrl: Option[ScriptURL] = None

  lines.foreach { line =>
    if (line.startsWith(sourceMapUrlBegin)) {
      _sourceMapUrl = Some(line.substring(sourceMapUrlBegin.length)).map(url.resolve)
    } else if (line.startsWith(sourceUrlBegin)) {
      _sourceUrl = Some(line.substring(sourceUrlBegin.length)).map(url.resolve)
    }
  }

  override def sourceMapUrl(): Option[ScriptURL] = _sourceMapUrl
  override def sourceUrl(): Option[ScriptURL] = _sourceUrl

  override def sourceLine(lineNumber1Based: Int): Option[String] = {
    lines.lift(lineNumber1Based - 1)
  }

  override def toString: String = {
    val str = url.toString
    if (str == "") s"[id=$id]" else str
  }
}

object ScriptImpl {

  private val UTF8 = StandardCharsets.UTF_8

  def fromSource(url: ScriptURL, source: String, id: String, version: ScriptVersion): Script = {
    val bytes = source.getBytes(UTF8)
    new ScriptImpl(url, bytes, id, version)
  }
} 
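A small sketch of ScriptImpl.fromSource, assuming the ncdbg modules are on the classpath; the ScriptVersion arguments follow the test example further down, and the source text is arbitrary:

import com.programmaticallyspeaking.ncd.host.ScriptVersion
import com.programmaticallyspeaking.ncd.infra.ScriptURL
import com.programmaticallyspeaking.ncd.nashorn.ScriptImpl

object ScriptImplDemo extends App {
  val script = ScriptImpl.fromSource(ScriptURL.create(""), "var x = 1;\nvar y = 2;", "script-1", ScriptVersion(1, true))
  println(script.lineCount)      // 2
  println(script.contentsHash()) // MD5 over the UTF-8 bytes of the source
}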
Example 186
Source File: HasherTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import java.nio.charset.StandardCharsets

import com.programmaticallyspeaking.ncd.testing.AsyncUnitTest

import scala.concurrent.Future

class HasherTest extends AsyncUnitTest {

  "Hasher.md5" - {
    "hashes a known value" in {
      val bytes = "foobar".getBytes(StandardCharsets.US_ASCII)
      Hasher.md5(bytes) should be ("3858F62230AC3C915F300C664312C63F")
    }

    "hashes multiple times" in {
      "barbar".getBytes(StandardCharsets.US_ASCII)
      val bytes = "barbar".getBytes(StandardCharsets.US_ASCII)
      Hasher.md5(bytes) should be ("5426824942DB4253F87A1009FD5D2D4F")
    }

    "is thread safe" in {
      val bytes = "foobar".getBytes(StandardCharsets.US_ASCII)
      val fs = (1 to 50).map { i => Future { Hasher.md5(bytes) } }
      val all = Future.sequence(fs)
      whenReady(all) { results =>
        results.distinct should be (Seq("3858F62230AC3C915F300C664312C63F"))
      }
    }
  }
} 
Example 187
Source File: ScriptPublisherTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import java.nio.charset.StandardCharsets

import com.programmaticallyspeaking.ncd.host.{ScriptAdded, ScriptEvent, ScriptVersion}
import com.programmaticallyspeaking.ncd.infra.ScriptURL
import com.programmaticallyspeaking.ncd.nashorn.NashornDebuggerHost.InternalScriptAdded
import com.programmaticallyspeaking.ncd.testing.UnitTest

import scala.collection.mutable.ListBuffer
import scala.reflect.ClassTag

class ScriptPublisherTest extends UnitTest {

  def sut(collectTo: ListBuffer[ScriptEvent]) = new ScriptPublisher(new CollectingEmitter(collectTo))

  def isScriptEvent(pf: PartialFunction[ScriptEvent, Boolean]): ScriptEvent => Boolean =
    e => pf.applyOrElse(e, {
      _: ScriptEvent => false
    })

  "ScriptPublisher" - {
    "with a script" - {
      val script = testScript("a")

      "publishes it as ScriptAdded to the event emitter" in {
        val target = ListBuffer[ScriptEvent]()
        sut(target).publish(script)
        target should contain (ScriptAdded(script))
      }

      "publishes it as InternalScriptAdded to the event emitter" in {
        val target = ListBuffer[ScriptEvent]()
        sut(target).publish(script)
        target should contain (InternalScriptAdded(script))
      }

      "publishes InternalScriptAdded _before_ ScriptAdded to ensure internal functions run before the domain actors" in {
        val target = ListBuffer[ScriptEvent]()
        sut(target).publish(script)
        val idxInternal = target.indexWhere(isScriptEvent { case InternalScriptAdded(s) if s.id == script.id => true })
        val idxExternal = target.indexWhere(isScriptEvent { case ScriptAdded(s) if s.id == script.id => true })
        idxInternal should be < (idxExternal)
      }
    }

    "with a script that contains the marker for suppressed publishing" - {
      val script = testScript("a", s"42")

      "does NOT publish it as ScriptAdded to the event emitter" in {
        val target = ListBuffer[ScriptEvent]()
        sut(target).publish(script)
        target should not contain (ScriptAdded(script))
      }

      "DOES publish it as InternalScriptAdded to the event emitter" in {
        val target = ListBuffer[ScriptEvent]()
        sut(target).publish(script)
        target should contain (InternalScriptAdded(script))
      }
    }

    "publishes multiple scripts" in {
      val target = ListBuffer[ScriptEvent]()
      val publisher = sut(target)
      publisher.publish(testScript("a"))
      publisher.publish(testScript("b"))
      countOfType[ScriptAdded](target) should be (2)
    }

    "doesn't republish a script with the same ID" in {
      val target = ListBuffer[ScriptEvent]()
      val publisher = sut(target)
      publisher.publish(testScript("a"))
      publisher.publish(testScript("a"))
      countOfType[ScriptAdded](target) should be (1)
    }
  }

  private def countOfType[A <: ScriptEvent : ClassTag](list: Seq[ScriptEvent]): Int = {
    val clazz = implicitly[ClassTag[A]].runtimeClass
    list.count(clazz.isInstance)
  }

  class CollectingEmitter(collectTo: ListBuffer[ScriptEvent]) extends ScriptEventEmitter {
    override def emit(event: ScriptEvent): Unit = collectTo += event
  }

  def testScript(id: String, contents: String = "") = {
    val data = contents.getBytes(StandardCharsets.UTF_8)
    new ScriptImpl(ScriptURL.create(""), data, id, ScriptVersion(1, true))
  }
} 
Example 188
Source File: MemoryAppender.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.testing

import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.UnsynchronizedAppenderBase
import ch.qos.logback.core.encoder.Encoder
import ch.qos.logback.core.status.ErrorStatus
import java.io.{ByteArrayOutputStream, IOException, OutputStream}
import java.nio.charset.StandardCharsets

import com.programmaticallyspeaking.ncd.messaging.{Observable, SerializedSubject}

object MemoryAppender {
  private[MemoryAppender] val logEventSubject = new SerializedSubject[String]

  def logEvents: Observable[String] = logEventSubject
}

class MemoryAppender extends UnsynchronizedAppenderBase[ILoggingEvent] {
  import MemoryAppender._
  private var encoder: Encoder[ILoggingEvent] = _
  private var outputStream = new OutputStream {
    override def write(b: Int): Unit = ???

    override def write(b: Array[Byte]): Unit = {
      val str = new String(b, StandardCharsets.UTF_8)
      logEventSubject.onNext(str)
    }
  }

  override def start(): Unit = {
    try {
      Option(encoder).foreach(_.init(outputStream))
      super.start()
    } catch {
      case e: IOException =>
        started = false
        addStatus(new ErrorStatus("Failed to initialize encoder for appender named [" + name + "].", this, e))
    }
  }

  override protected def append(event: ILoggingEvent): Unit = {
    if (!isStarted) return
    try {
      event.prepareForDeferredProcessing()
      Option(encoder).foreach(_.doEncode(event))
    } catch {
      case ioe: IOException =>
        started = false
        addStatus(new ErrorStatus("IO failure in appender", this, ioe))
    }
  }

  def setEncoder(e: Encoder[ILoggingEvent]): Unit = {
    encoder = e
  }
} 
Example 189
Source File: VowpalWabbitMurmurWithPrefix.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.vw

import org.vowpalwabbit.spark.VowpalWabbitMurmur
import java.nio.charset.StandardCharsets


class VowpalWabbitMurmurWithPrefix(val prefix: String, val maxSize: Int = 2 * 1024) extends Serializable {
  // worst case is 4 bytes per character
  val ys: Array[Byte] = new Array(maxSize * 4)

  val ysStart: Int = {
      // pre-populate the byte buffer with the prefix - we could go so far as to keep the intermediate hash state :)
      val prefixBytes = prefix.getBytes(StandardCharsets.UTF_8)
      Array.copy(prefixBytes, 0, ys, 0, prefixBytes.length)

      prefixBytes.length
    }

  def hash(str: String, namespaceHash: Int): Int =
    hash(str, 0, str.length, namespaceHash)

  def hash(str: String, start: Int, end: Int, namespaceHash: Int): Int = {
    if (end - start > maxSize)
      VowpalWabbitMurmur.hash(prefix + str.substring(start, end), namespaceHash)
    else {
      // adapted from https://stackoverflow.com/questions/5513144/converting-char-to-byte/20604909#20604909
      // copy sub part
      var i = start
      var j = ysStart // i for chars; j for bytes
      while (i < end) { // fill ys with bytes
        val c = str.charAt(i)
        if (c < 0x80) {
          ys(j) = c.toByte
          i = i + 1
          j = j + 1
        } else if (c < 0x800) {
          ys(j) = (0xc0 | (c >> 6)).toByte
          ys(j + 1) = (0x80 | (c & 0x3f)).toByte
          i = i + 1
          j = j + 2
        } else if (Character.isHighSurrogate(c)) {
          if (end - i < 2) throw new Exception("overflow") // this is not reachable due to maxSize * 4, so just in case
          val d = str.charAt(i + 1)
          val uc: Int =
            if (Character.isLowSurrogate(d))
              Character.toCodePoint(c, d)
            else
              throw new Exception("malformed")

          ys(j) = (0xf0 | ((uc >> 18))).toByte
          ys(j + 1) = (0x80 | ((uc >> 12) & 0x3f)).toByte
          ys(j + 2) = (0x80 | ((uc >> 6) & 0x3f)).toByte
          ys(j + 3) = (0x80 | (uc & 0x3f)).toByte
          i = i + 2 // 2 chars
          j = j + 4
        } else if (Character.isLowSurrogate(c)) {
          throw new Exception("malformed")
        } else {
          ys(j) = (0xe0 | (c >> 12)).toByte
          ys(j + 1) = (0x80 | ((c >> 6) & 0x3f)).toByte
          ys(j + 2) = (0x80 | (c & 0x3f)).toByte
          i = i + 1
          j = j + 3
        }
      }

      VowpalWabbitMurmur.hash(ys, 0, j, namespaceHash)
    }
  }
} 
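A comparison sketch mirroring the test below, assuming the vowpalwabbit-spark artifact is on the classpath; the namespace prefix and feature value are made up:

import org.vowpalwabbit.spark.VowpalWabbitMurmur
import com.microsoft.ml.spark.vw.VowpalWabbitMurmurWithPrefix

object MurmurPrefixDemo extends App {
  val hasher = new VowpalWabbitMurmurWithPrefix("price")
  val withPrefixCache = hasher.hash("42", 0)
  val plain = VowpalWabbitMurmur.hash("price" + "42", 0)
  // Same hash value, but without re-encoding the prefix to UTF-8 on every call
  assert(withPrefixCache == plain)
  println(withPrefixCache)
}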
Example 190
Source File: VerifyVowpalWabbitMurmurWithPrefix.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.vw

import org.vowpalwabbit.spark.VowpalWabbitMurmur
import java.nio.charset.StandardCharsets

import com.microsoft.ml.spark.core.test.base.TestBase

class VerifyVowpalWabbitMurmurWithPrefix extends TestBase {

  case class Sample1(val str: String, val seq: Seq[String])

  test("Verify VowpalWabbitMurmurWithPrefix-based hash produces same results") {
    val prefix = "Markus"

    val fastStringHash = new VowpalWabbitMurmurWithPrefix(prefix)

    var time1: Long = 0
    var time2: Long = 0
    var time3: Long = 0

    for (j <- 0 until 1024) {
      val sb = new StringBuilder

      for (i <- 0 until 128) {
        sb.append(i)

        val str = sb.toString

        // prefix caching + manual UTF-8 conversion with byte-array reuse
        var start = System.nanoTime()
        val h1 = fastStringHash.hash(str, 0, str.length, 0)
        time1 += System.nanoTime() - start

        // allocation of new array for Java char to UTF-8 bytes
        start = System.nanoTime()
        val h2 = VowpalWabbitMurmur.hash(prefix + str, 0)
        time2 += System.nanoTime() - start

        // full string concatenation, UTF-8 conversion, then the native (C++) hash
        start = System.nanoTime()
        val bytes = (prefix + str).getBytes(StandardCharsets.UTF_8)
        val h3 = VowpalWabbitMurmur.hashNative(bytes, 0, bytes.length, 0)
        time3 += System.nanoTime() - start

        assert(h1 == h2)
        assert(h1 == h3)
      }
    }

    println(s"FastStringHashing:   $time1")
    println(s"Java String to UTF8: $time2")
    println(s"Java to C++:         $time3")
  }

  test("Verify VowpalWabbitMurmurWithPrefix verify max-size exceed") {
    val fastStringHash = new VowpalWabbitMurmurWithPrefix("a", 2)

    val longStr = (1 to 32).mkString("_")

    assert(fastStringHash.hash(longStr, 0) == VowpalWabbitMurmur.hash("a" + longStr, 0))
  }

  def verifyHashesAreTheSame(): Unit = {
    Seq("\u0900def", "\ud800\udc00def").foreach { unicodeString =>
      test(s"Verify VowpalWabbitMurmurWithPrefix verify unicode $unicodeString") {
        val fastStringHash = new VowpalWabbitMurmurWithPrefix("abc")

        assert(fastStringHash.hash(unicodeString, 0) == VowpalWabbitMurmur.hash("abc" + unicodeString, 0))
      }
    }
  }

  test("VowpalWabbitMurmurWithPrefix invalid unicode string") {
    assertThrows[Exception] {
      new VowpalWabbitMurmurWithPrefix("abc").hash("\ud800def", 0)
    }
  }
} 
Example 191
Source File: KafkaJsonSerializer.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.payment.kafka
import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.Encoder
import org.apache.kafka.common.serialization.{ Deserializer, Serializer, StringSerializer }
import ru.pavkin.payment.event.PaymentReceived

class PaymentReceivedEventSerializer extends Serializer[PaymentReceived] {
  private val stringSerializer = new StringSerializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def serialize(topic: String, data: PaymentReceived): Array[Byte] =
    stringSerializer.serialize(topic, Encoder[PaymentReceived].apply(data).noSpaces)

  def close(): Unit = ()
}

class PaymentReceivedEventDeserializer extends Deserializer[PaymentReceived] {
  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def close(): Unit = ()

  def deserialize(topic: String, data: Array[Byte]): PaymentReceived =
    if (data ne null)
      decode[PaymentReceived](new String(data, StandardCharsets.UTF_8)).fold(throw _, identity)
    else null

} 
Example 192
Source File: LoggerOutputStream.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.utils

import java.io.{ByteArrayOutputStream, OutputStream}
import java.nio.charset.StandardCharsets

import org.apache.log4j.{Level, Logger}

class LoggerOutputStream(logger: Logger, level: Level) extends OutputStream {
  private val buffer = new ByteArrayOutputStream()

  override def write(b: Int) {
    buffer.write(b)
    if (b == '\n') {
      val line = buffer.toString(StandardCharsets.UTF_8.name())
      level match {
        case Level.TRACE => logger.trace(line)
        case Level.DEBUG => logger.debug(line)
        case Level.INFO  => logger.info(line)
        case Level.WARN  => logger.warn(line)
        case Level.ERROR => logger.error(line)
      }
      buffer.reset()
    }
  }
} 
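A sketch that routes a PrintStream through the adapter above, assuming log4j and the hail utils package are on the classpath; the logger name is arbitrary:

import java.io.PrintStream
import java.nio.charset.StandardCharsets
import org.apache.log4j.{Level, Logger}
import is.hail.utils.LoggerOutputStream

object LoggerOutputStreamDemo extends App {
  val os = new LoggerOutputStream(Logger.getLogger("demo"), Level.INFO)
  val ps = new PrintStream(os, true, StandardCharsets.UTF_8.name())
  // Bytes are buffered until a newline, then emitted as one INFO log line
  ps.println("hello from LoggerOutputStream")
}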
Example 193
Source File: HTTPClient.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.utils

import java.net.URL
import java.io.OutputStream
import java.io.InputStream
import java.net.HttpURLConnection
import is.hail.utils._
import java.nio.charset.StandardCharsets
import org.apache.commons.io.output.ByteArrayOutputStream


object HTTPClient {
  def post[T](
    url: String,
    contentLength: Int,
    writeBody: OutputStream => Unit,
    readResponse: InputStream => T = (_: InputStream) => (),
    chunkSize: Int = 0
  ): T = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("POST")
    if (chunkSize > 0)
      conn.setChunkedStreamingMode(chunkSize)
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Length", Integer.toString(contentLength))
    using(conn.getOutputStream())(writeBody)
    assert(200 <= conn.getResponseCode() && conn.getResponseCode() < 300,
      s"POST ${url} ${conn.getResponseCode()} ${using(conn.getErrorStream())(fullyReadInputStreamAsString)}")
    val result = using(conn.getInputStream())(readResponse)
    conn.disconnect()
    result
  }

  def get[T](
    url: String,
    readResponse: InputStream => T
  ): T = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("GET")
    assert(200 <= conn.getResponseCode() && conn.getResponseCode() < 300,
      s"GET ${url} ${conn.getResponseCode()} ${using(conn.getErrorStream())(fullyReadInputStreamAsString)}")
    val result = using(conn.getInputStream())(readResponse)
    conn.disconnect()
    result
  }

  def delete(
    url: String,
    readResponse: InputStream => Unit = (_: InputStream) => ()
  ): Unit = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("DELETE")
    assert(200 <= conn.getResponseCode() && conn.getResponseCode() < 300,
      s"DELETE ${url} ${conn.getResponseCode()} ${using(conn.getErrorStream())(fullyReadInputStreamAsString)}")
    val result = using(conn.getInputStream())(readResponse)
    conn.disconnect()
    result
  }

  private[this] def fullyReadInputStreamAsString(is: InputStream): String =
    using(new ByteArrayOutputStream()) { baos =>
      drainInputStreamToOutputStream(is, baos)
      new String(baos.toByteArray(), StandardCharsets.UTF_8)
    }
} 
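A hypothetical usage sketch of the post helper above, assuming the hail utils package is on the classpath; the URL and JSON body are made up, and a server must actually be listening there:

import java.nio.charset.StandardCharsets
import scala.io.Source
import is.hail.utils.HTTPClient

object HTTPClientDemo extends App {
  val body = """{"name":"example"}""".getBytes(StandardCharsets.UTF_8)
  val response = HTTPClient.post(
    "http://localhost:8080/items",
    contentLength = body.length,
    writeBody = os => os.write(body),
    readResponse = is => Source.fromInputStream(is, StandardCharsets.UTF_8.name()).mkString
  )
  println(response)
}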
Example 194
Source File: FileInfotonTests.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.it

import java.nio.charset.StandardCharsets

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Matchers, TryValues}
import play.api.libs.json._

import scala.concurrent.duration.DurationInt
import scala.io.Source

class FileInfotonTests extends AsyncFunSpec with Matchers with TryValues with Helpers with LazyLogging {
  describe("file infoton") {
    val path = cmt / "InfoFile4"
    val fileStr = Source.fromURL(this.getClass.getResource("/article.txt")).mkString
    val j = Json.obj("Offcourse" -> Seq("I can do it"),"I'm" -> Seq("a spellbinder"))

    val f0 = Http.post(path, fileStr, Some("text/plain;charset=UTF-8"), Nil, ("X-CM-WELL-TYPE" -> "FILE") :: tokenHeader).map { res =>
      withClue(res){
        Json.parse(res.payload) should be(jsonSuccess)
      }
    }
    val f1 = f0.flatMap {_ => spinCheck(100.millis, true)(Http.get(path)){res =>
      new String(res.payload, StandardCharsets.UTF_8) == fileStr && res.contentType.takeWhile(_ != ';') == "text/plain"}
      .map { res =>
        withClue(res) {
          new String(res.payload, StandardCharsets.UTF_8) should be(fileStr)
          res.contentType.takeWhile(_ != ';') should be("text/plain")
        }
      }}
    val f2 = f1.flatMap(_ => Http.post(path, Json.stringify(j), None, Nil, ("X-CM-WELL-TYPE" -> "FILE_MD") :: tokenHeader)).map {res =>
      withClue(res) {
        Json.parse(res.payload) should be(jsonSuccess)
      }
    }
    val f3 = f2.flatMap(_ => spinCheck(100.millis, true)(Http.get(path, List("format" -> "json"))){
      res =>
        val jsonResult = Json.parse(res.payload).transform(fieldsSorter andThen (__ \ 'fields).json.pick)
        jsonResult match {
          case JsSuccess(value, _) => value == j
          case JsError(_) => false
        }
    }.map{ res =>
        withClue(res) {
          Json
            .parse(res.payload)
            .transform(fieldsSorter andThen (__ \ 'fields).json.pick)
            .get shouldEqual j
        }
      }
    )
    val f4 = f3.flatMap(_ => Http.delete(uri = path, headers = tokenHeader).map { res =>
       withClue(res) {
         Json.parse(res.payload) should be(jsonSuccess)
       }
    })
    val lenna = cmt / "lenna"
    val f5 = {
      val lennaInputStream = this.getClass.getResource("/Lenna.png").openStream()
      Http.post(lenna / "Lenna.png", () => lennaInputStream, Some("image/png"), Nil, ("X-CM-WELL-TYPE" -> "FILE") :: tokenHeader).transform { res =>
        // first, close the stream
        lennaInputStream.close()
        withClue(res)(res.map { r =>
          Json.parse(r.payload) should be(jsonSuccess)
        })
      }
    }
    val f6 = spinCheck(100.millis,true,1.minute)(Http.get(lenna,List("op" -> "search","qp" -> "content.mimeType:image/png", "format" -> "json"))){ res =>
        res.status match {
          case 503 => Recoverable
          case 200 => {
            val j = Json.parse(res.payload) \ "results"
            (j \ "total": @unchecked) match {
              case JsDefined(JsNumber(n)) => n.intValue == 1
            }
          }
          case _ => UnRecoverable
        }
      }.map { res =>
      withClue(res) {
        val j = Json.parse(res.payload) \ "results"
        (j \ "infotons": @unchecked) match {
          case JsDefined(JsArray(arr)) => (arr.head \ "system" \ "path": @unchecked) match {
            case JsDefined(JsString(lennaPath)) =>
              lennaPath shouldEqual "/cmt/cm/test/lenna/Lenna.png"
          }
        }
      }
    }

    it("should put File infoton")(f0)
    it("should get previously inserted file with text/plain mimetype")(f1)
    it("should put file infoton metadata")(f2)
    it("should get file infoton metadata")(f3)
    it("should delete file infoton")(f4)
    it("should upload Lenna.png image")(f5)
    it("should search by content.mimeType")(f6)
  }
} 
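
The spinCheck predicates above ultimately rely on a plain UTF-8 round trip: the uploaded text must decode back byte-for-byte identical. A dependency-free sketch of that check (the sample content is illustrative):

import java.nio.charset.StandardCharsets

object Utf8RoundTripExample {
  def main(args: Array[String]): Unit = {
    val fileStr = "some article text, including non-ASCII: éü漢字" // illustrative content
    // What gets uploaded ...
    val payload: Array[Byte] = fileStr.getBytes(StandardCharsets.UTF_8)
    // ... and what the test asserts after reading it back
    assert(new String(payload, StandardCharsets.UTF_8) == fileStr)
    println("round trip ok")
  }
}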
Example 195
Source File: OffsetsService.scala    From CM-Well   with Apache License 2.0 5 votes
package cmwell.common

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import cmwell.zstore.ZStore


trait OffsetsService {

  def read(id: String): Option[Long]
  def readWithTimestamp(id: String): Option[PersistedOffset]
  def writeAsync(id: String, offset: Long): Future[Unit]
}

case class PersistedOffset(offset: Long, timestamp: Long)

class ZStoreOffsetsService(zStore: ZStore) extends OffsetsService {

  override def readWithTimestamp(id: String): Option[PersistedOffset] =
    Await.result(zStore.getOpt(id, dontRetry = true), 10.seconds).map { payload =>
      //todo: this is a check to allow backward compatibility until all clusters` persisted offsets will contain also timestamp
      if (payload.length == 8)
        PersistedOffset(ByteBuffer.wrap(payload).getLong, -1)
      else {
        val s = new String(payload, StandardCharsets.UTF_8)
        val (offset, timestamp) = cmwell.util.string.splitAtNoSep(s, ',')
        PersistedOffset(offset.toLong, timestamp.toLong)
      }
    }

  override def read(id: String): Option[Long] =
    Await.result(zStore.getOpt(id, dontRetry = true), 10.seconds).map { payload =>
      //todo: this is a check to allow backward compatibility until all clusters` persisted offsets will contain also timestamp
      if (payload.length == 8)
        ByteBuffer.wrap(payload).getLong
      else {
        val s = new String(payload, StandardCharsets.UTF_8)
        s.substring(0, s.indexOf(',')).toLong
      }
    }
//    Await.result(zStore.getStringOpt(id), 10.seconds).map(s => s.substring(0, s.indexOf(',')).toLong)
//    Await.result(zStore.getLongOpt(id), 10.seconds)

  override def writeAsync(id: String, offset: Long): Future[Unit] =
    zStore.putString(id, s"$offset,${System.currentTimeMillis}", batched = true)
} 
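
A small sketch of the two payload formats handled above, using only the JDK and the PersistedOffset case class defined in this example: legacy offsets are 8 raw bytes, current ones are a UTF-8 "offset,timestamp" string. The decode logic mirrors readWithTimestamp.

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

object OffsetPayloadExample {
  // Legacy format: the offset persisted as 8 raw bytes
  def legacyPayload(offset: Long): Array[Byte] =
    ByteBuffer.allocate(8).putLong(offset).array()

  // Current format: "offset,timestamp" persisted as a UTF-8 string
  def currentPayload(offset: Long, timestamp: Long): Array[Byte] =
    s"$offset,$timestamp".getBytes(StandardCharsets.UTF_8)

  // Mirrors ZStoreOffsetsService.readWithTimestamp above
  def decode(payload: Array[Byte]): PersistedOffset =
    if (payload.length == 8) PersistedOffset(ByteBuffer.wrap(payload).getLong, -1)
    else {
      val s = new String(payload, StandardCharsets.UTF_8)
      val Array(offset, timestamp) = s.split(",", 2)
      PersistedOffset(offset.toLong, timestamp.toLong)
    }

  def main(args: Array[String]): Unit = {
    println(decode(legacyPayload(42L)))               // PersistedOffset(42,-1)
    println(decode(currentPayload(42L, 1234567890L))) // PersistedOffset(42,1234567890)
  }
}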
Example 196
Source File: GrpcGatewayHandler.scala    From grpcgateway   with MIT License 5 votes
package grpcgateway.handlers

import java.nio.charset.StandardCharsets

import scalapb.GeneratedMessage
import scalapb.json4s.JsonFormat
import io.grpc.ManagedChannel
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.{ ChannelFutureListener, ChannelHandlerContext, ChannelInboundHandlerAdapter }
import io.netty.handler.codec.http._

import scala.concurrent.{ ExecutionContext, Future }

@Sharable
abstract class GrpcGatewayHandler(channel: ManagedChannel)(implicit ec: ExecutionContext) extends ChannelInboundHandlerAdapter {

  def name: String

  def shutdown(): Unit =
    if (!channel.isShutdown) channel.shutdown()

  def supportsCall(method: HttpMethod, uri: String): Boolean
  def unaryCall(method: HttpMethod, uri: String, body: String): Future[GeneratedMessage]

  override def channelRead(ctx: ChannelHandlerContext, msg: scala.Any): Unit = {

    msg match {
      case req: FullHttpRequest =>

        if (supportsCall(req.method(), req.uri())) {

          val body = req.content().toString(StandardCharsets.UTF_8)

          unaryCall(req.method(), req.uri(), body)
            .map(JsonFormat.toJsonString)
            .map(json => {
              buildFullHttpResponse(
                requestMsg = req,
                responseBody = json,
                responseStatus = HttpResponseStatus.OK,
                responseContentType = "application/json"
              )
            })
            .recover({ case err =>

              val (body, status) = err match {
                case e: GatewayException => e.details -> GRPC_HTTP_CODE_MAP.getOrElse(e.code, HttpResponseStatus.INTERNAL_SERVER_ERROR)
                case _ => "Internal error" -> HttpResponseStatus.INTERNAL_SERVER_ERROR
              }

              buildFullHttpResponse(
                requestMsg = req,
                responseBody = body,
                responseStatus = status,
                responseContentType = "application/text"
              )
            }).foreach(resp => {
              ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE)
            })

        } else {
          super.channelRead(ctx, msg)
        }
      case _ => super.channelRead(ctx, msg)
    }
  }
} 
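
A sketch of a concrete handler, assuming a ScalaPB-generated service named Echo with messages EchoRequest and EchoReply; those names, the /echo route, and the stub call are hypothetical stand-ins for whatever the proto actually generates.

import io.grpc.ManagedChannel
import io.netty.handler.codec.http.HttpMethod
import scalapb.GeneratedMessage
import scalapb.json4s.JsonFormat

import scala.concurrent.{ExecutionContext, Future}

class EchoHandler(channel: ManagedChannel)(implicit ec: ExecutionContext)
    extends GrpcGatewayHandler(channel) {

  override val name: String = "echo"

  // Only handle POST /echo
  override def supportsCall(method: HttpMethod, uri: String): Boolean =
    method == HttpMethod.POST && uri.startsWith("/echo")

  override def unaryCall(method: HttpMethod, uri: String, body: String): Future[GeneratedMessage] = {
    // Parse the JSON body into the (hypothetical) generated request type, call the
    // async stub, and return the reply; channelRead above serializes it back to JSON.
    val request = JsonFormat.fromJsonString[EchoRequest](body)
    EchoGrpc.stub(channel).echo(request)
  }
}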
Example 197
Source File: package.scala    From grpcgateway   with MIT License 5 votes
package grpcgateway

import java.nio.charset.StandardCharsets

import scalapb.json4s.JsonFormatException
import io.grpc.Status.Code
import io.netty.buffer.Unpooled
import io.netty.handler.codec.http._

import scala.util.{Failure, Try}
import handlers._

package object handlers {

  val GRPC_HTTP_CODE_MAP: Map[Int, HttpResponseStatus] = Map(
    Code.OK.value()                  -> HttpResponseStatus.OK,
    Code.CANCELLED.value()           -> HttpResponseStatus.GONE,
    Code.UNKNOWN.value()             -> HttpResponseStatus.NOT_FOUND,
    Code.INVALID_ARGUMENT.value()    -> HttpResponseStatus.BAD_REQUEST,
    Code.DEADLINE_EXCEEDED.value()   -> HttpResponseStatus.GATEWAY_TIMEOUT,
    Code.NOT_FOUND.value()           -> HttpResponseStatus.NOT_FOUND,
    Code.ALREADY_EXISTS.value()      -> HttpResponseStatus.CONFLICT,
    Code.PERMISSION_DENIED.value()   -> HttpResponseStatus.FORBIDDEN,
    Code.RESOURCE_EXHAUSTED.value()  -> HttpResponseStatus.INSUFFICIENT_STORAGE,
    Code.FAILED_PRECONDITION.value() -> HttpResponseStatus.PRECONDITION_FAILED,
    Code.ABORTED.value()             -> HttpResponseStatus.GONE,
    Code.OUT_OF_RANGE.value()        -> HttpResponseStatus.BAD_REQUEST,
    Code.UNIMPLEMENTED.value()       -> HttpResponseStatus.NOT_IMPLEMENTED,
    Code.INTERNAL.value()            -> HttpResponseStatus.INTERNAL_SERVER_ERROR,
    Code.UNAVAILABLE.value()         -> HttpResponseStatus.NOT_ACCEPTABLE,
    Code.DATA_LOSS.value()           -> HttpResponseStatus.PARTIAL_CONTENT,
    Code.UNAUTHENTICATED.value()     -> HttpResponseStatus.UNAUTHORIZED
  )

  def buildFullHttpResponse(
   requestMsg: HttpMessage,
   responseBody: String,
   responseStatus: HttpResponseStatus,
   responseContentType: String
  ): FullHttpResponse = {

    val buf = Unpooled.copiedBuffer(responseBody, StandardCharsets.UTF_8)

    val res = new DefaultFullHttpResponse(
      requestMsg.protocolVersion(),
      responseStatus,
      buf
    )

    res.headers().set(HttpHeaderNames.CONTENT_TYPE, responseContentType)

    HttpUtil.setContentLength(res, buf.readableBytes)
    HttpUtil.setKeepAlive(res, HttpUtil.isKeepAlive(requestMsg))

    res

  }

  def jsonException2GatewayExceptionPF[U]: PartialFunction[Throwable, Try[U]] = {
    case _: NoSuchElementException => Failure(InvalidArgument("Wrong json input. Check proto file"))
    case err: JsonFormatException => Failure(InvalidArgument("Wrong json syntax: " + err.msg))
    case err => Failure(InvalidArgument("Wrong json input. Check proto file. Details: " + err.getMessage))
  }

} 
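
A small usage sketch for buildFullHttpResponse: build a stand-in request (used only for its protocol version and keep-alive flag) and wrap a JSON body into a response. The /health URI and body are illustrative.

import io.netty.handler.codec.http._

import grpcgateway.handlers._

object BuildResponseExample {
  def main(args: Array[String]): Unit = {
    val req = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/health")

    val res = buildFullHttpResponse(
      requestMsg = req,
      responseBody = """{"status":"ok"}""",
      responseStatus = HttpResponseStatus.OK,
      responseContentType = "application/json"
    )

    println(res.status())                                    // 200 OK
    println(res.headers().get(HttpHeaderNames.CONTENT_TYPE)) // application/json
  }
}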
Example 198
Source File: Json4sSerialization.scala    From kafka-serialization   with Apache License 2.0 5 votes
package com.ovoenergy.kafka.serialization.json4s

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.core._
import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer}
import org.json4s.Formats
import org.json4s.native.Serialization.{read, write}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

trait Json4sSerialization {

  def json4sSerializer[T <: AnyRef](implicit jsonFormats: Formats): KafkaSerializer[T] = serializer { (_, data) =>
    val bout = new ByteArrayOutputStream()
    val writer = new OutputStreamWriter(bout, StandardCharsets.UTF_8)

    // TODO Use scala-arm
    try {
      write(data, writer)
      writer.flush()
    } finally {
      writer.close()
    }
    bout.toByteArray
  }

  def json4sDeserializer[T: TypeTag](implicit jsonFormats: Formats): KafkaDeserializer[T] = deserializer { (_, data) =>
    val tt = implicitly[TypeTag[T]]
    implicit val cl = ClassTag[T](tt.mirror.runtimeClass(tt.tpe))
    read[T](new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8))
  }

} 
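
A usage sketch for the trait above: mix it in, provide json4s Formats, and round-trip a payload through the Kafka serializer and deserializer. The Event case class and the topic name are hypothetical.

import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.json4s.Json4sSerialization
import org.json4s.{DefaultFormats, Formats}

case class Event(id: String, value: Int) // hypothetical payload type

object Json4sSerializationExample extends Json4sSerialization {
  implicit val formats: Formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    val serializer = json4sSerializer[Event]
    val deserializer = json4sDeserializer[Event]

    // Kafka (de)serializers take a topic name plus the raw bytes
    val bytes = serializer.serialize("events", Event("a", 1))
    println(new String(bytes, StandardCharsets.UTF_8)) // {"id":"a","value":1}
    println(deserializer.deserialize("events", bytes)) // Event(a,1)
  }
}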
Example 199
Source File: SpraySerialization.scala    From kafka-serialization   with Apache License 2.0 5 votes
package com.ovoenergy.kafka.serialization.spray

import java.io.{ByteArrayOutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer}
import spray.json._
import com.ovoenergy.kafka.serialization.core._

trait SpraySerialization {

  def spraySerializer[T](implicit format: JsonWriter[T]): KafkaSerializer[T] = serializer { (_, data) =>
    val bout = new ByteArrayOutputStream()
    val osw = new OutputStreamWriter(bout, StandardCharsets.UTF_8)

    // TODO use scala-arm
    try {
      osw.write(data.toJson.compactPrint)
      osw.flush()
    } finally {
      osw.close()
    }
    bout.toByteArray
  }

  def sprayDeserializer[T](implicit format: JsonReader[T]): KafkaDeserializer[T] = deserializer { (_, data) =>
    JsonParser(ParserInput(data)).convertTo[T]
  }

} 
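
A usage sketch for the trait above, using spray-json's DefaultJsonProtocol so that Map[String, Int] already has the required JsonWriter and JsonReader instances; the topic name is illustrative.

import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.spray.SpraySerialization
import spray.json.DefaultJsonProtocol._

object SpraySerializationExample extends SpraySerialization {
  def main(args: Array[String]): Unit = {
    val serializer = spraySerializer[Map[String, Int]]
    val deserializer = sprayDeserializer[Map[String, Int]]

    val bytes = serializer.serialize("counts", Map("a" -> 1, "b" -> 2))
    println(new String(bytes, StandardCharsets.UTF_8)) // {"a":1,"b":2}
    println(deserializer.deserialize("counts", bytes)) // Map(a -> 1, b -> 2)
  }
}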
Example 200
Source File: CirceSerialization.scala    From kafka-serialization   with Apache License 2.0 5 votes
package com.ovoenergy.kafka.serialization.circe

import java.nio.charset.StandardCharsets

import cats.syntax.either._
import com.ovoenergy.kafka.serialization.core._
import io.circe.parser._
import io.circe.syntax._
import io.circe.{Decoder, Encoder, Error, Json}
import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer}

private[circe] trait CirceSerialization {

  def circeJsonSerializer[T: Encoder]: KafkaSerializer[T] = serializer { (_, data) =>
    data.asJson.noSpaces.getBytes(StandardCharsets.UTF_8)
  }

  def circeJsonDeserializer[T: Decoder]: KafkaDeserializer[T] = deserializer { (_, data) =>
    (for {
      json <- parse(new String(data, StandardCharsets.UTF_8)): Either[Error, Json]
      t <- json.as[T]: Either[Error, T]
    } yield
      t).fold(error => throw new RuntimeException(s"Deserialization failure: ${error.getMessage}", error), identity _)
  }

}
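
A usage sketch for the trait above. Because the trait is private[circe], the functions are assumed to be exposed through the module's package object (as with the other modules in this library); circe-generic's auto derivation supplies the Encoder and Decoder, and the Event case class is hypothetical.

import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.circe._ // assumed to expose circeJsonSerializer / circeJsonDeserializer
import io.circe.generic.auto._

case class Event(id: String, value: Int) // hypothetical payload type

object CirceSerializationExample {
  def main(args: Array[String]): Unit = {
    val serializer = circeJsonSerializer[Event]
    val deserializer = circeJsonDeserializer[Event]

    val bytes = serializer.serialize("events", Event("a", 1))
    println(new String(bytes, StandardCharsets.UTF_8)) // {"id":"a","value":1}
    println(deserializer.deserialize("events", bytes)) // Event(a,1)
  }
}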