com.google.common.base.Charsets Scala Examples

The following examples show how to use com.google.common.base.Charsets. Each example is taken from an open-source project; the heading above each example names the originating project, source file, and license.
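
For orientation: the pattern common to all of the examples below is passing Charsets.UTF_8 (a java.nio.charset.Charset constant) to any API that accepts a Charset. A minimal round-trip sketch (on Java 7 and later the JDK's java.nio.charset.StandardCharsets.UTF_8 is an equivalent constant, and Guava's documentation recommends preferring it):

import com.google.common.base.Charsets

object CharsetsRoundTrip extends App {
  // encode a String to UTF-8 bytes, then decode them back
  val bytes: Array[Byte] = "héllo".getBytes(Charsets.UTF_8)
  val text: String       = new String(bytes, Charsets.UTF_8)
  assert(text == "héllo")
}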
Example 1
Source File: Schemas.scala    From ratatool   with Apache License 2.0
package com.spotify.ratatool

import com.google.api.client.json.JsonObjectParser
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.services.bigquery.model.TableSchema
import com.google.common.base.Charsets
import org.apache.avro.Schema

object Schemas {

  val avroSchema: Schema =
    new Schema.Parser().parse(this.getClass.getResourceAsStream("/schema.avsc"))
  val simpleAvroSchema: Schema =
    new Schema.Parser().parse(this.getClass.getResourceAsStream("/SimpleRecord.avsc"))
  val evolvedSimpleAvroSchema: Schema =
    new Schema.Parser().parse(this.getClass.getResourceAsStream("/EvolvedSimpleRecord.avsc"))

  val simpleAvroByteFieldSchema: Schema =
    new Schema.Parser().parse(this.getClass.getResourceAsStream("/SimpleByteFieldRecord.avsc"))

  val tableSchema: TableSchema = new JsonObjectParser(new JacksonFactory)
    .parseAndClose(
      this.getClass.getResourceAsStream("/schema.json"),
      Charsets.UTF_8,
      classOf[TableSchema])

} 
Example 2
Source File: SpringConfigController.scala    From izanami   with Apache License 2.0
package controllers

import java.security.MessageDigest

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.google.common.base.Charsets
import controllers.actions.SecuredAuthContext
import domains.Key
import domains.config.{ConfigContext, ConfigService}
import env.IzanamiConfig
import org.apache.commons.codec.binary.Hex
import libs.logs.IzanamiLogger
import play.api.libs.json._
import play.api.mvc._
import libs.http.HttpContext

class SpringConfigController(izanamiConfig: IzanamiConfig,
                             AuthAction: ActionBuilder[SecuredAuthContext, AnyContent],
                             val cc: ControllerComponents)(implicit system: ActorSystem, R: HttpContext[ConfigContext])
    extends AbstractController(cc) {

  import libs.http._

  val digester = MessageDigest.getInstance("SHA-256")

  def byteToHexString(bytes: Array[Byte]): String = String.valueOf(Hex.encodeHex(bytes))

  def raw(rootKey: String, appName: String, profileName: String): Action[Unit] = AuthAction.asyncZio(parse.empty) {
    ctx =>
      val appConfigKey     = Key(s"$rootKey:$appName:$profileName:spring-config")
      val profileConfigKey = Key(s"$rootKey:spring-profiles:$profileName:spring-config")
      val globalConfigKey  = Key(s"$rootKey:spring-globals:spring-config")

      val host: String = ctx.request.headers
        .get(izanamiConfig.headerHost)
        .orElse(ctx.request.headers.get("Host"))
        .getOrElse("localhost:9000")

      val result = for {
        app     <- ConfigService.getById(appConfigKey)
        profile <- ConfigService.getById(profileConfigKey)
        global  <- ConfigService.getById(globalConfigKey)
      } yield {
        (app, profile, global) match {
          case (None, None, None) => NotFound(Json.obj("error" -> "No config found !"))
          case _ => {
            val propertySources = JsArray(
              Seq(
                app
                  .map(_.value)
                  .collect { case o: JsObject => o }
                  .map(
                    c =>
                      Json.obj(
                        "name"   -> s"${ctx.request.protocol}://$host/api/configs/$rootKey:$profileName:$appName:spring-config",
                        "source" -> c
                    )
                  ),
                profile
                  .map(_.value)
                  .collect { case o: JsObject => o }
                  .map(
                    c =>
                      Json.obj(
                        "name"   -> s"${ctx.request.protocol}://$host/api/configs/$rootKey:spring-profiles:$profileName:spring-config",
                        "source" -> c
                    )
                  ),
                global
                  .map(_.value)
                  .collect { case o: JsObject => o }
                  .map(
                    c =>
                      Json.obj(
                        "name"   -> s"${ctx.request.protocol}://$host/api/configs/$rootKey:spring-globals:spring-config",
                        "source" -> c
                    )
                  )
              ).flatten
            )
            val payload = Json.obj(
              "name"            -> s"$appName",
              "profiles"        -> Json.arr(s"://$profileName"),
              "label"           -> JsNull,
              "state"           -> JsNull,
              "propertySources" -> propertySources
            )
            IzanamiLogger.debug(s"Spring config request for $rootKey, $appName, $profileName: \n $payload")
            val version: String = byteToHexString(digester.digest(Json.stringify(payload).getBytes(Charsets.UTF_8)))
            Ok(payload ++ Json.obj("version" -> version))
          }
        }
      }
      result.mapError { _ =>
        InternalServerError("")
      }
  }
} 
Example 3
Source File: MustacheViewRenderer.scala    From peregrine   with Apache License 2.0
package io.peregrine.view

import io.peregrine._
import com.github.mustachejava._
import com.google.common.base.Charsets
import com.twitter.mustache._
import com.twitter.util._
import java.io._
import java.util.concurrent.Executors


class PeregrineMustacheFactory(templatePath: String)
  extends DefaultMustacheFactory(templatePath) {

  def invalidateCaches() : Unit = {
    mustacheCache.clear()
    templateCache.clear()
  }
}

private[peregrine] object MustacheViewFactoryHolder {
  val templatePath  = config.templatePath()
  lazy val factory  = new PeregrineMustacheFactory(templatePath)

  factory.setObjectHandler(new ScalaObjectHandler())
  factory.setExecutorService(Executors.newCachedThreadPool)
}

trait MustacheViewRenderer extends ViewRenderer {

  val format = "mustache"

  lazy val location = MustacheViewFactoryHolder.templatePath
  lazy val factory  = MustacheViewFactoryHolder.factory

  def render(templateName: String, view: View) = {
    if (config.env() == "development") {
      factory.invalidateCaches()
    }

    getPath(templateName) match {
      case None            =>
        throw new FileNotFoundException(s"""Template file [$templateName] not found in [
          ${System.getProperty("user.dir")}/app$location,
          ${getClass.getResource("")}
        ]""")

      case Some(reader)  =>

        val mustache = factory.compile(reader, templateName)
        val output   = new StringWriter
        mustache.execute(output, view).flush()
        output.toString
    }
  }

  def getPath(templateName: String): Option[Reader] = {
    val templatePathName = if (location == "/") s"/$templateName.mustache" else s"$location/$templateName.mustache"
    val path = s"${System.getProperty("user.dir")}$templatePathName"
    val file = new File(path)
    if(file.exists && file.isFile) {
      Some(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
    } else {
      Option(getClass.getResourceAsStream(templatePathName)).map(r => new BufferedReader(new InputStreamReader(r)))
    }
  }
}

object MustacheViewRenderer extends MustacheViewRenderer 
Example 4
Source File: example.scala    From aloha   with Apache License 2.0
package me.jrwang.aloha.example

import com.google.common.base.Charsets
import io.netty.buffer.{ByteBuf, Unpooled}
import me.jrwang.aloha.common.{AlohaConf, Logging}
import me.jrwang.aloha.transport.client.{RpcResponseCallback, TransportClient}
import me.jrwang.aloha.transport.server.RpcHandler
import me.jrwang.aloha.transport.{AlohaTransportConf, TransportContext}

object SimpleAlohaServer extends Logging {
  def main(args: Array[String]): Unit = {
    val transportConf = AlohaTransportConf.fromAlohaConf(new AlohaConf(), "rpc")
    val rpcHandler = new RpcHandler {
      override def receive(client: TransportClient, message: ByteBuf, callback: RpcResponseCallback): Unit = {
        logInfo(s"server receive ${message.toString(Charsets.UTF_8)}")
        callback.onSuccess(Unpooled.wrappedBuffer("hello".getBytes))
      }

      override def channelActive(client: TransportClient): Unit = {
        logInfo("server channel active")
      }

      override def channelInactive(client: TransportClient): Unit = {
        logInfo("server channel inactive")
      }

      override def exceptionCaught(cause: Throwable, client: TransportClient): Unit = {
        logInfo(s"server exception$cause")
      }
    }

    new TransportContext(transportConf, rpcHandler)
      .createServer("localhost", 9999)
      .awaitTermination()
  }
}

object SimpleAlohaClient extends Logging {
  def main(args: Array[String]): Unit = {
    val transportConf = AlohaTransportConf.fromAlohaConf(new AlohaConf(), "rpc")
    val rpcHandler = new RpcHandler {
      override def receive(client: TransportClient, message: ByteBuf, callback: RpcResponseCallback): Unit = {
        logInfo(s"client receive ${message.toString(Charsets.UTF_8)}")
        callback.onSuccess(Unpooled.wrappedBuffer("hello".getBytes))
      }

      override def channelActive(client: TransportClient): Unit = {
        logInfo("client channel active")
      }

      override def channelInactive(client: TransportClient): Unit = {
        logInfo("client channel inactive")
      }

      override def exceptionCaught(cause: Throwable, client: TransportClient): Unit = {
        logInfo(s"client exception$cause")
      }
    }

    val client = new TransportContext(transportConf, rpcHandler).createClientFactory()
      .createClient("localhost", 9999)

    client.sendRpc(Unpooled.wrappedBuffer("hello world.".getBytes), new RpcResponseCallback {
      override def onSuccess(response: ByteBuf): Unit = {
        logInfo(s"rpc request success with ${response.toString(Charsets.UTF_8)}")
      }

      override def onFailure(e: Throwable): Unit = {
        logInfo(s"rpc request failed $e")
      }
    })

    client.channel.closeFuture().sync()
  }
} 
Example 5
Source File: package.scala    From Waves   with MIT License
package com.wavesplatform

import java.security.SecureRandom

import com.google.common.base.Charsets
import com.google.protobuf.ByteString
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.state.ByteStr._
import com.wavesplatform.common.utils.Base58
import org.apache.commons.lang3.time.DurationFormatUtils
import play.api.libs.json._

import scala.annotation.tailrec

package object utils extends ScorexLogging {

  private val BytesMaxValue  = 256
  private val Base58MaxValue = 58

  private val BytesLog = math.log(BytesMaxValue)
  private val BaseLog  = math.log(Base58MaxValue)

  def base58Length(byteArrayLength: Int): Int = math.ceil(BytesLog / BaseLog * byteArrayLength).toInt

  def forceStopApplication(reason: ApplicationStopReason = Default): Unit =
    System.exit(reason.code)

  def humanReadableSize(bytes: Long, si: Boolean = true): String = {
    val (baseValue, unitStrings) =
      if (si)
        (1000, Vector("B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"))
      else
        (1024, Vector("B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"))

    @tailrec
    def getExponent(curBytes: Long, baseValue: Int, curExponent: Int = 0): Int =
      if (curBytes < baseValue) curExponent
      else {
        val newExponent = 1 + curExponent
        getExponent(curBytes / (baseValue * newExponent), baseValue, newExponent)
      }

    val exponent   = getExponent(bytes, baseValue)
    val divisor    = Math.pow(baseValue, exponent)
    val unitString = unitStrings(exponent)

    f"${bytes / divisor}%.1f $unitString"
  }

  def humanReadableDuration(duration: Long): String =
    DurationFormatUtils.formatDurationHMS(duration)

  implicit class Tap[A](a: A) {
    def tap(g: A => Unit): A = {
      g(a)
      a
    }
  }

  def randomBytes(howMany: Int = 32): Array[Byte] = {
    val r = new Array[Byte](howMany)
    new SecureRandom().nextBytes(r) // overwrites r with random bytes
    r
  }

  implicit val byteStrFormat: Format[ByteStr] = new Format[ByteStr] {
    override def writes(o: ByteStr): JsValue = JsString(o.toString)

    override def reads(json: JsValue): JsResult[ByteStr] = json match {
      case JsString(v) if v.startsWith("base64:") =>
        decodeBase64(v.substring(7)).fold(e => JsError(s"Error parsing base64: ${e.getMessage}"), b => JsSuccess(b))
      case JsString(v) if v.length > Base58.defaultDecodeLimit => JsError(s"Length ${v.length} exceeds maximum length of 192")
      case JsString(v)                                         => decodeBase58(v).fold(e => JsError(s"Error parsing base58: ${e.getMessage}"), b => JsSuccess(b))
      case _                                                   => JsError("Expected JsString")
    }
  }

  implicit class StringBytes(val s: String) extends AnyVal {
    def utf8Bytes: Array[Byte]   = s.getBytes(Charsets.UTF_8)
    def toByteString: ByteString = ByteString.copyFromUtf8(s)
  }
} 
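
The StringBytes implicit class at the end of this package object is what the next example relies on for utf8Bytes; a minimal sketch of the call site, assuming the package object is in scope:

import com.wavesplatform.utils._

val nameBytes: Array[Byte] = "my-node".utf8Bytes // same as "my-node".getBytes(Charsets.UTF_8)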
Example 6
Source File: Handshake.scala    From Waves   with MIT License
package com.wavesplatform.network

import java.net.{InetAddress, InetSocketAddress}

import com.google.common.base.Charsets
import io.netty.buffer.ByteBuf
import com.wavesplatform.utils._

case class Handshake(applicationName: String,
                     applicationVersion: (Int, Int, Int),
                     nodeName: String,
                     nodeNonce: Long,
                     declaredAddress: Option[InetSocketAddress]) {
  def encode(out: ByteBuf): out.type = {
    val applicationNameBytes = applicationName.utf8Bytes
    require(applicationNameBytes.length <= Byte.MaxValue, "The application name is too long!")
    out.writeByte(applicationNameBytes.length)
    out.writeBytes(applicationNameBytes)

    out.writeInt(applicationVersion._1)
    out.writeInt(applicationVersion._2)
    out.writeInt(applicationVersion._3)

    val nodeNameBytes = nodeName.utf8Bytes
    require(nodeNameBytes.length <= Byte.MaxValue, "A node name is too long!")
    out.writeByte(nodeNameBytes.length)
    out.writeBytes(nodeNameBytes)

    out.writeLong(nodeNonce)

    val peer = for {
      inetAddress <- declaredAddress
      address     <- Option(inetAddress.getAddress)
    } yield (address.getAddress, inetAddress.getPort)

    peer match {
      case None => out.writeInt(0)
      case Some((addressBytes, peerPort)) =>
        out.writeInt(addressBytes.length + Integer.BYTES)
        out.writeBytes(addressBytes)
        out.writeInt(peerPort)
    }

    out.writeLong(System.currentTimeMillis() / 1000)
    out
  }
}

object Handshake {
  class InvalidHandshakeException(msg: String) extends IllegalArgumentException(msg)

  def decode(in: ByteBuf): Handshake = {
    val appNameSize = in.readByte()

    if (appNameSize < 0 || appNameSize > Byte.MaxValue) {
      throw new InvalidHandshakeException(s"An invalid application name's size: $appNameSize")
    }
    val appName    = in.readSlice(appNameSize).toString(Charsets.UTF_8)
    val appVersion = (in.readInt(), in.readInt(), in.readInt())

    val nodeNameSize = in.readByte()
    if (nodeNameSize < 0 || nodeNameSize > Byte.MaxValue) {
      throw new InvalidHandshakeException(s"An invalid node name's size: $nodeNameSize")
    }
    val nodeName = in.readSlice(nodeNameSize).toString(Charsets.UTF_8)

    val nonce = in.readLong()

    val declaredAddressLength = in.readInt()
    // 0 for no declared address, 8 for ipv4 address + port, 20 for ipv6 address + port
    if (declaredAddressLength != 0 && declaredAddressLength != 8 && declaredAddressLength != 20) {
      throw new InvalidHandshakeException(s"An invalid declared address length: $declaredAddressLength")
    }
    val isa =
      if (declaredAddressLength == 0) None
      else {
        val addressBytes = new Array[Byte](declaredAddressLength - Integer.BYTES)
        in.readBytes(addressBytes)
        val address = InetAddress.getByAddress(addressBytes)
        val port    = in.readInt()
        Some(new InetSocketAddress(address, port))
      }
    in.readLong() // time is ignored

    Handshake(appName, appVersion, nodeName, nonce, isa)
  }
} 
Example 7
Source File: FlumeStreamSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.streaming.flume

import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
import scala.concurrent.duration._
import scala.language.postfixOps

import com.google.common.base.Charsets
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.socket.SocketChannel
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.handler.codec.compression._
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._

import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Milliseconds, StreamingContext, TestOutputStream}

class FlumeStreamSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
  val conf = new SparkConf().setMaster("local[4]").setAppName("FlumeStreamSuite")
  var ssc: StreamingContext = null

  test("flume input stream") {
    testFlumeStream(testCompression = false)
  }

  test("flume input compressed stream") {
    testFlumeStream(testCompression = true)
  }

  
  private class CompressionChannelFactory(compressionLevel: Int)
    extends NioClientSocketChannelFactory {

    override def newChannel(pipeline: ChannelPipeline): SocketChannel = {
      val encoder = new ZlibEncoder(compressionLevel)
      pipeline.addFirst("deflater", encoder)
      pipeline.addFirst("inflater", new ZlibDecoder())
      super.newChannel(pipeline)
    }
  }
} 
Example 8
Source File: LibSVMRelationSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.ml.source.libsvm

import java.io.File

import com.google.common.base.Charsets
import com.google.common.io.Files

import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.util.Utils

class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
  var tempDir: File = _
  var path: String = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val lines =
      """
        |1 1:1.0 3:2.0 5:3.0
        |0
        |0 2:4.0 4:5.0 6:6.0
      """.stripMargin
    tempDir = Utils.createTempDir()
    val file = new File(tempDir, "part-00000")
    Files.write(lines, file, Charsets.US_ASCII)
    path = tempDir.toURI.toString
  }

  override def afterAll(): Unit = {
    Utils.deleteRecursively(tempDir)
    super.afterAll()
  }

  test("select as sparse vector") {
    val df = sqlContext.read.format("libsvm").load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }

  test("select as dense vector") {
    val df = sqlContext.read.format("libsvm").options(Map("vectorType" -> "dense"))
      .load(path)
    assert(df.columns(0) == "label")
    assert(df.columns(1) == "features")
    assert(df.count() == 3)
    val row1 = df.first()
    assert(row1.getDouble(0) == 1.0)
    val v = row1.getAs[DenseVector](1)
    assert(v == Vectors.dense(1.0, 0.0, 2.0, 0.0, 3.0, 0.0))
  }

  test("select a vector with specifying the longer dimension") {
    val df = sqlContext.read.option("numFeatures", "100").format("libsvm")
      .load(path)
    val row1 = df.first()
    val v = row1.getAs[SparseVector](1)
    assert(v == Vectors.sparse(100, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
  }
} 
Example 9
Source File: SparkPodInitContainerSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.deploy.k8s

import java.io.File
import java.util.UUID

import com.google.common.base.Charsets
import com.google.common.io.Files
import org.mockito.Mockito
import org.scalatest.BeforeAndAfter
import org.scalatest.mockito.MockitoSugar._

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.util.Utils

class SparkPodInitContainerSuite extends SparkFunSuite with BeforeAndAfter {

  private val DOWNLOAD_JARS_SECRET_LOCATION = createTempFile("txt")
  private val DOWNLOAD_FILES_SECRET_LOCATION = createTempFile("txt")

  private var downloadJarsDir: File = _
  private var downloadFilesDir: File = _
  private var downloadJarsSecretValue: String = _
  private var downloadFilesSecretValue: String = _
  private var fileFetcher: FileFetcher = _

  override def beforeAll(): Unit = {
    downloadJarsSecretValue = Files.toString(
      new File(DOWNLOAD_JARS_SECRET_LOCATION), Charsets.UTF_8)
    downloadFilesSecretValue = Files.toString(
      new File(DOWNLOAD_FILES_SECRET_LOCATION), Charsets.UTF_8)
  }

  before {
    downloadJarsDir = Utils.createTempDir()
    downloadFilesDir = Utils.createTempDir()
    fileFetcher = mock[FileFetcher]
  }

  after {
    downloadJarsDir.delete()
    downloadFilesDir.delete()
  }

  test("Downloads from remote server should invoke the file fetcher") {
    val sparkConf = getSparkConfForRemoteFileDownloads
    val initContainerUnderTest = new SparkPodInitContainer(sparkConf, fileFetcher)
    initContainerUnderTest.run()
    Mockito.verify(fileFetcher).fetchFile("http://localhost:9000/jar1.jar", downloadJarsDir)
    Mockito.verify(fileFetcher).fetchFile("hdfs://localhost:9000/jar2.jar", downloadJarsDir)
    Mockito.verify(fileFetcher).fetchFile("http://localhost:9000/file.txt", downloadFilesDir)
  }

  private def getSparkConfForRemoteFileDownloads: SparkConf = {
    new SparkConf(true)
      .set(INIT_CONTAINER_REMOTE_JARS,
        "http://localhost:9000/jar1.jar,hdfs://localhost:9000/jar2.jar")
      .set(INIT_CONTAINER_REMOTE_FILES,
        "http://localhost:9000/file.txt")
      .set(JARS_DOWNLOAD_LOCATION, downloadJarsDir.getAbsolutePath)
      .set(FILES_DOWNLOAD_LOCATION, downloadFilesDir.getAbsolutePath)
  }

  private def createTempFile(extension: String): String = {
    val dir = Utils.createTempDir()
    val file = new File(dir, s"${UUID.randomUUID().toString}.$extension")
    Files.write(UUID.randomUUID().toString, file, Charsets.UTF_8)
    file.getAbsolutePath
  }
} 
Example 10
Source File: SparkKubernetesClientFactory.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.deploy.k8s

import java.io.File

import com.google.common.base.Charsets
import com.google.common.io.Files
import io.fabric8.kubernetes.client.{ConfigBuilder, DefaultKubernetesClient, KubernetesClient}
import io.fabric8.kubernetes.client.utils.HttpClientUtils
import okhttp3.Dispatcher

import org.apache.spark.SparkConf
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.util.ThreadUtils


private[spark] object SparkKubernetesClientFactory {

  def createKubernetesClient(
      master: String,
      namespace: Option[String],
      kubernetesAuthConfPrefix: String,
      sparkConf: SparkConf,
      defaultServiceAccountToken: Option[File],
      defaultServiceAccountCaCert: Option[File]): KubernetesClient = {
    val oauthTokenFileConf = s"$kubernetesAuthConfPrefix.$OAUTH_TOKEN_FILE_CONF_SUFFIX"
    val oauthTokenConf = s"$kubernetesAuthConfPrefix.$OAUTH_TOKEN_CONF_SUFFIX"
    val oauthTokenFile = sparkConf.getOption(oauthTokenFileConf)
      .map(new File(_))
      .orElse(defaultServiceAccountToken)
    val oauthTokenValue = sparkConf.getOption(oauthTokenConf)
    KubernetesUtils.requireNandDefined(
      oauthTokenFile,
      oauthTokenValue,
      s"Cannot specify OAuth token through both a file $oauthTokenFileConf and a " +
        s"value $oauthTokenConf.")

    val caCertFile = sparkConf
      .getOption(s"$kubernetesAuthConfPrefix.$CA_CERT_FILE_CONF_SUFFIX")
      .orElse(defaultServiceAccountCaCert.map(_.getAbsolutePath))
    val clientKeyFile = sparkConf
      .getOption(s"$kubernetesAuthConfPrefix.$CLIENT_KEY_FILE_CONF_SUFFIX")
    val clientCertFile = sparkConf
      .getOption(s"$kubernetesAuthConfPrefix.$CLIENT_CERT_FILE_CONF_SUFFIX")
    val dispatcher = new Dispatcher(
      ThreadUtils.newDaemonCachedThreadPool("kubernetes-dispatcher"))
    val config = new ConfigBuilder()
      .withApiVersion("v1")
      .withMasterUrl(master)
      .withWebsocketPingInterval(0)
      .withOption(oauthTokenValue) {
        (token, configBuilder) => configBuilder.withOauthToken(token)
      }.withOption(oauthTokenFile) {
        (file, configBuilder) =>
            configBuilder.withOauthToken(Files.toString(file, Charsets.UTF_8))
      }.withOption(caCertFile) {
        (file, configBuilder) => configBuilder.withCaCertFile(file)
      }.withOption(clientKeyFile) {
        (file, configBuilder) => configBuilder.withClientKeyFile(file)
      }.withOption(clientCertFile) {
        (file, configBuilder) => configBuilder.withClientCertFile(file)
      }.withOption(namespace) {
        (ns, configBuilder) => configBuilder.withNamespace(ns)
      }.build()
    val baseHttpClient = HttpClientUtils.createHttpClient(config)
    val httpClientWithCustomDispatcher = baseHttpClient.newBuilder()
      .dispatcher(dispatcher)
      .build()
    new DefaultKubernetesClient(httpClientWithCustomDispatcher, config)
  }

  private implicit class OptionConfigurableConfigBuilder(val configBuilder: ConfigBuilder)
    extends AnyVal {

    def withOption[T]
        (option: Option[T])
        (configurator: ((T, ConfigBuilder) => ConfigBuilder)): ConfigBuilder = {
      option.map { opt =>
        configurator(opt, configBuilder)
      }.getOrElse(configBuilder)
    }
  }
} 
Example 11
Source File: FunnelDerivation.scala    From magnolify   with Apache License 2.0
package magnolify.guava.semiauto

import com.google.common.base.Charsets
import com.google.common.hash.{Funnel, Funnels, PrimitiveSink}
import magnolia._

import scala.language.experimental.macros

object FunnelDerivation {
  type Typeclass[T] = Funnel[T]

  def combine[T](caseClass: ReadOnlyCaseClass[Typeclass, T]): Typeclass[T] = new Funnel[T] {
    override def funnel(from: T, into: PrimitiveSink): Unit =
      if (caseClass.parameters.isEmpty) {
        into.putString(caseClass.typeName.short, Charsets.UTF_8)
      } else {
        caseClass.parameters.foreach { p =>
          // inject index to distinguish cases like `(Some(1), None)` and `(None, Some(1))`
          into.putInt(p.index)
          p.typeclass.funnel(p.dereference(from), into)
        }
      }
  }

  def dispatch[T](sealedTrait: SealedTrait[Typeclass, T]): Typeclass[T] = new Funnel[T] {
    override def funnel(from: T, into: PrimitiveSink): Unit =
      sealedTrait.dispatch(from)(sub => sub.typeclass.funnel(sub.cast(from), into))
  }

  implicit def apply[T]: Typeclass[T] = macro Magnolia.gen[T]

  def by[T, S](f: T => S)(implicit fnl: Funnel[S]): Funnel[T] = new Funnel[T] {
    override def funnel(from: T, into: PrimitiveSink): Unit = fnl.funnel(f(from), into)
  }
}

trait FunnelImplicits {
  private def funnel[T](f: (PrimitiveSink, T) => Unit): Funnel[T] = new Funnel[T] {
    override def funnel(from: T, into: PrimitiveSink): Unit = f(into, from)
  }

  implicit val intFunnel: Funnel[Int] = Funnels.integerFunnel().asInstanceOf[Funnel[Int]]
  implicit val longFunnel: Funnel[Long] = Funnels.longFunnel().asInstanceOf[Funnel[Long]]
  implicit val bytesFunnel: Funnel[Array[Byte]] = Funnels.byteArrayFunnel()
  implicit val charSequenceFunnel: Funnel[CharSequence] = Funnels.unencodedCharsFunnel()

  implicit val booleanFunnel: Funnel[Boolean] = funnel[Boolean](_.putBoolean(_))
  implicit val stringFunnel: Funnel[String] = funnel[String](_.putString(_, Charsets.UTF_8))
  implicit val byteFunnel: Funnel[Byte] = funnel[Byte](_.putByte(_))
  implicit val charFunnel: Funnel[Char] = funnel[Char](_.putChar(_))
  implicit val shortFunnel: Funnel[Short] = funnel[Short](_.putShort(_))

  // There is an implicit Option[T] => Iterable[T]
  implicit def iterableFunnel[T, C[_]](implicit
    fnl: Funnel[T],
    ti: C[T] => Iterable[T]
  ): Funnel[C[T]] =
    funnel { (sink, from) =>
      var i = 0
      from.foreach { x =>
        fnl.funnel(x, sink)
        i += 1
      }
      // inject size to distinguish `None`, `Some("")`, and `List("", "", ...)`
      sink.putInt(i)
    }
} 
Example 12
Source File: FlumeStreamSuite.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.streaming.flume

import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
import scala.concurrent.duration._
import scala.language.postfixOps

import com.google.common.base.Charsets
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.socket.SocketChannel
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.handler.codec.compression._
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._

import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Milliseconds, StreamingContext, TestOutputStream}

class FlumeStreamSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
  // note: the test methods of the full suite are omitted from this excerpt; only the
  // channel factory used to exercise compressed Flume streams is shown

  private class CompressionChannelFactory(compressionLevel: Int)
    extends NioClientSocketChannelFactory {

    override def newChannel(pipeline: ChannelPipeline): SocketChannel = {
      val encoder = new ZlibEncoder(compressionLevel)
      pipeline.addFirst("deflater", encoder)
      pipeline.addFirst("inflater", new ZlibDecoder())
      super.newChannel(pipeline)
    }
  }
} 
Example 13
Source File: PrepareStatementCache.scala    From quill   with Apache License 2.0
package io.getquill.context.cassandra

import java.util.concurrent.Callable

import com.google.common.base.Charsets
import com.google.common.cache.CacheBuilder
import com.google.common.hash.Hashing

class PrepareStatementCache[V <: AnyRef](size: Long) {

  private val cache =
    CacheBuilder
      .newBuilder
      .maximumSize(size)
      .build[java.lang.Long, V]()

  private val hasher = Hashing.goodFastHash(128)

  def apply(stmt: String)(prepare: String => V): V = {
    cache.get(
      hash(stmt),
      new Callable[V] {
        override def call: V = prepare(stmt)
      }
    )
  }

  def invalidate(stmt: String): Unit = cache.invalidate(hash(stmt))

  private def hash(string: String): java.lang.Long = {
    hasher
      .hashString(string, Charsets.UTF_8)
      .asLong()
  }

} 
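
A minimal usage sketch of the cache above; the identity-style prepare function here is a hypothetical stand-in (in the Cassandra context it would wrap the driver's prepare call):

val cache = new PrepareStatementCache[String](100)

// prepares and caches on first access, returns the cached value afterwards
val prepared = cache("SELECT * FROM users WHERE id = ?")(stmt => stmt)

// drop the cached entry so the next access re-prepares it
cache.invalidate("SELECT * FROM users WHERE id = ?")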
Example 14
Source File: FlumeStreamSuite.scala    From iolap   with Apache License 2.0
package org.apache.spark.streaming.flume

import java.net.{InetSocketAddress, ServerSocket}
import java.nio.ByteBuffer

import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
import scala.concurrent.duration._
import scala.language.postfixOps

import com.google.common.base.Charsets
import org.apache.avro.ipc.NettyTransceiver
import org.apache.avro.ipc.specific.SpecificRequestor
import org.apache.commons.lang3.RandomUtils
import org.apache.flume.source.avro
import org.apache.flume.source.avro.{AvroFlumeEvent, AvroSourceProtocol}
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.socket.SocketChannel
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
import org.jboss.netty.handler.codec.compression._
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._

import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Milliseconds, StreamingContext, TestOutputStream}
import org.apache.spark.util.Utils

class FlumeStreamSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
  val conf = new SparkConf().setMaster("local[4]").setAppName("FlumeStreamSuite")

  var ssc: StreamingContext = null
  var transceiver: NettyTransceiver = null

  after {
    if (ssc != null) {
      ssc.stop()
    }
    if (transceiver != null) {
      transceiver.close()
    }
  }

  test("flume input stream") {
    testFlumeStream(testCompression = false)
  }

  test("flume input compressed stream") {
    testFlumeStream(testCompression = true)
  }

  
  private class CompressionChannelFactory(compressionLevel: Int)
    extends NioClientSocketChannelFactory {

    override def newChannel(pipeline: ChannelPipeline): SocketChannel = {
      val encoder = new ZlibEncoder(compressionLevel)
      pipeline.addFirst("deflater", encoder)
      pipeline.addFirst("inflater", new ZlibDecoder())
      super.newChannel(pipeline)
    }
  }
} 
Example 15
Source File: SummaryManager.scala    From Argus-SAF   with Apache License 2.0
package org.argus.jawa.flow.summary

import com.google.common.base.Charsets
import com.google.common.io.Resources
import org.argus.jawa.core._
import org.argus.jawa.core.elements.Signature
import org.argus.jawa.core.util._
import org.argus.jawa.flow.summary.susaf.HeapSummaryProcessor
import org.argus.jawa.flow.summary.susaf.parser.SummaryParser
import org.argus.jawa.flow.summary.susaf.rule.HeapSummary

import scala.reflect.{ClassTag, classTag}


class SummaryManager(global: Global) {

  //  Map from signature to Summary
  private val summaries: MMap[Signature, MSet[Summary[_]]] = mmapEmpty
  private val heapSummariesMatchFileAndSubsig: MMap[String, IMap[String, HeapSummary]] = mmapEmpty

  def register(signature: Signature, summary: Summary[_]): Unit = summaries.getOrElseUpdate(signature, msetEmpty) += summary

  def register(name: String, suCode: String, fileAndSubsigMatch: Boolean): IMap[Signature, Summary[_]] = {
    val su = SummaryParser(suCode)
    su.defaultTypes.foreach { case (baseType, fields) =>
      HeapSummaryProcessor.addDefaultTypes(global, baseType, fields)
    }
    if(fileAndSubsigMatch) {
      val s = su.summaries.map{ case (k, v) => k.getSubSignature -> v}
      this.heapSummariesMatchFileAndSubsig(name) = s
    } else {
      su.summaries.foreach { case (signature, summary) =>
        register(signature, summary)
      }
    }
    su.summaries
  }

  def contains(sig: Signature): Boolean = summaries.contains(sig)
  def contains(file: String, subsig: String): Boolean = heapSummariesMatchFileAndSubsig.get(file) match {
    case Some(map) => map.contains(subsig)
    case None => false
  }

  def getSummaries(sig: Signature): ISet[Summary[_]] = summaries.getOrElse(sig, msetEmpty).toSet

  def getSummary[T <: Summary[_] : ClassTag](sig: Signature): Option[T] = {
    summaries.get(sig) match {
      case Some(sus) =>
        sus.foreach {
          case t if classTag[T].runtimeClass.isInstance(t) => return Some(t.asInstanceOf[T])
          case _ =>
        }
        None
      case None => None
    }
  }

  def getHeapSummaryPb(sig: Signature): Option[summary.HeapSummary] = {
    getSummary[HeapSummary](sig) match {
      case Some(su) =>
        Some(SummaryToProto.toProto(su))
      case None =>
        None
    }
  }

  def registerFile(safsuPath: String, name: String, fileAndSubsigMatch: Boolean): Unit = {
    val url = Resources.getResource(safsuPath)
    val code = Resources.toString(url, Charsets.UTF_8)
    register(name, code, fileAndSubsigMatch)
  }

  def registerExternalFile(safsuPath: FileResourceUri, name: String, fileAndSubsigMatch: Boolean): Unit = {
    val url = FileUtil.toFile(safsuPath).toURI.toURL
    val code = Resources.toString(url, Charsets.UTF_8)
    register(name, code, fileAndSubsigMatch)
  }

  def getSummariesByFile(name: String): IMap[String, HeapSummary] = {
    this.heapSummariesMatchFileAndSubsig.getOrElse(name, imapEmpty)
  }
} 
Example 16
Source File: PubSubSourceIT.scala    From akka-cloudpubsub   with Apache License 2.0
package com.qubit.pubsub.akka

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.{ActorMaterializer, Attributes, Graph, SourceShape}
import com.google.common.base.Charsets
import com.qubit.pubsub.PubSubIntegrationTest
import com.qubit.pubsub.akka.attributes.{
  PubSubClientAttribute,
  PubSubStageBufferSizeAttribute
}
import com.qubit.pubsub.client.PubSubMessage
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}

import scala.concurrent.Await
import scala.concurrent.duration._

class PubSubSourceIT
    extends FunSuite
    with Matchers
    with BeforeAndAfterAll
    with PubSubIntegrationTest {

  implicit val actorSystem = ActorSystem("pubsub-stream-test")
  implicit val materializer = ActorMaterializer()

  override def testName = "pubsubsource"

  override def beforeAll(): Unit = {
    Await.ready(client.createTopic(testTopic), timeout)
    Await
      .ready(client.createSubscription(testSubscription, testTopic), timeout)
  }

  override def afterAll(): Unit = {
    actorSystem.terminate()
    Await.ready(client.deleteSubscription(testSubscription), timeout)
    Await.ready(client.deleteTopic(testTopic), timeout)
  }

  test("PubSubSource success") {
    val data = Range(0, 100)
      .map(i => s"msg$i".getBytes(Charsets.UTF_8))
      .map(PubSubMessage(_))
    Await.ready(client.publish(testTopic, data), timeout)

    val sourceGraph: Graph[SourceShape[PubSubMessage], NotUsed] =
      new PubSubSource(testSubscription, 1.millisecond)
    val sourceAttributes = Attributes(
      List(PubSubClientAttribute(client), PubSubStageBufferSizeAttribute(30)))
    val pubsubSource =
      Source.fromGraph(sourceGraph).withAttributes(sourceAttributes)

    val msgList = pubsubSource
      .runWith(TestSink.probe[PubSubMessage])
      .request(100)
      .expectNextN(100)

    msgList should not be (null)
    msgList should have size (100)
    msgList
      .map(m => new String(m.payload, Charsets.UTF_8))
      .forall(_.startsWith("msg")) should be(true)
  }
} 
Example 17
Source File: PubSubSinkIT.scala    From akka-cloudpubsub   with Apache License 2.0
package com.qubit.pubsub.akka

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Keep, Sink}
import akka.stream.testkit.scaladsl.TestSource
import akka.stream.{ActorMaterializer, Attributes, Graph, SinkShape}
import com.google.common.base.Charsets
import com.qubit.pubsub.PubSubIntegrationTest
import com.qubit.pubsub.akka.attributes.{
  PubSubClientAttribute,
  PubSubStageBufferSizeAttribute
}
import com.qubit.pubsub.client.PubSubMessage
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try

class PubSubSinkIT
    extends FunSuite
    with Matchers
    with BeforeAndAfterAll
    with PubSubIntegrationTest {

  implicit val actorSystem = ActorSystem("pubsub-stream-test")
  implicit val materializer = ActorMaterializer()

  override def testName = "pubsubsink"

  override def beforeAll(): Unit = {
    Await.ready(client.createTopic(testTopic), timeout)
    Await
      .ready(client.createSubscription(testSubscription, testTopic), timeout)
  }

  override def afterAll(): Unit = {
    actorSystem.terminate()
    Await.ready(client.deleteSubscription(testSubscription), timeout)
    Await.ready(client.deleteTopic(testTopic), timeout)
  }

  test("PubSubSink success") {
    val sinkGraph: Graph[SinkShape[PubSubMessage], NotUsed] =
      new PubSubSink(testTopic, 1.second)
    val sinkAttributes = Attributes(
      List(PubSubClientAttribute(client), PubSubStageBufferSizeAttribute(30)))
    val pubsubSink = Sink.fromGraph(sinkGraph).withAttributes(sinkAttributes)

    val (pub, _) = TestSource
      .probe[Array[Byte]]
      .map(PubSubMessage(_))
      .toMat(pubsubSink)(Keep.both)
      .run()

    Range(0, 100)
      .map(i => s"xxx$i".getBytes(Charsets.UTF_8))
      .foreach(pub.sendNext)
    pub.sendComplete()

    // wait for buffers to flush
    Try(Thread.sleep(1000))

    val output = Await.result(client.pull(testSubscription, 100), timeout)
    client.ack(testSubscription, output.map(m => m.ackId))

    output should not be (null)
    output should have size (100)
    output
      .map(m => new String(m.payload.payload, Charsets.UTF_8))
      .forall(_.startsWith("xxx")) should be(true)
  }
} 
Example 18
Source File: AuthenticationFilter.scala    From maha   with Apache License 2.0
// Copyright 2018, Oath Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package filter
import akka.stream.Materializer
import com.google.common.base.Charsets
import com.google.common.hash.Hashing
import com.yahoo.maha.core.auth.{AuthValidator, ValidationResult}
import play.api.Logger
import play.api.mvc._

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

class AuthenticationFilter(authValidator: AuthValidator)(implicit val mat: Materializer) extends Filter {

  private val routesWhichRequireAuth : Set[String] = Set("/segments", "/overlord/workers", "/lookups", "/kill/segments")

  def apply(nextFilter: RequestHeader => Future[Result])
           (requestHeader: RequestHeader): Future[Result] = {

    if(routesWhichRequireAuth.contains(requestHeader.path)) {
      Try {
        val result: ValidationResult = authValidator.validate(requestHeader)
        result
      } match {
        case Success(result) =>
          val requestHeaderWithId = requestHeader.copy(tags = requestHeader.tags + ("X-Request-Id" -> generateRequestId(requestHeader))
            + ("userId" -> result.user.getOrElse("Authorized User")))
          nextFilter(requestHeaderWithId)
        case Failure(e) =>
          Logger.error(s"Exception while authenticating user", e)
          val result: Result = authValidator.handleAuthFailure(requestHeader)
          Future.successful(result)
      }
    } else {
      Logger.debug(s"no auth required for path : ${requestHeader.path}")
      nextFilter(requestHeader)
    }
  }

  private def generateRequestId(requestHeader: RequestHeader): String = {
    return s" ${Hashing.goodFastHash(128).newHasher.putString(requestHeader.path + requestHeader.queryString, Charsets.UTF_8).hash.asLong}-${System.nanoTime}"
  }

} 
Example 19
Source File: config.scala    From spark-integration   with Apache License 2.0
package org.apache.spark.deploy.k8s.integrationtest

import java.io.File

import com.google.common.base.Charsets
import com.google.common.io.Files

package object config {
  def getTestImageTag: String = {
    val imageTagFileProp = System.getProperty("spark.kubernetes.test.imageTagFile")
    require(imageTagFileProp != null, "Image tag file must be provided in system properties.")
    val imageTagFile = new File(imageTagFileProp)
    require(imageTagFile.isFile, s"No file found for image tag at ${imageTagFile.getAbsolutePath}.")
    Files.toString(imageTagFile, Charsets.UTF_8).trim
  }

  def getTestImageRepo: String = {
    val imageRepo = System.getProperty("spark.kubernetes.test.imageRepo")
    require(imageRepo != null, "Image repo must be provided in system properties.")
    imageRepo
  }
}