scala.util.Try Scala Examples

The following examples show how to use scala.util.Try, drawn from a range of open-source Scala projects. The project and license each example comes from are noted in its header.
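Before working through the examples, here is a minimal sketch of the core scala.util.Try API that they all build on (every name below is a standard library member):

import scala.util.{Failure, Success, Try}

// Try.apply evaluates its argument and captures any non-fatal exception as a Failure.
val parsed: Try[Int] = Try("42".toInt)    // Success(42)
val broken: Try[Int] = Try("oops".toInt)  // Failure(java.lang.NumberFormatException)

// Common ways to consume a Try:
parsed.getOrElse(0)                                      // 42
broken.toOption                                          // None
broken.recover { case _: NumberFormatException => -1 }   // Success(-1)

parsed match {
  case Success(n) => println(s"parsed $n")
  case Failure(e) => println(s"failed: ${e.getMessage}")
}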
Example 1
Source File: DateTimeTools.scala    From pertax-frontend   with Apache License 2.0
package util

import com.google.inject.{Inject, Singleton}
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
import org.joda.time.{DateTime, _}
import play.api.Logger
import uk.gov.hmrc.time.CurrentTaxYear

import scala.util.{Failure, Success, Try}

import java.time.{LocalDateTime => JavaLDT}

object DateTimeTools extends CurrentTaxYear {

  // Timezone causing problems on dev server
  val defaultTZ = DateTimeZone.forID("Europe/London")
  val unixDateFormat = "yyyy-MM-dd"
  val unixDateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss"
  val humanDateFormat = "dd MMMM yyyy"

  //Returns for example 1516 in March 2016
  def previousAndCurrentTaxYear = previousAndCurrentTaxYearFromGivenYear(current.currentYear)

  def previousAndCurrentTaxYearFromGivenYear(year: Int) =
    (year - 1).toString.takeRight(2) + year.toString.takeRight(2)

  private def formatter(pattern: String): DateTimeFormatter = DateTimeFormat.forPattern(pattern).withZone(defaultTZ)

  def short(dateTime: LocalDate) = formatter("dd/MM/yyyy").print(dateTime)

  def asHumanDateFromUnixDate(unixDate: String): String =
    Try(DateTimeFormat.forPattern(humanDateFormat).print(DateTime.parse(unixDate))) match {
      case Success(v) => v
      case Failure(e) => {
        Logger.warn("Invalid date parse in DateTimeTools.asHumanDateFromUnixDate: " + e)
        unixDate
      }
    }

  def toPaymentDate(dateTime: JavaLDT): LocalDate =
    new LocalDate(dateTime.getYear, dateTime.getMonthValue, dateTime.getDayOfMonth)

  override def now: () => DateTime = DateTime.now
}

@Singleton
class DateTimeTools @Inject()() {

  def showSendTaxReturnByPost = {

    val start = new DateTime(s"${DateTime.now().getYear}-11-01T00:00:00Z")
    val end = new DateTime(s"${DateTime.now().getYear + 1}-01-31T23:59:59Z")
    !DateTime.now().isAfter(start) && DateTime.now().isBefore(end)
  }
} 
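The Try-and-match shape in asHumanDateFromUnixDate is the right choice when the failure branch needs a side effect such as logging. When the failure branch only supplies a fallback value, getOrElse expresses the same thing more concisely; a minimal sketch of that variant:

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import scala.util.Try

def asHumanDateOrElse(unixDate: String): String =
  Try(DateTimeFormat.forPattern("dd MMMM yyyy").print(DateTime.parse(unixDate)))
    .getOrElse(unixDate)  // on any parse failure, fall back to the raw input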
Example 2
Source File: Versions.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

object Versions {

  private val daSdkVersionKey = "da.sdk.version"

  private val errorMsg =
    s"Error: cannot determine DA SDK version, either specify it with '-D${daSdkVersionKey}=<VERSION>' or use 'daml.yaml' with configured 'sdk-version' field."

  val daSdkVersion: String = sys.props
    .get(daSdkVersionKey)
    .getOrElse(
      sdkVersionFromFile(new java.io.File("daml.yaml")).fold(
        error => { println(errorMsg); throw error },
        identity
      )
    )

  println(s"$daSdkVersionKey = ${daSdkVersion: String}")

  private def sdkVersionFromFile(file: java.io.File): Either[io.circe.Error, String] = {
    import io.circe.yaml.parser
    import io.circe.ParsingFailure
    import scala.util.Try
    for {
      str <- Try(sbt.IO.read(file)).toEither.left.map(e =>
        ParsingFailure(s"Cannot read file: $file", e))
      yaml <- parser.parse(str)
      version <- yaml.hcursor.downField("sdk-version").as[String]
    } yield version
  }
} 
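sdkVersionFromFile uses Try.toEither (available since Scala 2.12) to move from exception-throwing file IO into the Either-based error channel that circe's parser and cursor already use, letting one for-comprehension thread both kinds of failure. The bridge in isolation, with a hypothetical readFile helper:

import scala.util.Try

// Hypothetical helper; resource handling is elided for brevity.
def readFile(path: String): Either[String, String] =
  Try(scala.io.Source.fromFile(path).mkString)
    .toEither                       // Try[String] => Either[Throwable, String]
    .left.map(e => s"Cannot read file $path: ${e.getMessage}")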
Example 3
Source File: KeyUtils.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.jwt

import java.io.{File, FileInputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.security.cert.CertificateFactory
import java.security.interfaces.{ECPublicKey, RSAPrivateKey, RSAPublicKey}
import java.security.spec.PKCS8EncodedKeySpec
import java.security.KeyFactory

import com.daml.lf.data.TryOps.Bracket.bracket
import scalaz.Show
import scalaz.syntax.show._

import scala.util.Try

object KeyUtils {
  final case class Error(what: Symbol, message: String)

  object Error {
    implicit val showInstance: Show[Error] =
      Show.shows(e => s"KeyUtils.Error: ${e.what}, ${e.message}")
  }

  private val mimeCharSet = StandardCharsets.ISO_8859_1

  
  // Renders the given RSA public keys as a JWKS (JSON Web Key Set) document.
  def generateJwks(keys: Map[String, RSAPublicKey]): String = {
    def generateKeyEntry(keyId: String, key: RSAPublicKey): String =
      s"""    {
         |      "kid": "$keyId",
         |      "kty": "RSA",
         |      "alg": "RS256",
         |      "use": "sig",
         |      "e": "${java.util.Base64.getUrlEncoder
           .encodeToString(key.getPublicExponent.toByteArray)}",
         |      "n": "${java.util.Base64.getUrlEncoder.encodeToString(key.getModulus.toByteArray)}"
         |    }""".stripMargin

    s"""
       |{
       |  "keys": [
       |${keys.toList.map { case (keyId, key) => generateKeyEntry(keyId, key) }.mkString(",\n")}
       |  ]
       |}
    """.stripMargin
  }
} 
Example 4
Source File: ComponentsFixture.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test

import java.util.concurrent.atomic.AtomicReference

import com.daml.navigator.test.config.Arguments
import com.daml.navigator.test.runner.{HeadNavigator, PackagedDamlc, PackagedSandbox}
import com.typesafe.scalalogging.LazyLogging

import scala.io.Source
import scala.util.{Failure, Success, Try}

class ComponentsFixture(
    val args: Arguments,
    val navigatorPort: Int,
    val sandboxPort: Int,
    val scenario: String
) extends LazyLogging {

  // Callbacks that tear down the processes started by this fixture
  private val killProcs: AtomicReference[List[Unit => Unit]] = new AtomicReference(List.empty)

  private val onlineUrl = s"http://localhost:$navigatorPort/api/about"

  private def get(
      url: String,
      connectTimeout: Int = 1000,
      readTimeout: Int = 1000,
      requestMethod: String = "GET"
  ): String = {
    import java.net.{URL, HttpURLConnection}
    val connection = (new URL(url)).openConnection.asInstanceOf[HttpURLConnection]
    connection.setConnectTimeout(connectTimeout)
    connection.setReadTimeout(readTimeout)
    connection.setRequestMethod(requestMethod)
    val inputStream = connection.getInputStream
    val content = Source.fromInputStream(inputStream).mkString
    if (inputStream != null) inputStream.close()
    content
  }

  def startup(): Try[Unit] = {
    if (args.startComponents) {
      logger.info("Starting the sandbox and the Navigator")
      for {
        (darFile, tempFiles) <- Try(PackagedDamlc.run(args.damlPath))
        sandbox <- Try(PackagedSandbox.runAsync(sandboxPort, darFile, scenario))
        _ = killProcs.updateAndGet(s => sandbox :: s)
        navigator <- Try(
          HeadNavigator.runAsync(args.navConfPAth, args.navigatorDir, navigatorPort, sandboxPort))
        _ = killProcs.updateAndGet(s => navigator :: s)
      } yield { () }
    } else {
      Success(())
    }
  }

  private def retry[R](action: => R, maxRetries: Int, delayMillis: Int): Try[R] = {
    def retry0(count: Int): Try[R] = {
      Try(action) match {
        case Success(r) => Success(r)
        case Failure(e) =>
          if (count > maxRetries) {
            logger.error(
              s"Navigator is not available after $maxRetries retries with $delayMillis millis interval.")
            Failure(e)
          } else {
            logger.info(s"Navigator is not available yet, waiting $delayMillis millis ")
            Thread.sleep(delayMillis.toLong)
            retry0(count + 1)
          }
      }
    }

    retry0(0)
  }

  def waitForNavigator(): Try[Unit] = {
    logger.info(s"Waiting for the Navigator to start up (waiting for $onlineUrl)")
    retry({ get(onlineUrl); () }, 120, 1000)
  }

  def shutdown(): Unit = {
    killProcs.getAndUpdate(procs => {
      procs.foreach(killAction => Try { killAction(()) })
      List.empty
    })
    ()
  }
} 
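startup shows how Try-producing steps compose in a for-comprehension: each <- unwraps a Success, and the first Failure short-circuits everything after it. The same shape reduced to a toy sketch with hypothetical step names:

import scala.util.Try

def openConnection(): Try[Int]     = Try(42)
def readValue(conn: Int): Try[Int] = Try(conn * 2)

val result: Try[Int] = for {
  conn  <- openConnection()
  value <- readValue(conn)  // runs only if openConnection() succeeded
} yield value               // Success(84), or the first Failure encountered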
Example 5
Source File: Publisher.scala    From incubator-s2graph   with Apache License 2.0
import sbt.Keys._
import sbt._
import scala.util.Try
import scala.xml.XML

object Publisher {

  val defaultSettings = Seq(
    publish := {
      streams.value.log.error("use publishSigned task instead, to produce code-signed artifacts")
    },
    publishMavenStyle := true,
    publishTo := {
      if (isSnapshot.value) {
        Some("apache" at "https://repository.apache.org/content/repositories/snapshots")
      } else {
        Some("apache" at "https://repository.apache.org/content/repositories/releases")
      }
    },
    credentials ++= {
      Try(XML.loadFile(new File(System.getProperty("user.home")) / ".m2" / "settings.xml")).toOption.toSeq.flatMap { xml =>
        for (server <- xml \\ "server" if (server \ "id").text == "apache") yield {
          Credentials("Sonatype Nexus Repository Manager", "repository.apache.org", (server \ "username").text, (server \ "password").text)
        }
      }
    },
    pomIncludeRepository := { _ => false },
    pomExtra := {
      <url>https://github.com/apache/incubator-s2graph</url>
      <licenses>
        <license>
          <name>Apache 2</name>
          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
        </license>
      </licenses>
      <scm>
        <connection>scm:git://git.apache.org/incubator-s2graph.git</connection>
        <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-s2graph.git</developerConnection>
        <url>github.com/apache/incubator-s2graph</url>
      </scm>
      <developers>
        <developer>
          <id>s2graph</id>
          <name>S2Graph Team</name>
          <url>http://s2graph.incubator.apache.org/</url>
        </developer>
      </developers>
      <mailingLists>
        <mailingList>
          <name>Dev Mailing List</name>
          <post>[email protected]</post>
          <subscribe>[email protected]</subscribe>
          <unsubscribe>[email protected]</unsubscribe>
        </mailingList>
        <mailingList>
          <name>User Mailing List</name>
          <post>[email protected]</post>
          <subscribe>[email protected]</subscribe>
          <unsubscribe>[email protected]</unsubscribe>
        </mailingList>
        <mailingList>
          <name>Commits Mailing List</name>
          <post>[email protected]</post>
          <subscribe>[email protected]</subscribe>
          <unsubscribe>[email protected]</unsubscribe>
        </mailingList>
      </mailingLists>
    }
  )
} 
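The credentials setting above relies on a handy sbt idiom: Try(...).toOption.toSeq turns a missing or unparsable ~/.m2/settings.xml into an empty credentials list instead of a failed build. The conversion chain on its own, with a placeholder path:

import scala.util.Try
import scala.xml.XML

// Failure => None => Seq.empty; Success(xml) => Some(xml) => Seq(xml)
val serverIds: Seq[String] =
  Try(XML.loadFile("/path/to/settings.xml")).toOption.toSeq.flatMap { xml =>
    (xml \\ "server").map(server => (server \ "id").text)
  }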
Example 6
Source File: package.scala    From sonar-scala   with GNU Lesser General Public License v3.0
package com.mwz.sonar.scala

import scala.util.Try

import cats.instances.int.catsKernelStdGroupForInt
import cats.instances.map.catsKernelStdMonoidForMap
import cats.kernel.Semigroup
import cats.syntax.semigroup.catsSyntaxSemigroup

package object scoverage {

  
  // Combines two FileCoverage values by merging their overall and per-line coverage.
  private[scoverage] final implicit val FileCoverageSemigroup: Semigroup[FileCoverage] =
    new Semigroup[FileCoverage] {
      override def combine(a: FileCoverage, b: FileCoverage): FileCoverage = {
        val mergedFileScoverage = a.fileScoverage |+| b.fileScoverage
        val mergedLinesCoverage = a.linesCoverage |+| b.linesCoverage
        FileCoverage(mergedFileScoverage, mergedLinesCoverage)
      }
    }
} 
Example 7
Source File: Launcher.scala    From sparkplug   with MIT License
package springnz.sparkplug.client

import java.net.{ URLEncoder, InetAddress }

import better.files._
import com.typesafe.config.{ ConfigRenderOptions, Config }
import org.apache.spark.launcher.SparkLauncher
import springnz.sparkplug.util.{ BuilderOps, ConfigUtils, Logging, Pimpers }

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{ Properties, Try }

object Launcher extends Logging {
  import BuilderOps._
  import Pimpers._

  def startProcess(launcher: SparkLauncher): Future[Unit] = {
    val processFuture = Future {
      launcher.launch()
    }.withErrorLog("Failed to launch: ")
    processFuture.flatMap {
      process ⇒ executeProcess(process)
    }
  }

  private def executeProcess(process: Process): Future[Unit] = Future {
    val outStream = scala.io.Source.fromInputStream(process.getInputStream)
    for (line ← outStream.getLines()) {
      log.info(line)
    }
    val errorStream = scala.io.Source.fromInputStream(process.getErrorStream)
    for (line ← errorStream.getLines()) {
      log.info(line)
    }
    process.waitFor()
  }

  def launch(clientAkkaAddress: String,
    jarPath: File,
    mainJarPattern: String,
    mainClass: String,
    sparkConfig: Config,
    akkaRemoteConfig: Option[Config],
    sendJars: Boolean = true): Try[Future[Unit]] = Try {

    val fullExtraJarFolder = jarPath.pathAsString

    val sparkHome = Properties.envOrNone("SPARK_HOME")
    val sparkMaster = Properties.envOrElse("SPARK_MASTER", s"spark://${InetAddress.getLocalHost.getHostAddress}:7077")
    log.debug(s"Spark master set to: $sparkMaster")

    // TODO: enable this functionality (need Spark 1.5 for this)
    //    val sparkArgs: Array[String] = config.getString("spark.submit.sparkargs").split(' ')

    if (!sparkMaster.startsWith("local[") && sparkHome.isEmpty)
      throw new RuntimeException("If 'SPARK_MASTER' is not set to local, 'SPARK_HOME' must be set.")

    val appName = mainClass.split('.').last

    val mainJar = jarPath.glob(mainJarPattern).collectFirst { case f ⇒ f.pathAsString }

    val configVars: Seq[(String, String)] = ConfigUtils.configFields(sparkConfig).toSeq

    val akkaRemoteConfigString = akkaRemoteConfig.map { config ⇒
      val configString = config.root().render(ConfigRenderOptions.concise())
      URLEncoder.encode(configString, "UTF-8")
    }

    val launcher = (new SparkLauncher)
      .setIfSome[String](mainJar) { (l, mj) ⇒ l.setAppResource(mj) }
      .setMainClass(mainClass)
      .setAppName(appName)
      .setMaster(sparkMaster)
      .setIfSome[String](sparkHome) { (l, sh) ⇒ l.setSparkHome(sh) }
      .addAppArgs("appName", appName)
      .addAppArgs("clientAkkaAddress", clientAkkaAddress)
      .setIfSome(akkaRemoteConfigString) { (l, config) ⇒ l.addAppArgs("remoteAkkaConfig", config) }
      .setFoldLeft(configVars) { case (launcher, (key, value)) ⇒ launcher.setConf(key, value) }
      .setDeployMode(sparkConfig.getString("spark.deploymode"))

    val extraJarFiles = jarPath.glob("*.jar")
      .map { case f ⇒ f.pathAsString }
      .filterNot(_.contains("/akka-"))

    val launcherWithJars =
      if (sendJars)
        extraJarFiles.foldLeft(launcher) { case (l, jarFile) ⇒ l.addJar(jarFile) }
      else if (extraJarFiles.length == 0) launcher
      else launcher
        .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")
        .setConf(SparkLauncher.EXECUTOR_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")

    startProcess(launcherWithJars)
  }

} 
Example 8
Source File: RegexConstraint.scala    From drunken-data-quality   with Apache License 2.0
package de.frosner.ddq.constraints

import java.util.regex.Pattern

import org.apache.spark.sql.functions._
import org.apache.spark.sql.{Column, DataFrame}

import scala.util.Try

case class RegexConstraint(columnName: String, regex: String) extends Constraint {

  val fun = (df: DataFrame) => {
    val pattern = Pattern.compile(regex)
    val doesNotMatch = udf((column: String) => column != null && !pattern.matcher(column).find())
    val maybeDoesNotMatchCount = Try(df.filter(doesNotMatch(new Column(columnName))).count)
    RegexConstraintResult(
      constraint = this,
      data = maybeDoesNotMatchCount.toOption.map(RegexConstraintResultData),
      status = ConstraintUtil.tryToStatus[Long](maybeDoesNotMatchCount, _ == 0)
    )
  }

}

case class RegexConstraintResult(constraint: RegexConstraint,
                                 data: Option[RegexConstraintResultData],
                                 status: ConstraintStatus) extends ConstraintResult[RegexConstraint] {

  val message: String = {
    val columnName = constraint.columnName
    val regex = constraint.regex
    val maybeFailedRows = data.map(_.failedRows)
    val maybePluralSAndVerb = maybeFailedRows.map(failedRows => if (failedRows == 1) ("", "does") else ("s", "do"))
    (status, maybeFailedRows, maybePluralSAndVerb) match {
      case (ConstraintSuccess, Some(0), _) =>
        s"Column $columnName matches $regex"
      case (ConstraintFailure, Some(failedRows), Some((pluralS, verb))) =>
        s"Column $columnName contains $failedRows row$pluralS that $verb not match $regex"
      case (ConstraintError(throwable), None, None) =>
        s"Checking whether column $columnName matches $regex failed: $throwable"
      case _ => throw IllegalConstraintResultException(this)
    }
  }

}

case class RegexConstraintResultData(failedRows: Long) 
Example 9
Source File: FileDownloader.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc

import java.net.URL
import java.nio.file.{Files, Path}

import scala.util.{Failure, Success, Try}

class FileDownloader extends Logger {
  import resource._

  def url2File(url: String, targetFile: Path): Try[Path] = {
    if(targetFile.toFile.exists()){
      log.info(s"not downloading from $url as file already exists")
      Success(targetFile)
    } else {
      log.info(s"downloading $url to $targetFile")

      try {
        managed(new URL(url).openConnection().getInputStream).foreach { in =>
          Files.createDirectories(targetFile.getParent)
          Files.copy(in, targetFile)
        }

        Success(targetFile)
      } catch {
        case e: Exception => Failure(e)
      }
    }
  }
} 
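url2File builds its Try by hand with try/catch, which keeps the managed(...) resource block in plain imperative style. Where no resource block is involved, Try.apply captures the same non-fatal exceptions; a sketch of the equivalence:

import scala.util.{Failure, Success, Try}

// Hand-rolled:
def viaTryCatch(n: Int): Try[Int] =
  try Success(100 / n) catch { case e: ArithmeticException => Failure(e) }

// The same behaviour via Try.apply, which catches all non-fatal throwables:
def viaTryApply(n: Int): Try[Int] = Try(100 / n)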
Example 10
Source File: GithubHttp.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.github

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import play.api.libs.json.JsValue
import play.api.libs.ws._
import play.api.libs.ws.ning.{NingAsyncHttpClientConfigBuilder, NingWSClient}
import uk.gov.hmrc.{Logger, ServiceCredentials}

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}

class GithubHttp(cred: ServiceCredentials) extends Logger {

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  val ws = new NingWSClient(new NingAsyncHttpClientConfigBuilder().build())

  def buildCall(method:String, url:String, body:Option[JsValue] = None): WSRequest ={
    log.debug(s"github client_id ${cred.user.takeRight(5)}")
    log.debug(s"github client_secret ${cred.pass.takeRight(5)}")

    val req = ws.url(url)
      .withMethod(method)
      .withAuth(cred.user, cred.pass, WSAuthScheme.BASIC)
      .withQueryString("client_id" -> cred.user, "client_secret" -> cred.pass)
      .withHeaders("content-type" -> "application/json")

    body.map { b =>
      req.withBody(b)
    }.getOrElse(req)
  }

  def callAndWait(req:WSRequest): WSResponse = {
    log.info(s"${req.method} with ${req.url}")
    val result: WSResponse = Await.result(req.execute(), Duration.apply(1, TimeUnit.MINUTES))
    log.info(s"${req.method} with ${req.url} result ${result.status} - ${result.statusText}")
    result
  }

  def get(url:String): Try[Unit] = {
    val result = callAndWait(buildCall("GET", url))
    result.status match {
      case s if s >= 200 && s < 300 => Success(())
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Github. Got status ${result.status}: ${result.body}"))
    }
  }

  def post[A](responseBuilder:(WSResponse) => Try[A])(url:String, body:JsValue): Try[A] = {
    log.debug("github url: " + url)
    log.debug("github body: " + body)

    val result = callAndWait(buildCall("POST", url, Some(body)))
    result.status match {
      case s if s >= 200 && s < 300 => responseBuilder(result)
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Github. Got status ${result.status}: ${result.body}"))
    }
  }

  def postUnit(url:String, body:JsValue): Try[Unit] = {
    post[Unit](_ => Success(()))(url, body)
  }
} 
Example 11
Source File: GithubTagAndRelease.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.github

import org.joda.time.DateTime

import scala.util.Try

trait GithubTagAndRelease {

  def verifyGithubTagExists(repo: Repo, sha: CommitSha): Try[Unit]

  def createGithubTagAndRelease(tagDate: DateTime,
                                commitSha: CommitSha,
                                commitAuthor: String,
                                commitDate: DateTime,
                                artefactName: String,
                                gitRepo: Repo,
                                releaseCandidateVersion: String,
                                version: String,
                                releaseNotes: Option[String]): Try[Unit]

} 
Example 12
Source File: BintrayHttp.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.bintray

import java.net.URL
import java.nio.file.Path
import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import play.api.libs.ws.ning.{NingAsyncHttpClientConfigBuilder, NingWSClient, NingWSClientConfig}
import play.api.libs.ws.{WSAuthScheme, WSClientConfig, WSResponse}
import play.api.mvc.Results
import uk.gov.hmrc.{Logger, ServiceCredentials}

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}
import scala.concurrent.duration._

class BintrayHttp(creds:ServiceCredentials) extends Logger {

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  private def getTimeoutPropertyOptional(key: String) = Option(System.getProperty(key)).map(_.toLong milliseconds)

  def wsClientConfig = NingWSClientConfig(
    wsClientConfig = WSClientConfig(
    connectionTimeout = getTimeoutPropertyOptional("wsclient.timeout.connection").getOrElse(2 seconds),
    idleTimeout = getTimeoutPropertyOptional("wsclient.timeout.idle").getOrElse(2 seconds),
    requestTimeout = getTimeoutPropertyOptional("wsclient.timeout.request").getOrElse(2 seconds)
    )
  )

  val ws = new NingWSClient(new NingAsyncHttpClientConfigBuilder(wsClientConfig).build())

  def apiWs(url:String) = ws.url(url)
    .withAuth(
      creds.user, creds.pass, WSAuthScheme.BASIC)
    .withHeaders("content-type" -> "application/json")

  def emptyPost(url:String): Try[Unit] = {
    log.info(s"posting file to $url")

    val call = apiWs(url).post(Results.EmptyContent())
    val result: WSResponse = Await.result(call, Duration.apply(5, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(())
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }

  def get[A](url:String): Try[String] ={
    log.info(s"getting file from $url")

    val call = apiWs(url).get()
    val result: WSResponse = Await.result(call, Duration.apply(5, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(result.body)
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }

  def putFile(version: VersionDescriptor, file: Path, url: String): Try[Unit] = {
    log.info(s"version $version")
    log.info(s"putting file to $url")

    val call = apiWs(url)
      .withHeaders(
        "X-Bintray-Package" -> version.artefactName,
        "X-Bintray-Version" -> version.version)
      .put(file.toFile)

    val result: WSResponse = Await.result(call, Duration.apply(6, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(())
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }
} 
Example 13
Source File: DefaultBintrayRepoConnector.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.bintray

import java.net.{HttpURLConnection, URL}
import java.nio.file.Path

import play.api.libs.json.{JsValue, Json}
import uk.gov.hmrc.{FileDownloader, Logger, ServiceCredentials}

import scala.util.{Failure, Success, Try}

object BintrayRepoConnector extends Logger {
  def apply(bintrayCreds: ServiceCredentials, workDir : Path): BintrayRepoConnector =
    new DefaultBintrayRepoConnector(workDir, new BintrayHttp(bintrayCreds), new FileDownloader())

  def dryRun(bintrayCreds: ServiceCredentials, workDir : Path) = {
    log.info("Bintray : running in dry-run mode")
    val dryRunHttp = new BintrayHttp(bintrayCreds){
      override def emptyPost(url:String): Try[Unit] = { println("BintrayHttp emptyPost DRY_RUN"); Success(()) }
      override def putFile(version: VersionDescriptor, file: Path, url: String): Try[Unit] = { println("BintrayHttp putFile DRY_RUN"); Success(()) }
    }

    new DefaultBintrayRepoConnector(workDir, dryRunHttp, new FileDownloader())
  }
}

trait BintrayRepoConnector {
  def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path]
  def publish(version: VersionDescriptor): Try[Unit]
  def downloadFile(url: String, fileName: String): Try[Path]
  def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit]
  def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit]
  def findFiles(version: VersionDescriptor): Try[List[String]]
  def getRepoMetaData(repoName:String, artefactName: String): Try[Unit]
}

class DefaultBintrayRepoConnector(workDir: Path, bintrayHttp: BintrayHttp, fileDownloader: FileDownloader)
  extends BintrayRepoConnector with Logger {

  def publish(version: VersionDescriptor):Try[Unit] = {
    val url = BintrayPaths.publishUrlFor(version)
    bintrayHttp.emptyPost(url)
  }

  def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit] = {
    val url = BintrayPaths.fileListUrlFor(version)
    log.info(s"Bintray : checking to see if $url exists")

    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("HEAD")
    conn.connect()

    conn.getResponseCode match {
      case 200 => Failure(new IllegalArgumentException(s"${version.artefactName} ${version.version} already exists"))
      case _ => Success(())
    }
  }

  def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path] = {
    downloadFile(jarUrl, jarFileName).toOption
  }

  def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit] = {
    bintrayHttp.putFile(version, filePath, url)
  }

  def downloadFile(url: String, fileName: String): Try[Path] = {
    val targetFile = workDir.resolve(fileName)
    fileDownloader.url2File(url, targetFile).map(_ => targetFile)
  }

  def findFiles(version: VersionDescriptor): Try[List[String]] = {
    val url = BintrayPaths.fileListUrlFor(version)
    bintrayHttp.get(url).map { st =>
      val fileNames: Seq[JsValue] = Json.parse(st) \\ "path"
      fileNames.map(_.as[String]).toList
    }
  }

  def getRepoMetaData(repoName:String, artefactName: String): Try[Unit] = {
    val url = BintrayPaths.metadata(repoName, artefactName)
    bintrayHttp.get(url).map(_ => ())
  }
} 
Example 14
Source File: Versions.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser

import uk.gov.hmrc.releaser.bintray.VersionDescriptor
import uk.gov.hmrc.releaser.github.Repo

import scala.util.Try

trait Version {
  def value:String

  override def toString = value
}
case class ReleaseVersion(major: Int, minor: Int, revision: Int) extends Version{
  val value = s"$major.$minor.$revision"
}

object ReleaseVersion{
  def apply(st:String):ReleaseVersion = {
    val parts = st.split('.').map(_.toInt)
    ReleaseVersion(parts.head, parts(1), parts(2))
  }
}
case class ReleaseCandidateVersion(value:String) extends Version


case class VersionMapping (repo:RepoFlavour,
                            artefactName:String,
                            gitRepo:Repo,
                            sourceVersion:ReleaseCandidateVersion,
                            targetVersion:ReleaseVersion) {

  def targetArtefact = VersionDescriptor(repo.releaseRepo, artefactName, gitRepo.value, targetVersion.value)
  def sourceArtefact = VersionDescriptor(repo.releaseCandidateRepo, artefactName, gitRepo.value, sourceVersion.value)
}

object VersionNumberCalculator{

  val VersionRegex = """(\d+)\.(\d+)\.(\d+)-.*-g.*""".r

  def calculateTarget(rcVersion:ReleaseCandidateVersion, releaseType: ReleaseType.Value): Try[ReleaseVersion] = Try {
    groups(rcVersion.value).toList.map(_.toInt) match {
      case List(major, minor, hotfix) => releaseType match {
        case ReleaseType.HOTFIX => ReleaseVersion(major, minor, hotfix + 1)
        case ReleaseType.MINOR => ReleaseVersion(major, minor+1, 0)
        case ReleaseType.MAJOR => ReleaseVersion(major+1, 0, 0)
      }
      case _ => throw new IllegalArgumentException("invalid release candidate version " + rcVersion)
    }
  }

  def groups(rcVersion: String): Iterator[String] = {
    for (m <- VersionRegex.findAllIn(rcVersion).matchData;
         e <- m.subgroups) yield e
  }
} 
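calculateTarget wraps the whole computation in Try { ... }, so the IllegalArgumentException thrown for an unparsable release candidate surfaces as a Failure instead of escaping. A usage sketch, assuming a version in the expected <major>.<minor>.<patch>-<build>-g<sha> shape (ReleaseType is an enumeration defined elsewhere in the project):

import scala.util.{Failure, Success}

VersionNumberCalculator.calculateTarget(
  ReleaseCandidateVersion("1.4.0-1-g1234567"), ReleaseType.MINOR) match {
  case Success(version) => println(version.value)  // "1.5.0"
  case Failure(e)       => println(s"bad version: ${e.getMessage}")
}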
Example 15
Source File: MetaDataProvider.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser

import java.nio.file.Path
import java.util.jar.Manifest
import java.util.zip.ZipFile

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import uk.gov.hmrc.releaser.github.CommitSha

import scala.collection.JavaConversions._
import scala.io.Source
import scala.util.{Failure, Success, Try}

trait MetaDataProvider {
  def fromJarFile(p: Path): Try[ArtefactMetaData]
  def fromCommitManifest(p: Path): Try[ArtefactMetaData]
}

case class ArtefactMetaData(sha:CommitSha, commitAuthor:String, commitDate:DateTime)

class ArtefactMetaDataProvider extends MetaDataProvider {
  import ArtefactMetaDataProvider._

  def fromJarFile(p: Path): Try[ArtefactMetaData] = {
    Try {new ZipFile(p.toFile) }.flatMap { jarFile =>
      jarFile.entries().filter(_.getName == "META-INF/MANIFEST.MF").toList.headOption.map { ze =>
        val man = new Manifest(jarFile.getInputStream(ze))
        ArtefactMetaData(
          man.getMainAttributes.getValue("Git-Head-Rev"),
          man.getMainAttributes.getValue("Git-Commit-Author"),
          gitCommitDateFormat.parseDateTime(man.getMainAttributes.getValue("Git-Commit-Date"))
        )
      }.toTry(new Exception(s"Failed to retrieve manifest from $p"))
    }
  }

  def fromCommitManifest(p: Path): Try[ArtefactMetaData] = {
    Try {
      val map = Source.fromFile(p.toFile)
        .getLines().toSeq
        .map(_.split("="))
        .map { case Array(key, value) => key.trim -> value.trim }.toMap

      ArtefactMetaData(map("sha"), map("author"),  gitCommitDateFormat.parseDateTime(map("date")))
    }
  }
}

object ArtefactMetaDataProvider {

  val gitCommitDateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

  implicit class OptionPimp[A](opt: Option[A]){
    def toTry(e:Exception):Try[A] = opt match {
      case Some(x) => Success(x)
      case None => Failure(e)
    }
  }
} 
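The OptionPimp enrichment at the bottom is a small bridge from Option into Try, which is what lets fromJarFile attach a meaningful exception to a missing manifest. A quick usage sketch:

import scala.util.Try
import uk.gov.hmrc.releaser.ArtefactMetaDataProvider._

val present: Try[Int] = Some(42).toTry(new Exception("missing"))  // Success(42)
val absent: Try[Int]  = None.toTry(new Exception("missing"))      // Failure(java.lang.Exception: missing)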
Example 16
Source File: Releaser.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser

import java.io.File
import java.nio.file.{Files, Path}

import org.apache.commons.io.FileUtils
import uk.gov.hmrc.releaser.bintray.{BintrayHttp, BintrayRepoConnector, DefaultBintrayRepoConnector}
import uk.gov.hmrc.releaser.github.{GithubConnector, Repo}
import uk.gov.hmrc.{CredentialsFinder, FileDownloader, Logger}

import scala.util.{Failure, Success, Try}

object ReleaserMain {
  def main(args: Array[String]): Unit = {
    val result = Releaser(args)
    System.exit(result)
  }
}

object Releaser extends Logger {

  import ArgParser._

  def apply(args: Array[String]): Int = {
    parser.parse(args, Config()) match {
      case Some(config) =>
        val githubName = config.githubNameOverride.getOrElse(config.artefactName)
        run(config.artefactName, ReleaseCandidateVersion(config.rcVersion), config.releaseType, githubName, config.releaseNotes, config.dryRun)
      case None => -1
    }
  }

  def run(artefactName: String, rcVersion: ReleaseCandidateVersion, releaseType: ReleaseType.Value, gitHubName: String, releaseNotes: Option[String], dryRun: Boolean = false): Int = {
    val githubCredsFile = System.getProperty("user.home") + "/.github/.credentials"
    val bintrayCredsFile = System.getProperty("user.home") + "/.bintray/.credentials"

    val githubCredsOpt = CredentialsFinder.findGithubCredsInFile(new File(githubCredsFile).toPath)
    val bintrayCredsOpt = CredentialsFinder.findBintrayCredsInFile(new File(bintrayCredsFile).toPath)

    doReleaseWithCleanup { directories =>
      if (githubCredsOpt.isEmpty) {
        log.info(s"Didn't find github credentials in $githubCredsFile")
        -1
      } else if (bintrayCredsOpt.isEmpty) {
        log.info(s"Didn't find Bintray credentials in $bintrayCredsFile")
        -1
      } else {

        val releaserVersion = getClass.getPackage.getImplementationVersion
        val metaDataProvider = new ArtefactMetaDataProvider()
        val gitHubDetails = if (dryRun) GithubConnector.dryRun(githubCredsOpt.get, releaserVersion) else GithubConnector(githubCredsOpt.get, releaserVersion)
        val bintrayDetails = if (dryRun) BintrayRepoConnector.dryRun(bintrayCredsOpt.get, directories.workDir) else BintrayRepoConnector(bintrayCredsOpt.get, directories.workDir)
        val bintrayRepoConnector = new DefaultBintrayRepoConnector(directories.workDir, new BintrayHttp(bintrayCredsOpt.get), new FileDownloader)

        val coordinator = new Coordinator(directories.stageDir, metaDataProvider, gitHubDetails, bintrayRepoConnector)
        val result = coordinator.start(artefactName, Repo(gitHubName), rcVersion, releaseType, releaseNotes)

        result match {
          case Success(targetVersion) =>
            log.info(s"Releaser successfully released $artefactName $targetVersion")
            0
          case Failure(e) =>
            e.printStackTrace()
            log.info(s"Releaser failed to release $artefactName $rcVersion with error '${e.getMessage}'")
            1
        }
      }
    }
  }

  def doReleaseWithCleanup[T](f: ReleaseDirectories => T): T = {
    val directories = ReleaseDirectories()
    try {
      f(directories)
    } finally {
      log.info("cleaning releaser work directory")
      directories.delete().recover{case  t => log.warn(s"failed to delete releaser work directory ${t.getMessage}")}
    }

  }
}

case class ReleaseDirectories(tmpDirectory: Path = Files.createTempDirectory("releaser")) {

  lazy val workDir = Files.createDirectories(tmpDirectory.resolve("work"))
  lazy val stageDir = Files.createDirectories(tmpDirectory.resolve("stage"))

  def delete() = Try {
    FileUtils.forceDelete(tmpDirectory.toFile)
  }
} 
Example 17
Source File: FakeBintrayRepoConnector.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.bintray

import java.nio.file.{Path, Paths}

import scala.collection.mutable
import scala.util.{Failure, Success, Try}

class FakeBintrayRepoConnector(filesuffix:String  = "",
                               jarResource:Option[String],
                               bintrayFiles:Set[String],
                               targetExists:Boolean = false) extends BintrayRepoConnector {

  val downloadedFiles = mutable.Set[String]()
  val uploadedFiles = mutable.Set[(VersionDescriptor, Path, String)]()
  var lastPublishDescriptor: Option[VersionDescriptor] = None

  override def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path] =
    jarResource.map { x => Paths.get(this.getClass.getResource(filesuffix + x).toURI) }

  override def publish(version: VersionDescriptor): Try[Unit] = {
    lastPublishDescriptor = Some(version)
    Success(())
  }

  override def findFiles(version: VersionDescriptor): Try[List[String]] = Success(bintrayFiles.toList ++ jarResource)

  override def downloadFile(url: String, fileName: String): Try[Path] = {
    downloadedFiles.add(url)
    Success(Paths.get(this.getClass.getResource(filesuffix + fileName).toURI))
  }

  override def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit] = {
    uploadedFiles.add((version, filePath, url))
    Success(())
  }

  override def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit] =
    if (targetExists) Failure(new IllegalArgumentException("Failed in test"))
    else Success(())

  override def getRepoMetaData(repoName: String, artefactName: String): Try[Unit] = Success(())
} 
Example 18
Source File: package.scala    From theGardener   with Apache License 2.0
import java.io.File

import play.api.Logging

import scala.concurrent._
import scala.util.control.NonFatal
import scala.util.{Failure, Try}

package object utils extends Logging {

  implicit class TryOps[T](t: Try[T]) {
    def logError(msg: => String): Try[T] = t.recoverWith {
      case e =>
        logger.error(msg, e)
        Failure(e)
    }
  }

  implicit class FutureOps[T](f: Future[T]) {
    def logError(msg: => String)(implicit ec: ExecutionContext): Future[T] = f.recoverWith {
      case NonFatal(e) => logger.error(msg, e)
        Future.failed(e)
    }
  }

  implicit class PathExt(path: String) {
    def fixPathSeparator: String = path.replace('/', File.separatorChar)
  }

} 
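TryOps.logError is worth noting: it logs and then returns the same Failure, so error reporting rides along a Try chain without altering it. A usage sketch:

import scala.util.Try
import utils._

val port: Try[Int] =
  Try(sys.props("app.port").toInt)        // throws if the property is absent or non-numeric
    .logError("Could not read app.port")  // logs on Failure and passes the Try through
    .recover { case _ => 8080 }           // then fall back to a default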
Example 19
Source File: WikiServiceImpl.scala    From BacklogMigration-Redmine   with MIT License
package com.nulabinc.backlog.r2b.redmine.service

import java.net.URLEncoder

import javax.inject.Inject
import com.nulabinc.backlog.migration.common.utils.Logging
import com.nulabinc.backlog.r2b.redmine.conf.RedmineApiConfiguration
import com.taskadapter.redmineapi.{NotFoundException, RedmineFormatException, RedmineInternalError, RedmineManager}
import com.taskadapter.redmineapi.bean.{WikiPage, WikiPageDetail}

import scala.jdk.CollectionConverters._
import scala.util.Try


class WikiServiceImpl @Inject()(apiConfig: RedmineApiConfiguration, redmine: RedmineManager) extends WikiService with Logging {

  override def allWikis(): Seq[WikiPage] =
    try {
      redmine.getWikiManager.getWikiPagesByProject(apiConfig.projectKey).asScala.toSeq
    } catch {
      case e: Throwable =>
        logger.warn(e.getMessage, e)
        Seq.empty[WikiPage]
    }

  override def optWikiDetail(pageTitle: String): Option[WikiPageDetail] = {
    logger.debug("Get a wiki Title: " + pageTitle)
    try {
      val wiki = redmine.getWikiManager.getWikiPageDetailByProjectAndTitle(apiConfig.projectKey, pageTitle)
      Some(wiki)
    } catch {
      case e: RedmineInternalError if e.getMessage.contains("URISyntaxException") =>
        logger.warn(s"Failed to get wiki details. URISyntaxException: ${e.getMessage} Title: $pageTitle")
        None
      case e: NotFoundException =>
        logger.warn(s"Failed to get wiki details. NotFoundException: ${e.getMessage} Title: $pageTitle")
        None
      case e: RedmineFormatException =>
        val url = s"${apiConfig.url}/projects/${apiConfig.projectKey}/wiki/${encode(pageTitle)}.json?include=attachments&key=${apiConfig.key}"

        Try {
          scala.io.Source.fromURL(url, "UTF-8").mkString
        }.recover {
          case e: Throwable =>
            e.getMessage
        }.map { res =>
          logger.warn(s"Failed to get wiki details. RedmineFormatException: ${e.getMessage} Title: $pageTitle Raw response: $res")
        }
        None
      case e: Throwable =>
        throw e
    }
  }

  private def encode(str: String): String =
    URLEncoder.encode(str, "UTF-8")
} 
Example 20
Source File: Http4sMain.scala    From advanced-scala-code   with Apache License 2.0
import java.util.UUID

import cats.effect.IO
import scala.util.Try

case class Person(name: String, age: Int)

object Endpoints {
  import org.http4s._
  import org.http4s.dsl.io._

  val helloWorldService = HttpRoutes.of[IO] {
    case GET -> Root / "hello" / IntVar(number) =>
      Ok(s"Hello, your number is $number")
  }

  val asyncRequest = HttpRoutes.of[IO] {
    case GET -> Root / "async" =>
      Ok {
        IO.async[String] { eitherCb =>
          import org.asynchttpclient.Dsl._
          val whenResponse = asyncHttpClient.
            prepareGet("https://httpbin.org/get").execute()
          whenResponse.toCompletableFuture.whenComplete((res, th) => {
            if (th != null) {
              eitherCb(Left(th))
            } else eitherCb(Right(res.getResponseBody))
          })
        }
      }
  }

  val jsonRequest = HttpRoutes.of[IO] {
    case GET -> Root / "json" =>
      import org.http4s.circe._         // EntityEncoder[IO, Json]
      import io.circe.generic.auto._    // automatic codecs for Person
      import io.circe.syntax._          // asJson method
      Ok {
        Person("Joe", 42).asJson
      }
  }

  val idService = HttpRoutes.of[IO] {
    case GET -> Root / "id" / UuidVar(id) =>
      Ok(s"Your ID is $id")
  }

  val timeService = HttpRoutes.of[IO] {
    case GET -> Root / "time" =>
      Ok(System.currentTimeMillis().toString)
  }

  object UuidVar {
    def unapply(s: String): Option[UUID] = {
      Try { UUID.fromString(s) }.toOption
    }
  }
}


import cats.effect.{ExitCode, IO, IOApp}
object Http4sMain extends IOApp {

  import Endpoints._
  import cats.implicits._
  import org.http4s.implicits._
  import org.http4s.server.blaze._
  import org.http4s.server.Router

  val api = helloWorldService <+> timeService <+> idService <+> asyncRequest <+> jsonRequest

  val httpApp = Router("/" -> api).orNotFound

  def run(args: List[String]): IO[ExitCode] =
    BlazeServerBuilder[IO]
      .bindHttp(8080)
      .withHttpApp(httpApp)
      .serve
      .compile
      .drain
      .as(ExitCode.Success)
} 
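The Try usage here is the UuidVar extractor: Try(...).toOption converts the IllegalArgumentException thrown by UUID.fromString into a failed pattern match, so a malformed ID falls through to http4s's default 404 response instead of crashing the route. The same shape works for any parser that throws; a hypothetical extractor as a sketch:

import scala.util.Try

object IntCsvVar {
  // Hypothetical extractor: matches comma-separated integers such as "1,2,3".
  def unapply(s: String): Option[List[Int]] =
    Try(s.split(',').toList.map(_.trim.toInt)).toOption
}

"1, 2, 3" match {
  case IntCsvVar(numbers) => println(numbers.sum)  // 6
  case _                  => println("not a list of ints")
}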
Example 21
Source File: StructType.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.types

import scala.util.{Failure, Success, Try}


// (The class declaration and its doc comment were dropped from the original
// listing; the declaration and minimal helper members below are assumed so
// that the snippet compiles.)
case class StructType(fields: Seq[StructField]) {

  def getField(name: String): Option[StructField] = fields.find(_.name == name)

  def contains(name: String): Boolean = getField(name).isDefined

  def indexOf(name: String): Int = fields.indexWhere(_.name == name)

  def dropIndex(index: Int): StructType = {
    StructType(fields.take(index) ++ fields.drop(index + 1))
  }

  def tryIndexOf(name: String): Try[Int] = {
    if(contains(name)) {
      Success(indexOf(name))
    } else {
      Failure(new Error(s"Field $name does not exist"))
    }
  }
} 
Example 22
Source File: LinearRegressionModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.regression.LinearRegression
import com.truecar.mleap.runtime.attribute.{ContinuousAttribute, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{DoubleType, VectorType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class LinearRegressionModel(featuresCol: String,
                                 predictionCol: String,
                                 model: LinearRegression) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(featuresCol, VectorType).flatMap {
      case(b, featuresIndex) =>
        b.withOutput(predictionCol, DoubleType)(row => model(row.getVector(featuresIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(predictionCol, ContinuousAttribute())
  }
} 
Example 23
Source File: RandomForestRegressionModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.regression.RandomForestRegression
import com.truecar.mleap.runtime.attribute.{ContinuousAttribute, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{VectorType, DoubleType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class RandomForestRegressionModel(featuresCol: String,
                                       predictionCol: String,
                                       model: RandomForestRegression) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(featuresCol, VectorType).flatMap {
      case (b, featuresIndex) =>
        b.withOutput(predictionCol, DoubleType)(row => model(row.getVector(featuresIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(predictionCol, ContinuousAttribute())
  }
} 
Example 24
Source File: TokenizerModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.Tokenizer
import com.truecar.mleap.runtime.attribute.{OtherAttribute, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{StringArrayType, StringType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class TokenizerModel(inputCol: String,
                         outputCol: String) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(inputCol, StringType).flatMap {
      case (b, inputIndex) =>
        b.withOutput(outputCol, StringArrayType)(row => Tokenizer.defaultTokenizer(row.getString(inputIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = schema.withField(outputCol, OtherAttribute())
} 
Example 25
Source File: SupportVectorMachineModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.classification.SupportVectorMachine
import com.truecar.mleap.runtime.attribute.{AttributeSchema, CategoricalAttribute}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{DoubleType, VectorType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class SupportVectorMachineModel(featuresCol: String,
                                     predictionCol: String,
                                     model: SupportVectorMachine) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(featuresCol, VectorType).flatMap {
      case(b, featuresIndex) =>
        b.withOutput(predictionCol, DoubleType)(row => model(row.getVector(featuresIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(predictionCol, CategoricalAttribute())
  }
} 
Example 26
Source File: HashingTermFrequencyModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.HashingTermFrequency
import com.truecar.mleap.runtime.attribute.{CategoricalAttribute, AttributeGroup, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops
import com.truecar.mleap.runtime.types.{DoubleType, StringType}

import scala.util.Try


case class HashingTermFrequencyModel(inputCol: String,
                                     outputCol: String,
                                     hashingTermFrequency: HashingTermFrequency) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(inputCol, StringType).flatMap {
      case (b, inputIndex) =>
        b.withOutput(outputCol, DoubleType)(row => hashingTermFrequency(row.getString(inputIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    val attrGroup = AttributeGroup(Array.tabulate(hashingTermFrequency.numFeatures)(_ => CategoricalAttribute()))
    schema.withField(outputCol, attrGroup)
  }
} 
Example 27
Source File: StringIndexerModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.StringIndexer
import com.truecar.mleap.runtime.attribute.{CategoricalAttribute, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.DoubleType
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class StringIndexerModel(inputCol: String,
                              outputCol: String,
                              indexer: StringIndexer) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(inputCol).flatMap {
      case (b, inputIndex) =>
        b.withOutput(outputCol, DoubleType)(row => indexer(row.get(inputIndex).toString))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(outputCol, CategoricalAttribute())
  }

  def toReverse: ReverseStringIndexerModel = ReverseStringIndexerModel(inputCol,
    outputCol,
    indexer.toReverse)
} 
Example 28
Source File: StandardScalerModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.StandardScaler
import com.truecar.mleap.runtime.attribute.AttributeSchema
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.VectorType
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class StandardScalerModel(inputCol: String,
                               outputCol: String,
                               scaler: StandardScaler) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(inputCol, VectorType).flatMap {
      case (b, inputIndex) =>
        b.withOutput(outputCol, VectorType)(row => scaler(row.getVector(inputIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(outputCol, schema(inputCol))
  }
} 
Example 29
Source File: VectorAssemblerModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.VectorAssembler
import com.truecar.mleap.runtime.attribute.{BaseAttribute, AttributeGroup, AttributeSchema}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{StructType, VectorType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class VectorAssemblerModel(inputCols: Array[String],
                                outputCol: String) extends Transformer {
  private val assembler: VectorAssembler = VectorAssembler.default

  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    inputCols.foldLeft(Try((builder, Seq[Int]()))) {
      (result, col) => result.flatMap {
        case (b, indices) =>
          b.withInput(col)
            .map {
              case (b3, index) => (b3, indices :+ index)
            }
      }
    }.flatMap {
      case (b, indices) =>
        b.withOutput(outputCol, VectorType)(row => assembler(indices.map(row.get): _*))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    val attrs: Array[BaseAttribute] = inputCols.map(col => schema(col)).flatMap {
      case AttributeGroup(groupAttrs) => groupAttrs: Array[BaseAttribute]
      case attr: BaseAttribute => Array(attr): Array[BaseAttribute]
      case _ =>
        // TODO: better error here
        throw new Error("Unsupported attribute type")
    }

    schema.withField(outputCol, AttributeGroup(attrs))
  }
} 
Example 30
Source File: LeapFrameBuilder.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer.builder

import com.truecar.mleap.runtime.{Row, LeapFrame}
import com.truecar.mleap.runtime.types.{DataType, StructField}

import scala.util.{Failure, Success, Try}


case class LeapFrameBuilder[T: LeapFrame](frame: T) extends Serializable
object LeapFrameBuilder {
  implicit def LeapFrameBuilderTransformBuilder[T: LeapFrame]: TransformBuilder[LeapFrameBuilder[T]] = {
    new TransformBuilder[LeapFrameBuilder[T]] {
      override def withInput(t: LeapFrameBuilder[T], name: String): Try[(LeapFrameBuilder[T], Int)] = {
        LeapFrame.schema(t.frame).tryIndexOf(name).map((t, _))
      }

      override def withInput(t: LeapFrameBuilder[T], name: String, dataType: DataType): Try[(LeapFrameBuilder[T], Int)] = {
        val schema = LeapFrame.schema(t.frame)
        schema.getField(name) match {
          case Some(field) =>
            if(field.dataType == dataType) {
              Success((t, schema.indexOf(name)))
            } else {
              Failure(new Error(s"Field $name expected data type ${field.dataType} but found $dataType"))
            }
          case None =>
            Failure(new Error(s"Field $name does not exist"))
        }
      }

      override def withOutput(t: LeapFrameBuilder[T], name: String, dataType: DataType)(o: (Row) => Any): Try[LeapFrameBuilder[T]] = {
        LeapFrame.withField(t.frame, StructField(name, dataType), o).map {
          frame2 => t.copy(frame = frame2)
        }
      }
      override def withSelect(t: LeapFrameBuilder[T], fieldNames: Seq[String]): Try[LeapFrameBuilder[T]] = {
        LeapFrame.select(t.frame, fieldNames: _*).map(frame => LeapFrameBuilder(frame))
      }
      override def withDrop(t: LeapFrameBuilder[T], name: String): Try[LeapFrameBuilder[T]] = {
        LeapFrame.dropField(t.frame, name).map(frame => LeapFrameBuilder(frame))
      }
    }
  }
} 
Example 31
Source File: TransformBuilder.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer.builder

import com.truecar.mleap.runtime.types.DataType
import com.truecar.mleap.runtime.Row

import scala.util.{Failure, Try}


trait TransformBuilder[T] extends Serializable {
  def withInput(t: T, name: String): Try[(T, Int)]
  def withInput(t: T, name: String, dataType: DataType): Try[(T, Int)]

  def withOutput(t: T, name: String, dataType: DataType)
                (o: (Row) => Any): Try[T]

  def withSelect(t: T, fieldNames: Seq[String]): Try[T]
  def withDrop(t: T, name: String): Try[T]
}

object TransformBuilder {
  implicit class Ops[T: TransformBuilder](t: T) {
    def withInput(name: String): Try[(T, Int)] = {
      implicitly[TransformBuilder[T]].withInput(t, name)
    }

    def withInput(name: String, dataType: DataType): Try[(T, Int)] = {
      implicitly[TransformBuilder[T]].withInput(t, name, dataType)
    }

    def withOutput(name: String, dataType: DataType)
                                       (o: (Row) => Any): Try[T] = {
      implicitly[TransformBuilder[T]].withOutput(t, name, dataType)(o)
    }

    def withSelect(fieldNames: Seq[String]): Try[T] = {
      implicitly[TransformBuilder[T]].withSelect(t, fieldNames)
    }
    def withDrop(name: String): Try[T] = {
      implicitly[TransformBuilder[T]].withDrop(t, name)
    }
  }
} 
Example 32
Source File: ReverseStringIndexerModel.scala    From mleap   with Apache License 2.0
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.feature.ReverseStringIndexer
import com.truecar.mleap.runtime.attribute.{AttributeSchema, CategoricalAttribute}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.StringType
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class ReverseStringIndexerModel(inputCol: String,
                                     outputCol: String,
                                     indexer: ReverseStringIndexer) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(inputCol).flatMap {
      case (b, inputIndex) =>
        b.withOutput(outputCol, StringType)(row => indexer(row.getDouble(inputIndex).toInt))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(outputCol, CategoricalAttribute())
  }
} 
Example 33
Source File: RandomForestClassificationModel.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package com.truecar.mleap.runtime.transformer

import com.truecar.mleap.core.classification.RandomForestClassification
import com.truecar.mleap.runtime.attribute.{AttributeSchema, CategoricalAttribute}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder
import com.truecar.mleap.runtime.types.{DoubleType, VectorType}
import com.truecar.mleap.runtime.transformer.builder.TransformBuilder.Ops

import scala.util.Try


case class RandomForestClassificationModel(featuresCol: String,
                                           predictionCol: String,
                                           model: RandomForestClassification) extends Transformer {
  override def build[TB: TransformBuilder](builder: TB): Try[TB] = {
    builder.withInput(featuresCol, VectorType).flatMap {
      case (b, featuresIndex) =>
        b.withOutput(predictionCol, DoubleType)(row => model(row.getVector(featuresIndex)))
    }
  }

  override def transformAttributeSchema(schema: AttributeSchema): AttributeSchema = {
    schema.withField(predictionCol, CategoricalAttribute())
  }
} 
Example 34
Source File: Constraint.scala    From zipkin-mesos-framework   with Apache License 2.0 5 votes vote down vote up
package net.elodina.mesos.zipkin.components

import java.util.regex.{Pattern, PatternSyntaxException}

import net.elodina.mesos.zipkin.utils.Util

import scala.util.Try

trait Constraint {
  def matches(value: String, values: List[String] = Nil): Boolean
}

object Constraint {
  def apply(value: String): Constraint = {
    if (value.startsWith("like:")) Constraint.Like(value.substring("like:".length))
    else if (value.startsWith("unlike:")) Constraint.Like(value.substring("unlike:".length), negated = true)
    else if (value == "unique") Constraint.Unique()
    else if (value.startsWith("cluster")) {
      val tail = value.substring("cluster".length)
      val cluster = if (tail.startsWith(":")) Some(tail.substring(1)) else None
      Cluster(cluster)
    } else if (value.startsWith("groupBy")) {
      val tail = value.substring("groupBy".length)
      val groups =
        if (tail.startsWith(":"))
          Try(tail.substring(1).toInt).toOption
            .getOrElse(throw new IllegalArgumentException(s"invalid condition $value"))
        else 1

      GroupBy(groups)
    }
    else throw new IllegalArgumentException(s"Unsupported condition: $value")
  }

  def parse(constraints: String): Map[String, List[Constraint]] = {
    Util.parseList(constraints).foldLeft[Map[String, List[Constraint]]](Map()) { case (all, (name, value)) =>
      all.get(name) match {
        case Some(values) => all.updated(name, Constraint(value) :: values)
        case None => all.updated(name, List(Constraint(value)))
      }
    }
  }

  case class Like(regex: String, negated: Boolean = false) extends Constraint {
    val pattern = try {
      Pattern.compile(s"^$regex$$")
    } catch {
      case e: PatternSyntaxException => throw new IllegalArgumentException(s"Invalid $name: ${e.getMessage}")
    }

    private def name: String = if (negated) "unlike" else "like"

    def matches(value: String, values: List[String]): Boolean = negated ^ pattern.matcher(value).find()

    override def toString: String = s"$name:$regex"
  }

  case class Unique() extends Constraint {
    def matches(value: String, values: List[String]): Boolean = !values.contains(value)

    override def toString: String = "unique"
  }

  case class Cluster(value: Option[String]) extends Constraint {
    def matches(value: String, values: List[String]): Boolean = this.value match {
      case Some(v) => v == value
      case None => values.isEmpty || values.head == value
    }

    override def toString: String = "cluster" + value.map(":" + _).getOrElse("")
  }

  case class GroupBy(groups: Int = 1) extends Constraint {
    def matches(value: String, values: List[String]): Boolean = {
      val counts = values.groupBy(identity).mapValues(_.size)
      if (counts.size < groups) !counts.contains(value)
      else {
        val minCount = counts.values.reduceOption(_ min _).getOrElse(0)
        counts.getOrElse(value, 0) == minCount
      }
    }

    override def toString: String = "groupBy" + (if (groups > 1) s":$groups" else "")
  }

} 
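
A quick sketch of how this constraint grammar behaves. The demo object and sample values below are illustrative only, not part of the project:

import net.elodina.mesos.zipkin.components.Constraint

object ConstraintDemo extends App {
  // "like:" compiles the remainder into an anchored regex
  val like = Constraint("like:slave[0-9]+")
  println(like.matches("slave1")) // true
  println(like.matches("master")) // false

  // "groupBy:2" spreads values across two groups
  val groupBy = Constraint("groupBy:2")
  println(groupBy.matches("a", List("a", "b", "b"))) // true: "a" has the minimum count
}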
Example 35
Source File: S3.scala    From teamcity-s3-plugin   with Apache License 2.0 5 votes vote down vote up
package com.gu.teamcity

import java.io.{InputStream, File}

import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.{AWSCredentialsProviderChain, DefaultAWSCredentialsProviderChain}
import com.amazonaws.services.s3.AmazonS3Client
import com.amazonaws.services.s3.model.{ObjectMetadata, PutObjectRequest, CannedAccessControlList}
import com.amazonaws.services.s3.transfer.TransferManager
import jetbrains.buildServer.serverSide.SBuild

import scala.util.{Success, Try}

class S3(config: S3ConfigManager) {
  val credentialsProvider = {
    val provider = new AWSCredentialsProviderChain(config, new DefaultAWSCredentialsProviderChain())
    provider.setReuseLastProvider(false)
    provider
  }

  val transferManager = new TransferManager(
    new AmazonS3Client(credentialsProvider, new ClientConfiguration().withMaxErrorRetry(2))
  )

  def upload(bucket: String, build: SBuild, fileName: String, contents: InputStream, fileSize: Long): Try[Unit] =
    Try {
      val uploadDirectory = s"${S3Plugin.cleanFullName(build)}/${build.getBuildNumber}"
      val metadata = {
        val md = new ObjectMetadata()
        md.setContentLength(fileSize)
        md
      }
      val req = new PutObjectRequest(bucket, s"$uploadDirectory/$fileName", contents, metadata)
      req.withCannedAcl(CannedAccessControlList.BucketOwnerFullControl)
      val upload = transferManager.upload(req)
      upload.waitForUploadResult()
    }

  def upload(bucket: String, build: SBuild, fileName: String, file: File): Try[Unit] =
    Try {
      val uploadDirectory = s"${S3Plugin.cleanFullName(build)}/${build.getBuildNumber}"
      val req = new PutObjectRequest(bucket, s"$uploadDirectory/$fileName", file)
      req.withCannedAcl(CannedAccessControlList.BucketOwnerFullControl)
      val upload = transferManager.upload(req)
      upload.waitForUploadResult()
    }

} 
Example 36
Source File: ScalarDecoder.scala    From caliban   with Apache License 2.0 5 votes vote down vote up
package caliban.client

import scala.util.Try
import java.util.UUID

import caliban.client.CalibanClientError.DecodingError
import caliban.client.Value._
import io.circe.Json


trait ScalarDecoder[+A] {
  def decode(value: Value): Either[DecodingError, A]
}

object ScalarDecoder {
  implicit val int: ScalarDecoder[Int] = {
    case NumberValue(value) =>
      Try(value.toIntExact).toEither.left.map(ex => DecodingError(s"Can't build an Int from input $value", Some(ex)))
    case other => Left(DecodingError(s"Can't build an Int from input $other"))
  }
  implicit val long: ScalarDecoder[Long] = {
    case NumberValue(value) =>
      Try(value.toLongExact).toEither.left.map(ex => DecodingError(s"Can't build a Long from input $value", Some(ex)))
    case other => Left(DecodingError(s"Can't build a Long from input $other"))
  }
  implicit val bigInt: ScalarDecoder[BigInt] = {
    case NumberValue(value) =>
      Try(value.toBigIntExact).toEither.left
        .map(ex => DecodingError(s"Can't build a BigInt from input $value", Some(ex)))
        .flatMap {
          case None    => Left(DecodingError(s"Can't build a BigInt from input $value"))
          case Some(v) => Right(v)
        }
    case other => Left(DecodingError(s"Can't build a BigInt from input $other"))
  }
  implicit val float: ScalarDecoder[Float] = {
    case NumberValue(value) => Right(value.toFloat)
    case other              => Left(DecodingError(s"Can't build a Float from input $other"))
  }
  implicit val double: ScalarDecoder[Double] = {
    case NumberValue(value) => Right(value.toDouble)
    case other              => Left(DecodingError(s"Can't build a Double from input $other"))
  }
  implicit val bigDecimal: ScalarDecoder[BigDecimal] = {
    case NumberValue(value) => Right(value)
    case other              => Left(DecodingError(s"Can't build a BigDecimal from input $other"))
  }
  implicit val boolean: ScalarDecoder[Boolean] = {
    case BooleanValue(value) => Right(value)
    case other               => Left(DecodingError(s"Can't build a Boolean from input $other"))
  }
  implicit val string: ScalarDecoder[String] = {
    case StringValue(value) => Right(value)
    case other              => Left(DecodingError(s"Can't build a String from input $other"))
  }
  implicit val uuid: ScalarDecoder[UUID] = {
    case StringValue(value) =>
      Try(UUID.fromString(value)).toEither.left
        .map(ex => DecodingError(s"Can't build a UUID from input $value", Some(ex)))
    case other => Left(DecodingError(s"Can't build a UUID from input $other"))
  }
  implicit val unit: ScalarDecoder[Unit] = {
    case ObjectValue(Nil) => Right(())
    case other            => Left(DecodingError(s"Can't build Unit from input $other"))
  }
  implicit val json: ScalarDecoder[Json] = value => Right(Value.valueEncoder(value))
} 
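
A hedged usage sketch, assuming NumberValue wraps a scala.math.BigDecimal (which the toIntExact calls above suggest); the demo object is hypothetical:

import caliban.client.ScalarDecoder
import caliban.client.Value.{ NumberValue, StringValue }

object ScalarDecoderDemo extends App {
  println(ScalarDecoder.int.decode(NumberValue(BigDecimal(42)))) // Right(42)
  println(ScalarDecoder.int.decode(StringValue("42")))           // Left(DecodingError(...))
  println(ScalarDecoder.uuid.decode(StringValue("123e4567-e89b-12d3-a456-426614174000"))) // Right(...)
}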
Example 37
Source File: PlayJsonBackend.scala    From caliban   with Apache License 2.0 5 votes vote down vote up
package caliban.interop.play

import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller
import caliban._
import caliban.interop.play.json.parsingException
import de.heikoseeberger.akkahttpplayjson.PlayJsonSupport
import play.api.libs.json.{ JsObject, JsValue, Json }
import scala.util.Try


final class PlayJsonBackend extends JsonBackend with PlayJsonSupport {

  private def parseJson(s: String): Try[JsValue] =
    Try(Json.parse(s))

  def parseHttpRequest(
    query: Option[String],
    op: Option[String],
    vars: Option[String],
    exts: Option[String]
  ): Either[Throwable, GraphQLRequest] = {
    val variablesJs  = vars.flatMap(parseJson(_).toOption)
    val extensionsJs = exts.flatMap(parseJson(_).toOption)
    Json
      .obj(
        "query"         -> query,
        "operationName" -> op,
        "variables"     -> variablesJs,
        "extensions"    -> extensionsJs
      )
      .validate[GraphQLRequest]
      .asEither
      .left
      .map(parsingException)
  }

  def encodeGraphQLResponse(r: GraphQLResponse[Any]): String = Json.toJson(r).toString()

  def parseWSMessage(text: String): Either[Throwable, WSMessage] =
    parseJson(text).toEither.map { json =>
      PlayWSMessage(
        (json \ "id").validate[String].getOrElse(""),
        (json \ "type").validate[String].getOrElse(""),
        (json \ "payload").validate[JsObject].asOpt
      )
    }

  def encodeWSResponse[E](id: String, data: ResponseValue, errors: List[E]): String =
    Json.stringify(
      Json
        .obj(
          "id"      -> id,
          "type"    -> "data",
          "payload" -> GraphQLResponse(data, errors)
        )
    )

  def encodeWSError(id: String, error: Throwable): String =
    Json.stringify(
      Json
        .obj(
          "id"      -> id,
          "type"    -> "complete",
          "payload" -> error.toString
        )
    )

  def reqUnmarshaller: FromEntityUnmarshaller[GraphQLRequest] = implicitly
} 
Example 38
Source File: ReactElementContainer.scala    From slinky   with MIT License 5 votes vote down vote up
package slinky.core

import slinky.core.facade.ReactElement

import scala.collection.immutable.{Iterable, Queue}
import scala.concurrent.Future
import scala.scalajs.js
import scala.util.Try

trait ReactElementContainer[F[_]] extends Any { self =>
  def map[A](fa: F[A])(f: A => ReactElement): F[ReactElement]
}

object ReactElementContainer {
  def apply[F[_]: ReactElementContainer]: ReactElementContainer[F] = implicitly[ReactElementContainer[F]]

  @inline implicit def function0Container: ReactElementContainer[Function0] = new ReactElementContainer[Function0] {
    override def map[A](fa: () => A)(f: A => ReactElement): () => ReactElement = () => f(fa())
  }

  @inline implicit def futureContainer: ReactElementContainer[Future] = new ReactElementContainer[Future] {
    import scala.concurrent.ExecutionContext.Implicits.global
    override def map[A](fa: Future[A])(f: A => ReactElement): Future[ReactElement] = fa.map(f)
  }

  @inline implicit def iterableContainer: ReactElementContainer[Iterable] = new ReactElementContainer[Iterable] {
    override def map[A](fa: Iterable[A])(f: A => ReactElement): Iterable[ReactElement] = fa.map(f)
  }

  @inline implicit def jsUndefOrContainer: ReactElementContainer[js.UndefOr] = new ReactElementContainer[js.UndefOr] {
    override def map[A](fa: js.UndefOr[A])(f: A => ReactElement): js.UndefOr[ReactElement] = fa.map(f)
  }

  @inline implicit def listContainer: ReactElementContainer[List] = new ReactElementContainer[List] {
    override def map[A](fa: List[A])(f: A => ReactElement): List[ReactElement] = fa.map(f)
  }

  @inline implicit def optionContainer: ReactElementContainer[Option] = new ReactElementContainer[Option] {
    override def map[A](fa: Option[A])(f: A => ReactElement): Option[ReactElement] = fa.map(f)
  }

  @inline implicit def queueContainer: ReactElementContainer[Queue] = new ReactElementContainer[Queue] {
    override def map[A](fa: Queue[A])(f: A => ReactElement): Queue[ReactElement] = fa.map(f)
  }

  @inline implicit def seqContainer: ReactElementContainer[Seq] = new ReactElementContainer[Seq] {
    override def map[A](fa: Seq[A])(f: A => ReactElement): Seq[ReactElement] = fa.map(f)
  }

  @inline implicit def setContainer: ReactElementContainer[Set] = new ReactElementContainer[Set] {
    override def map[A](fa: Set[A])(f: A => ReactElement): Set[ReactElement] = fa.map(f)
  }

  @inline implicit def someContainer: ReactElementContainer[Some] = new ReactElementContainer[Some] {
    override def map[A](fa: Some[A])(f: A => ReactElement): Some[ReactElement] = Some(fa.map(f).get)
  }

  @inline implicit def tryContainer: ReactElementContainer[Try] = new ReactElementContainer[Try] {
    override def map[A](fa: Try[A])(f: A => ReactElement): Try[ReactElement] = fa.map(f)
  }

  @inline implicit def vectorContainer: ReactElementContainer[Vector] = new ReactElementContainer[Vector] {
    override def map[A](fa: Vector[A])(f: A => ReactElement): Vector[ReactElement] = fa.map(f)
  }
} 
Example 39
Source File: JSONSerializers.scala    From aardpfark   with Apache License 2.0 5 votes vote down vote up
package com.ibm.aardpfark.pfa.document

import scala.util.Try

import com.ibm.aardpfark.pfa.dsl._
import com.ibm.aardpfark.pfa.expression.PFAExpression
import com.ibm.aardpfark.spark.ml.tree.{TreeNode, Trees}
import org.apache.avro.Schema
import org.json4s.native.JsonMethods.parse
import org.json4s.{CustomSerializer, JValue}


object SchemaSerializer {

  def convert(s: Schema): JValue = {
    import Schema.Type._
    import org.json4s.JsonDSL._
    s.getType match {
      case DOUBLE | FLOAT | INT | LONG | STRING | BOOLEAN | BYTES | NULL  =>
        ("type" -> s.getType.getName)
      case _ =>
        parse(s.toString)
    }
  }
}

class SchemaSerializer extends CustomSerializer[Schema](format => (
  {
    case j: JValue =>
      new Schema.Parser().parse(j.toString)
  },
  {
    case s: Schema =>
      SchemaSerializer.convert(s)
  }
)
)

class PFAExpressionSerializer extends CustomSerializer[PFAExpression](format => (
  {
    case j: JValue =>
      throw new UnsupportedOperationException("cannot deserialize")
  },
  {
    case expr: PFAExpression =>
      expr.json
  }
)
)

class TreeSerializer extends CustomSerializer[TreeNode](format => (
  {
    case j: JValue =>
      throw new UnsupportedOperationException("cannot deserialize")
  },
  {
    case tree: TreeNode =>
      Trees.json(tree)
  }
)
)

class ParamSerializer extends CustomSerializer[Param](format => (
  {
    case j: JValue =>
      throw new UnsupportedOperationException("cannot deserialize")
  },
  {
    case p: Param =>
      import org.json4s.JsonDSL._
      if (p.simpleSchema) {
        (p.name -> p.`type`.getFullName)
      } else {
        val schemaSerializer = new SchemaSerializer().serialize(format)
        (p.name -> schemaSerializer(p.`type`))
      }

  }
)
) 
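
A small sketch of the convert helper above: primitive Avro schemas collapse to a simple type object, while complex schemas round-trip through parse. The demo object is illustrative:

import com.ibm.aardpfark.pfa.document.SchemaSerializer
import org.apache.avro.Schema
import org.json4s.native.JsonMethods.{ compact, render }

object SchemaSerializerDemo extends App {
  val doubleSchema = new Schema.Parser().parse("\"double\"") // a primitive schema
  println(compact(render(SchemaSerializer.convert(doubleSchema)))) // {"type":"double"}
}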
Example 40
Source File: VideoDisplay.scala    From jvm-toxcore-c   with GNU General Public License v3.0 5 votes vote down vote up
package im.tox.tox4j.av.callbacks.video

import java.io.Closeable

import im.tox.tox4j.av.data.{ Height, Width }
import im.tox.tox4j.testing.autotest.AutoTestSuite.timed
import org.scalatest.Assertions

import scala.util.Try

abstract class VideoDisplay[Parsed, Canvas] extends Assertions with Closeable {

  def width: Width
  def height: Height

  protected def canvas: Try[Canvas]
  protected def parse(
    y: Array[Byte], u: Array[Byte], v: Array[Byte],
    yStride: Int, uStride: Int, vStride: Int
  ): Parsed
  protected def displaySent(canvas: Canvas, frameNumber: Int, parsed: Parsed): Unit
  protected def displayReceived(canvas: Canvas, frameNumber: Int, parsed: Parsed): Unit

  final def displaySent(frameNumber: Int, y: Array[Byte], u: Array[Byte], v: Array[Byte]): Unit = {
    val width = this.width.value
    canvas.foreach(displaySent(_, frameNumber, parse(y, u, v, width, width / 2, width / 2)))
  }

  
  final def displayReceived(
    frameNumber: Int,
    y: Array[Byte], u: Array[Byte], v: Array[Byte],
    yStride: Int, uStride: Int, vStride: Int
  ): Option[(Int, Int)] = {
    canvas.toOption.map { canvas =>
      val (parseTime, parsed) = timed(parse(y, u, v, yStride, uStride, vStride))
      val displayTime = timed(displayReceived(canvas, frameNumber, parsed))

      (parseTime, displayTime)
    }
  }

} 
Example 41
Source File: ConsoleVideoDisplay.scala    From jvm-toxcore-c   with GNU General Public License v3.0 5 votes vote down vote up
package im.tox.tox4j.av.callbacks.video

import java.io.PrintStream

import im.tox.tox4j.av.data.{ Height, Width }

import scala.util.{ Success, Try }

final case class ConsoleVideoDisplay(width: Width, height: Height) extends VideoDisplay[Seq[String], PrintStream] {

  override protected def canvas: Try[PrintStream] = Success(System.out)

  override protected def displaySent(canvas: PrintStream, frameNumber: Int, senderImage: Seq[String]): Unit = {
    // Don't display the sent image in text mode.
  }

  override protected def displayReceived(canvas: PrintStream, frameNumber: Int, receiverImage: Seq[String]): Unit = {
    canvas.print("\u001b[H\u001b[2J")
    receiverImage.foreach(canvas.println)
  }

  override protected def parse(
    y: Array[Byte], u: Array[Byte], v: Array[Byte],
    yStride: Int, uStride: Int, vStride: Int
  ): Seq[String] = {
    val printable = ".-~:;/<>=()ot%!?@&O8SX$#"

    for (yPos <- 0 until height.value) yield {
      new String(y.slice(yPos * yStride, yPos * yStride + width.value).map {
        case b =>
          printable(((b & 0xff) / 255.0 * (printable.length - 1)).toInt)
      })
    }
  }

  override def close(): Unit = ()

} 
Example 42
Source File: AudioPlayback.scala    From jvm-toxcore-c   with GNU General Public License v3.0 5 votes vote down vote up
package im.tox.tox4j.av.callbacks.audio

import javax.sound.sampled._

import im.tox.tox4j.av.data.SamplingRate

import scala.util.Try

object AudioPlayback {

  def showWave(pcm: Array[Short], width: Int): String = {
    val height = width / 10

    val screen = (0 until height).map(_ => Array.fill[Char](width)(' '))

    val maxSampleValue = -Short.MinValue
    for ((sample, x) <- pcm.zipWithIndex) {
      val y = valueToRange(sample + maxSampleValue, maxSampleValue * 2, height)
      screen(y)(valueToRange(x, pcm.length, width)) = '#'
    }

    screen.map(new String(_)).mkString("\n")
  }

  private def valueToRange(value: Double, maxValue: Int, maxRange: Int): Int = {
    (value / maxValue * maxRange).toInt
  }

  private def serialiseAudioFrame(pcm: Array[Short]): Array[Byte] = {
    val buffer = Array.ofDim[Byte](pcm.length * 2)
    for (i <- buffer.indices by 2) {
      buffer(i) = (pcm(i / 2) >> 8).toByte
      buffer(i + 1) = pcm(i / 2).toByte
    }

    buffer
  }

}

final class AudioPlayback(samplingRate: SamplingRate) {

  def play(pcm: Array[Short]): Unit = {
    soundLine.foreach { soundLine =>
      val buffer = AudioPlayback.serialiseAudioFrame(pcm)
      soundLine.write(buffer, 0, buffer.length)
    }
  }

  def done(length: Int): Boolean = {
    soundLine.toOption.map(_.getLongFramePosition >= length).getOrElse(true)
  }

  private val soundLine = Try {
    val format = new AudioFormat(samplingRate.value, 16, 1, true, true)
    val info = new DataLine.Info(classOf[SourceDataLine], format)
    val soundLine = AudioSystem.getLine(info).asInstanceOf[SourceDataLine]
    soundLine.open(format, samplingRate.value)
    soundLine.start()
    soundLine
  }

} 
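
showWave is pure and easy to exercise in isolation; a minimal sketch rendering a synthetic sine wave as ASCII art (values illustrative):

import im.tox.tox4j.av.callbacks.audio.AudioPlayback

object ShowWaveDemo extends App {
  // a few periods of a sine wave at full 16-bit amplitude
  val pcm = Array.tabulate[Short](800)(i => (math.sin(i / 20.0) * Short.MaxValue).toShort)
  println(AudioPlayback.showWave(pcm, width = 80))
}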
Example 43
Source File: KafkaFlowExample.scala    From kafka-scala-api   with Apache License 2.0 5 votes vote down vote up
package com.example.flow

import org.apache.spark.streaming.dstream.DStream._
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.joda.time.DateTime
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods._

import scala.util.Try

case class Purchase(item_id: String, amount: BigDecimal, time: Long)
case class Key(item_id: String, time: DateTime)
case class Summary(item_id: String, time: DateTime, total: BigDecimal)

object KafkaFlowExample {
  implicit val formats = DefaultFormats

  def extract(message: String): Option[(Key, BigDecimal)] = {
    for {
      parsed <- Try(parse(message)).toOption
      purchase <- parsed.extractOpt[Purchase]
    } yield {
      val datetime = new DateTime(purchase.time)
      val roundedTime = datetime.withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfSecond(0)
      Key(purchase.item_id, roundedTime) -> purchase.amount
    }
  }

  def transformStream(stream: InputDStream[String]): DStream[Summary] = {
    stream
      .flatMap(extract)
      .reduceByKey(_ + _)
      .map { case (key, amount) =>
        Summary(key.item_id, key.time, amount)
      }
  }
} 
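
extract can be exercised without Spark or Kafka; a sketch with a hand-written message whose JSON shape follows the Purchase case class above (the demo object and values are illustrative):

import com.example.flow.KafkaFlowExample

object ExtractDemo extends App {
  val msg = """{"item_id":"widget-1","amount":9.99,"time":1500000000000}"""
  KafkaFlowExample.extract(msg) match {
    case Some((key, amount)) => println(s"${key.item_id} @ ${key.time}: $amount")
    case None                => println("unparseable message")
  }
}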
Example 44
Source File: TypeCast.scala    From spark-google-spreadsheets   with Apache License 2.0 5 votes vote down vote up
package com.github.potix2.spark.google.spreadsheets.util

import java.math.BigDecimal
import java.sql.{Date, Timestamp}
import java.text.NumberFormat
import java.util.Locale

import org.apache.spark.sql.types._

import scala.util.Try

object TypeCast {

  private[spreadsheets] def castTo(
                                   datum: String,
                                   castType: DataType,
                                   nullable: Boolean = true
                                 ): Any = {
    castType match {
      case _: ByteType => datum.toByte
      case _: ShortType => datum.toShort
      case _: IntegerType => datum.toInt
      case _: LongType => datum.toLong
      case _: FloatType => Try(datum.toFloat)
        .getOrElse(NumberFormat.getInstance(Locale.getDefault()).parse(datum).floatValue())
      case _: DoubleType => Try(datum.toDouble)
        .getOrElse(NumberFormat.getInstance(Locale.getDefault()).parse(datum).doubleValue())
      case _: BooleanType => datum.toBoolean
      case _: DecimalType => new BigDecimal(datum.replaceAll(",", ""))
      case _: TimestampType => Timestamp.valueOf(datum)
      case _: DateType => Date.valueOf(datum)
      case _: StringType => datum
      case _ => throw new RuntimeException(s"Unsupported type: ${castType.typeName}")
    }
  }
} 
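
Since castTo is private[spreadsheets], a sketch exercising it must live under that package; the demo object below is hypothetical:

package com.github.potix2.spark.google.spreadsheets.util

import org.apache.spark.sql.types._

object TypeCastDemo extends App {
  println(TypeCast.castTo("42", IntegerType))      // 42
  println(TypeCast.castTo("3.14", DoubleType))     // 3.14
  println(TypeCast.castTo("2020-01-01", DateType)) // 2020-01-01
}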
Example 45
Source File: UpickleCustomizationSupport.scala    From akka-http-json   with Apache License 2.0 5 votes vote down vote up
package de.heikoseeberger.akkahttpupickle

import akka.http.javadsl.common.JsonEntityStreamingSupport
import akka.http.scaladsl.common.EntityStreamingSupport
import akka.http.scaladsl.marshalling.{ Marshaller, Marshalling, ToEntityMarshaller }
import akka.http.scaladsl.model.{ ContentTypeRange, HttpEntity, MediaType, MessageEntity }
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshal, Unmarshaller }
import akka.http.scaladsl.util.FastFuture
import akka.stream.scaladsl.{ Flow, Source }
import akka.util.ByteString
import UpickleCustomizationSupport._

import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.util.Try
import scala.util.control.NonFatal

// This companion object only exists for binary compatibility: adding methods with default
// implementations (including vals, as they create synthetic methods) is not binary-compatible.
private object UpickleCustomizationSupport {

  private def jsonStringUnmarshaller(support: UpickleCustomizationSupport) =
    Unmarshaller.byteStringUnmarshaller
      .forContentTypes(support.unmarshallerContentTypes: _*)
      .mapWithCharset {
        case (ByteString.empty, _) => throw Unmarshaller.NoContentException
        case (data, charset)       => data.decodeString(charset.nioCharset.name)
      }

  private def jsonSourceStringMarshaller(support: UpickleCustomizationSupport) =
    Marshaller.oneOf(support.mediaTypes: _*)(support.sourceByteStringMarshaller)

  private def jsonStringMarshaller(support: UpickleCustomizationSupport) =
    Marshaller.oneOf(support.mediaTypes: _*)(Marshaller.stringMarshaller)
}


// Only a fragment of the original trait survives in this listing; the enclosing
// declaration below is restored from context (`apiInstance`, `jsonSource` and
// `SourceOf` are members defined elsewhere in the original trait).
trait UpickleCustomizationSupport {

  implicit def sourceMarshaller[A](implicit
      writes: apiInstance.Writer[A],
      support: JsonEntityStreamingSupport = EntityStreamingSupport.json()
  ): ToEntityMarshaller[SourceOf[A]] =
    jsonSourceStringMarshaller(this).compose(jsonSource[A])
} 
Example 46
Source File: TwitterSinkConnector.scala    From kafka-tweet-producer   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import java.util

import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.SinkConnector

import scala.collection.JavaConverters._
import scala.util.{Failure, Try}

class TwitterSinkConnector extends SinkConnector with Logging {
  private var configProps : util.Map[String, String] = null

  
  override def start(props: util.Map[String, String]): Unit = {
    log.info(s"Starting Twitter sink task with ${props.toString}.")
    configProps = props
    Try(new TwitterSinkConfig(props)) match {
      case Failure(f) => throw new ConnectException("Couldn't start TwitterSinkConnector due to configuration error.", f)
      case _ =>
    }
  }

  override def stop(): Unit = {}
  override def version(): String = ""
} 
Example 47
Source File: TwitterSourceConnector.scala    From kafka-tweet-producer   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import java.util
import org.apache.kafka.connect.connector.{Task, Connector}
import org.apache.kafka.connect.errors.ConnectException
import scala.collection.JavaConverters._
import scala.util.{Failure, Try}


// The enclosing class declaration was truncated in this listing; it is restored here
// from context, mirroring TwitterSinkConnector above.
class TwitterSourceConnector extends Connector with Logging {
  private var configProps: util.Map[String, String] = null

  override def start(props: util.Map[String, String]): Unit = {
    log.info(s"Starting Twitter source task with ${props.toString}.")
    configProps = props
    Try(new TwitterSourceConfig(props)) match {
      case Failure(f) => throw new ConnectException("Couldn't start Twitter source due to configuration error: "
          + f.getMessage, f)
      case _ =>
    }
  }

  override def stop() = {}
  override def version(): String = ""
} 
Example 48
Source File: TestSinkTask.scala    From kafka-tweet-producer   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import org.apache.kafka.connect.sink.SinkRecord
import scala.collection.JavaConverters._
import scala.util.{Success, Try}

class TestSinkTask extends TestTwitterBase {
  test("Strings put to to Task are tweeted") {
    val sinkTask = new TwitterSinkTask()
    val myTestTweet = "I tweet, ergo sum."
    sinkTask.writer = Some(new SimpleTwitterWriter {
      //TODO: use DI?
      def updateStatus(s: String): Try[Long] = {
        s shouldEqual myTestTweet
        Success(5)
      }
    })
    val sr = new SinkRecord("topic", 5, null, null, null, myTestTweet, 123)
    sinkTask.put(Seq(sr).asJava)
  }

} 
Example 49
Source File: CustomDerivations.scala    From zio-config   with Apache License 2.0 5 votes vote down vote up
package zio.config.examples.magnolia

import java.time.{ LocalDate, ZonedDateTime }
import scala.util.Try
import zio.config._
import zio.config.magnolia.DeriveConfigDescriptor._

import zio.config.typesafe._

object CustomDerivations extends App {
  case class AppConfig(jobName: String, details: Option[Detail], s3Path: S3Path)

  case class Detail(containerId: String, executionTime: Either[ZonedDateTime, LocalDate])

  case class S3Path(s: String)

  object S3Path {
    // Suppose you want to validate the s3 path both when reading the config and when
    // writing it back. If this implicit didn't exist, zio-config-magnolia would fall back
    // to its default behaviour and derive an instance for S3Path, since it is a simple case class.
    implicit val descriptorOfS3Path: Descriptor[S3Path] =
      Descriptor[String]
        .xmapEither(
          s => validateS3Path(s).toRight(s"Invalid s3 path: ${s}"),
          value => validateS3Path(value.s).map(_.s).toRight("Cannot write. Invalid S3 path.")
        )

    private def validateS3Path(s3Path: String): Option[S3Path] =
      if (s3Path.startsWith("s3://")) Some(S3Path(s3Path)) else None
  }

  // Good to keep implicit derivations within companion objects.
  // Preferable to give descriptions to enrich error reporting of zio-config.
  object Detail

  val config =
    """
    jobName : "spark"
    s3Path  : "s3://path"
    details : {
      containerId : abcdefg
      executionTime: "2020-06-20T17:15:23.601712+10:00[Australia/Sydney]"
    }
    """

  // Custom derivation for ZonedDateTime. Since ZonedDateTime is an external type,
  // we can't place this implicit in its companion object, so it is defined globally
  // for the automatic derivation to pick it up.
  implicit val descriptorOfZonedDateTime: Descriptor[ZonedDateTime] =
    Descriptor[String]
      .xmapEitherELeftPartial(
        x => Try(ZonedDateTime.parse(x)).toEither
      )(_.toString)(_.getMessage) ?? "time in zoned date time"

  val appConfigDesc =
    descriptor[AppConfig]

  val source = TypesafeConfigSource.fromHoconString(config) match {
    case Right(a) => a
    case Left(_)  => throw new Exception("bad hocon string")
  }

  val s = read(appConfigDesc from source)

  assert(
    s == Right(
      AppConfig(
        "spark",
        Some(Detail("abcdefg", Left(ZonedDateTime.parse("2020-06-20T17:15:23.601712+10:00[Australia/Sydney]")))),
        S3Path("s3://path")
      )
    )
  )
} 
Example 50
Source File: TypesafeConfigSource.scala    From zio-config   with Apache License 2.0 5 votes vote down vote up
package zio.config.typesafe

import java.io.File
import java.lang.{ Boolean => JBoolean }

import com.typesafe.config._
import zio.config.PropertyTree.{ Leaf, _ }
import zio.config.{ ConfigSource, _ }
import zio.{ IO, Task, ZIO }

import scala.collection.JavaConverters._
import scala.util.{ Failure, Success, Try }

object TypesafeConfigSource {
  def fromDefaultLoader: Either[String, ConfigSource] =
    fromTypesafeConfig(ConfigFactory.load.resolve)

  def fromHoconFile[A](
    file: File
  ): Task[ConfigSource] =
    IO.effect(ConfigFactory.parseFile(file).resolve)
      .flatMap(typesafeConfig => {
        ZIO
          .fromEither(fromTypesafeConfig(typesafeConfig))
          .mapError(str => new RuntimeException(str))
      })

  def fromHoconString(
    input: String
  ): Either[String, zio.config.ConfigSource] =
    fromTypesafeConfig(
      ConfigFactory.parseString(input).resolve
    )

  def fromTypesafeConfig(
    input: => com.typesafe.config.Config
  ): Either[String, ConfigSource] =
    Try {
      input
    } match {
      case Failure(exception) => Left(exception.getMessage)
      case Success(value) =>
        getPropertyTree(value) match {
          case Left(value)  => Left(value)
          case Right(value) => Right(ConfigSource.fromPropertyTree(value, "hocon", LeafForSequence.Invalid))
        }
    }

  private[config] def getPropertyTree(
    input: com.typesafe.config.Config
  ): Either[String, PropertyTree[String, String]] = {
    def loopBoolean(value: Boolean)         = Leaf(value.toString)
    def loopNumber(value: Number)           = Leaf(value.toString)
    val loopNull                            = PropertyTree.empty
    def loopString(value: String)           = Leaf(value)
    def loopList(values: List[ConfigValue]) = Sequence(values.map(loopAny))

    def loopConfig(config: ConfigObject) =
      Record(config.asScala.toVector.map { case (key, value) => key -> loopAny(value) }.toMap)

    def loopAny(value: ConfigValue): PropertyTree[String, String] = value.valueType() match {
      case ConfigValueType.OBJECT  => loopConfig(value.asInstanceOf[ConfigObject])
      case ConfigValueType.LIST    => loopList(value.asInstanceOf[ConfigList].asScala.toList)
      case ConfigValueType.BOOLEAN => loopBoolean(value.unwrapped().asInstanceOf[JBoolean])
      case ConfigValueType.NUMBER  => loopNumber(value.unwrapped().asInstanceOf[Number])
      case ConfigValueType.NULL    => loopNull
      case ConfigValueType.STRING  => loopString(value.unwrapped().asInstanceOf[String])
    }

    Try(loopConfig(input.root())) match {
      case Failure(t) =>
        Left(
          "Unable to form the zio.config.PropertyTree from Hocon string." +
            " This may be due to the presence of explicit usage of nulls in hocon string. " +
            t.getMessage
        )
      case Success(value) => Right(value)
    }
  }
} 
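
A minimal sketch of fromHoconString; the config keys are made up:

import zio.config.typesafe.TypesafeConfigSource

object HoconSourceDemo extends App {
  val sourceOrError = TypesafeConfigSource.fromHoconString(
    """
      |name : "atlas"
      |ports : [8080, 8081]
    """.stripMargin
  )
  println(sourceOrError.isRight) // true for well-formed HOCON
}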
Example 51
Source File: GlobalConfig.scala    From sbt-api-builder   with MIT License 5 votes vote down vote up
package apibuilder.sbt

import java.io.File

import sbt.IO

import scala.util.Try

final case class GlobalConfig(profiles: Map[String, Profile] = Map.empty) extends AnyVal {
  override def toString: String = profiles.keys.mkString(", ")
}
final case class Profile(token: String) extends AnyVal

object GlobalConfig {
  private val ProfileM = "^\\s*\\[\\s*(profile\\s+|)(\\w+)\\s*\\]\\s*$".r
  private val TokenM   = "^\\s*token\\s*=\\s*(\\w+)$".r

  private[this] implicit final class Ext(val acc: List[(String, Option[Profile])]) extends AnyVal {
    def hasNotSeen(pn: String): Boolean = !acc.exists { case (pn0, _) => pn0 == pn }
  }

  def load(f: File): Either[Throwable, GlobalConfig] =
    Try {
      IO.reader(f) { r =>
        GlobalConfig(
          IO.foldLines(r, List.empty[(String, Option[Profile])]) {
              case (acc, ProfileM(_, pn)) if acc.hasNotSeen(pn) => (pn -> None) :: acc
              case ((cpn, None) :: rest, TokenM(t))             => (cpn -> Some(Profile(t))) :: rest
              case (acc, _)                                     => acc
            }
            .collect { case (profile, Some(config)) => profile -> config }
            .toMap
        )
      }
    }.toEither
} 
Example 52
Source File: ModelToServe.scala    From kafka-with-akka-streams-kafka-streams-tutorial   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.scala.modelServer.model

import com.lightbend.model.modeldescriptor.ModelDescriptor

import scala.util.Try



case class ModelToServe(name: String, description: String,
                        modelType: ModelDescriptor.ModelType, model: Array[Byte], dataType: String)

case class ServingResult(processed: Boolean, result: Double = 0.0, duration: Long = 0L)

object ServingResult{
  val noModel = ServingResult(processed = false)
  def apply(result: Double, duration: Long): ServingResult = ServingResult(processed = true, result, duration)
}

object ModelToServe {
  def fromByteArray(message: Array[Byte]): Try[ModelToServe] = Try {
    val m = ModelDescriptor.parseFrom(message)
    if (m.messageContent.isData) {
      new ModelToServe(m.name, m.description, m.modeltype, m.getData.toByteArray, m.dataType)
    } else {
      throw new Exception("Location based is not yet supported")
    }
  }
} 
Example 53
Source File: DataRecord.scala    From kafka-with-akka-streams-kafka-streams-tutorial   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.scala.modelServer.model

import com.lightbend.model.winerecord.WineRecord

import scala.util.Try


object DataRecord {
  // We inject random parsing errors.
  val percentErrors = 5  // 5%
  val rand = new util.Random()

  // Exercise:
  // This implementation assumes `WineRecords`, of course. Can it be made more generic?
  def fromByteArray(message: Array[Byte]): Try[WineRecord] = Try {
    if (rand.nextInt(100) < percentErrors) throw new RuntimeException(s"FAKE parse error")
    else WineRecord.parseFrom(message)
  }
} 
Example 54
Source File: ModelWithDescriptor.scala    From kafka-with-akka-streams-kafka-streams-tutorial   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.scala.modelServer.model

import java.io.{DataInputStream, DataOutputStream}

import com.lightbend.model.modeldescriptor.ModelDescriptor

import scala.collection.Map
import com.lightbend.scala.modelServer.model.PMML.PMMLModel
import com.lightbend.scala.modelServer.model.tensorflow.TensorFlowModel

import scala.util.Try


case class ModelWithDescriptor(model: Model, descriptor: ModelToServe){}

object ModelWithDescriptor {

  private val factories = Map(
    ModelDescriptor.ModelType.PMML.name -> PMMLModel,
    ModelDescriptor.ModelType.TENSORFLOW.name -> TensorFlowModel
  )

  private val factoriesInt = Map(
    ModelDescriptor.ModelType.PMML.index -> PMMLModel,
    ModelDescriptor.ModelType.TENSORFLOW.index -> TensorFlowModel
  )

  def fromModelToServe(descriptor : ModelToServe): Try[ModelWithDescriptor] = Try{
    println(s"New model - $descriptor")
    factories.get(descriptor.modelType.name) match {
      case Some(factory) => ModelWithDescriptor(factory.create(descriptor),descriptor)
      case _ => throw new Throwable("Undefined model type")
    }
  }

  def readModel(input : DataInputStream) : Option[Model] = {
    input.readLong.toInt match{
      case length if length > 0 =>
        val `type` = input.readLong.toInt
        val bytes = new Array[Byte](length)
        input.readFully(bytes) // readFully: a plain read may return before filling the buffer
        factoriesInt.get(`type`) match {
          case Some(factory) => try {
            Some(factory.restore(bytes))
          }
          catch {
            case t: Throwable =>
              System.out.println("Error Deserializing model")
              t.printStackTrace()
              None
          }
          case _ => None
        }
      case _ => None
    }
  }

  def writeModel(output : DataOutputStream, model: Model) : Unit = {
    if(model == null)
      output.writeLong(0L)
    else {
      try {
        val bytes = model.toBytes
        output.writeLong(bytes.length)
        output.writeLong(model.getType)
        output.write(bytes)
      } catch {
        case t: Throwable =>
          System.out.println("Error Serializing model")
          t.printStackTrace()
      }
    }
  }
} 
Example 55
Source File: Mappers.scala    From kafka-with-akka-streams-kafka-streams-tutorial   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.scala.kafkastreams.modelserver

import com.lightbend.model.winerecord.WineRecord
import com.lightbend.scala.modelServer.model.{DataRecord, ModelToServe, ModelWithDescriptor, ServingResult}
import org.apache.kafka.streams.kstream.{Predicate, ValueMapper}

import scala.util.Try

class DataValueMapper extends ValueMapper[Array[Byte], Try[WineRecord]] {
  override def apply(value: Array[Byte]): Try[WineRecord] = DataRecord.fromByteArray(value)
}

class DataValueFilter extends Predicate[Array[Byte], Try[WineRecord]]{
  override def test(key: Array[Byte], value: Try[WineRecord]): Boolean = value.isSuccess
}

class ModelValueMapper extends ValueMapper[Array[Byte], Try[ModelToServe]] {
  override def apply(value: Array[Byte]): Try[ModelToServe] = ModelToServe.fromByteArray(value)
}

class ModelValueFilter extends Predicate[Array[Byte], Try[ModelToServe]]{
  override def test(key: Array[Byte], value: Try[ModelToServe]): Boolean = value.isSuccess
}

class ModelDescriptorMapper extends ValueMapper[Try[ModelToServe],  Try[ModelWithDescriptor]] {
  override def apply(value: Try[ModelToServe]):  Try[ModelWithDescriptor] = ModelWithDescriptor.fromModelToServe(value.get)
}

class ResultPrinter extends ValueMapper[ServingResult,  ServingResult] {
  override def apply(value: ServingResult):  ServingResult = {
    if(value.processed) println(s"Calculated quality - ${value.result} calculated in ${value.duration} ms")
    else println("No model available - skipping")
    value
  }
}

class ModelDescriptorFilter extends Predicate[Array[Byte], Try[ModelWithDescriptor]]{
  override def test(key: Array[Byte], value: Try[ModelWithDescriptor]): Boolean = value.isSuccess
} 
Example 56
Source File: DataSourceValidator.scala    From iep-apps   with Apache License 2.0 5 votes vote down vote up
package com.netflix.atlas.stream

import com.netflix.atlas.eval.stream.Evaluator.DataSource
import com.netflix.atlas.eval.stream.Evaluator.DataSources
import com.netflix.atlas.json.Json

import scala.collection.mutable
import scala.jdk.CollectionConverters._
import scala.util.Failure
import scala.util.Success
import scala.util.Try

// Parses and validates DataSources.
case class DataSourceValidator(maxDataSourcesPerSession: Int, validateFunc: DataSource => Unit) {

  def validate(input: String): Either[List[IdAndError], DataSources] = {
    val errorMap = mutable.Map[String, mutable.Set[String]]()
    var dataSourceList: List[DataSource] = List.empty[DataSource]
    try {
      dataSourceList = Json.decode[List[DataSource]](input)
    } catch {
      case e: Exception =>
        addError("_", s"failed to parse input: ${e.getMessage}", errorMap)
    }
    validate(dataSourceList, errorMap)
  }

  def validate(dataSourceList: List[DataSource]): Either[List[IdAndError], DataSources] = {
    validate(dataSourceList, mutable.Map.empty)
  }

  private def validate(
    dataSourceList: List[DataSource],
    errorMap: mutable.Map[String, mutable.Set[String]]
  ): Either[List[IdAndError], DataSources] = {

    // Validate size limit first
    if (dataSourceList.size > maxDataSourcesPerSession) {
      addError("_", s"number of DataSources cannot exceed $maxDataSourcesPerSession", errorMap)
    } else {
      // Validate each DataSource
      val visitedIds = mutable.Set[String]()
      dataSourceList.foreach(ds => {
        val id = ds.getId
        // Validate id
        if (id == null) {
          addError(id, "id cannot be null", errorMap)
        } else if (id.isEmpty) {
          addError(id, "id cannot be empty", errorMap)
        } else {
          if (visitedIds.contains(id)) {
            addError(id, "id cannot be duplicated", errorMap)
          } else {
            visitedIds.add(id)
          }
        }
        // Validate uri
        Try(validateFunc(ds)) match {
          case Success(_) =>
          case Failure(e) => addError(id, s"invalid uri: ${e.getMessage}", errorMap)
        }
      })
    }

    if (errorMap.nonEmpty) {
      Left(
        errorMap
          .map { case (id, errorList) => IdAndError(id, errorList.mkString("; ")) }
          .toList
          .sortBy(_.id)
      )
    } else {
      Right(new DataSources(dataSourceList.toSet.asJava))
    }
  }

  private def addError(
    id: String,
    value: String,
    errorMap: mutable.Map[String, mutable.Set[String]]
  ): Unit = {
    val normalizedId =
      if (id == null) {
        "<null_id>"
      } else if (id.isEmpty) {
        "<empty_id>"
      } else {
        id
      }
    errorMap.getOrElseUpdate(normalizedId, mutable.Set[String]()) += value
  }

  case class IdAndError(id: String, error: String)
} 
Example 57
Source File: EmbeddedKafkaSpecSupport.scala    From embedded-kafka-schema-registry   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka.schemaregistry

import java.net.{InetAddress, Socket}

import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{
  Available,
  NotAvailable,
  ServerStatus
}
import org.scalatest.Assertion
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Milliseconds, Seconds, Span}
import org.scalatest.wordspec.AnyWordSpecLike

import scala.util.{Failure, Success, Try}

trait EmbeddedKafkaSpecSupport
    extends AnyWordSpecLike
    with Matchers
    with Eventually
    with IntegrationPatience {

  implicit val config: PatienceConfig =
    PatienceConfig(Span(1, Seconds), Span(100, Milliseconds))

  def expectedServerStatus(port: Int, expectedStatus: ServerStatus): Assertion =
    eventually {
      status(port) shouldBe expectedStatus
    }

  private def status(port: Int): ServerStatus = {
    Try(new Socket(InetAddress.getByName("localhost"), port)) match {
      case Failure(_) => NotAvailable
      case Success(socket) =>
        socket.close() // don't leak the probe socket
        Available
    }
  }
}

object EmbeddedKafkaSpecSupport {
  sealed trait ServerStatus
  case object Available    extends ServerStatus
  case object NotAvailable extends ServerStatus
} 
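
A hypothetical spec built on this trait, placed in the same file so the ServerStatus imports above apply; it assumes a broker is already listening on embedded-kafka's default port 6001:

class KafkaAvailabilitySpec extends EmbeddedKafkaSpecSupport {
  "an embedded Kafka broker" should {
    "be reachable on its configured port" in {
      expectedServerStatus(6001, Available)
    }
  }
}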
Example 58
Source File: HttpUtils.scala    From reactive-programming   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend

import java.util.concurrent.Executor

import com.ning.http.client.AsyncHttpClient
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.jsoup.select.Elements
import scala.collection.JavaConverters._
import scala.concurrent.{ Promise, ExecutionContext, Future }
import scala.util.Try

object HttpClient {
  def apply(): AsyncHttpClient = new AsyncHttpClient

  implicit class HttpClientToScala(client: AsyncHttpClient) {
    def get(url: String)(implicit ec: Executor): Future[String] = {
      val f = client.prepareGet(url).execute
      val p = Promise[String]()
      f.addListener(new Runnable {
        override def run(): Unit = {
          val response = f.get
          if (response.getStatusCode < 400)
            p.success(response.getResponseBodyExcerpt(131072))
          else p.failure(new RuntimeException(s"BadStatus: ${response.getStatusCode}"))
        }
      }, ec)
      p.future
    }
  }
}

object HttpUtils {
  implicit class FindLinksFuture(self: Future[String])(implicit ec: ExecutionContext) {
    def links: Future[Option[Iterator[String]]] =
      self.map(body ⇒ findLinks(body))
  }

  def findLinks(body: String): Option[Iterator[String]] =
    Try(Jsoup.parse(body)).map { (document: Document) ⇒
      val links: Elements = document.select("a[href]")
      for (link ← links.iterator().asScala; if link.absUrl("href").startsWith("http://")) yield link.absUrl("href")
    }.toOption
} 
Example 59
Source File: TestSpec.scala    From reactive-programming   with Apache License 2.0 5 votes vote down vote up
package com.test

import java.io.IOException
import java.util.UUID

import akka.actor.{ ActorRef, ActorSystem, PoisonPill }
import akka.event.{ Logging, LoggingAdapter }
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatest.exceptions.TestFailedException
import org.scalatest._
import rx.lang.scala._

import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContextExecutor, Future }
import scala.util.{ Random ⇒ Rnd, Try }

object Random {
  def apply(): Rnd = new Rnd()
}

trait TestSpec extends FlatSpec with Matchers with ScalaFutures with TryValues with OptionValues with Eventually with BeforeAndAfterAll {
  implicit val system: ActorSystem = ActorSystem("test")
  implicit val ec: ExecutionContextExecutor = system.dispatcher
  val log: LoggingAdapter = Logging(system, this.getClass)
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds)
  implicit val timeout = Timeout(50.seconds)

  override protected def afterAll(): Unit = {
    system.terminate()
  }

  
  // The TestProbe used by cleanup was dropped from this listing; restored here.
  val probe = TestProbe()

  def cleanup(actors: ActorRef*): Unit = {
    actors.foreach { (actor: ActorRef) ⇒
      actor ! PoisonPill
      probe watch actor
    }
  }

  implicit class PimpedByteArray(self: Array[Byte]) {
    def getString: String = new String(self)
  }

  implicit class PimpedFuture[T](self: Future[T]) {
    def toTry: Try[T] = Try(self.futureValue)
  }

  implicit class PimpedObservable[T](self: Observable[T]) {
    def waitFor: Unit = {
      self.toBlocking.toIterable.last
    }
  }

  implicit class MustBeWord[T](self: T) {
    def mustBe(pf: PartialFunction[T, Unit]): Unit =
      if (!pf.isDefinedAt(self)) throw new TestFailedException("Unexpected: " + self, 0)
  }

  object Socket { def apply() = new Socket }
  class Socket {
    def readFromMemory: Future[Array[Byte]] = Future {
      Thread.sleep(100) // sleep 100 millis
      "fromMemory".getBytes
    }

    def send(payload: Array[Byte], from: String, failed: Boolean): Future[Array[Byte]] =
      if (failed) Future.failed(new IOException(s"Network error: $from"))
      else {
        Future {
          Thread.sleep(250) // sleep 250 millis, not real life time, but hey
          s"${payload.getString}->$from".getBytes
        }
      }

    def sendToEurope(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] =
      send(payload, "fromEurope", failed)

    def sendToUsa(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] =
      send(payload, "fromUsa", failed)
  }
} 
Example 60
Source File: Pimpers.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.util

import com.typesafe.scalalogging.Logger

import scala.concurrent.{ ExecutionContext, Future }
import scala.language.implicitConversions
import scala.util.{ Failure, Try }

private[sparkplug] object Pimpers {

  implicit class TryPimper[A](t: Try[A]) {
    def withErrorLog(msg: String)(implicit log: Logger): Try[A] =
      t.recoverWith {
        case e ⇒
          log.error(msg, e)
          Failure(e)
      }

    def withFinally[T](block: ⇒ T): Try[A] = {
      block
      t
    }
  }

  implicit class FuturePimper[T](f: Future[T]) {
    def withErrorLog(msg: String)(implicit log: Logger, ec: ExecutionContext): Future[T] = {
      f.onFailure {
        case e ⇒ log.error(msg, e)
      }
      f
    }
  }

  implicit def map2Properties(map: Map[String, String]): java.util.Properties = {
    (new java.util.Properties /: map) { case (props, (k, v)) ⇒ props.put(k, v); props }
  }
} 
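
A sketch of withErrorLog in action; since Pimpers is private[sparkplug], the demo assumes it lives under the springnz.sparkplug package (the demo package and logger name are illustrative):

package springnz.sparkplug.demo

import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory
import springnz.sparkplug.util.Pimpers._

import scala.util.Try

object PimpersDemo extends App {
  implicit val log: Logger = Logger(LoggerFactory.getLogger("demo"))
  val parsed = Try("not-a-number".toInt).withErrorLog("could not parse input")
  println(parsed.isFailure) // true, and the failure was logged with the message above
}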
Example 61
Source File: Executable.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.core

import com.typesafe.config.Config

import scala.util.Try

trait Executable {
  def executor: Executor[Try[_]]
}

class LocalExecutable(
    appName: String,
    sparkMaster: Option[String] = None,
    config: Option[Config] = Some(LocalConfigurer.defaultConfig)) extends Executable {

  override def executor = new SparkExecutor {
    override val configurer: Configurer = new LocalConfigurer(appName, sparkMaster, config)
  }
} 
Example 62
Source File: Executor.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.core

import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import springnz.sparkplug.util.Pimpers._

import scala.util.Try

trait ExecutorU {
  protected def configurer: Configurer
  def execute[A](operation: SparkOperation[A]): Any
}

trait Executor[B] extends ExecutorU {
  def execute[A](operation: SparkOperation[A]): B
}

// A SparkExecutor creates a spark context on the fly and releases it
trait SparkExecutor extends Executor[Try[_]] with LazyLogging {
  def execute[A](operation: SparkOperation[A]): Try[A] = {
    logger.debug(s"SparkExecutor configuration:\n$configurer")
    configurer { cfg ⇒
      val ctx = new SparkContext(cfg)
      implicit lazy val log = logger
      Try {
        logger.debug("Executing operation: " + operation.getClass.getName)
        operation.run(ctx)
      }.withErrorLog("Error executing operation")
        .withFinally {
          ctx.stop()
        }
    }
  }
}

// A LongLivedExecutor creates a spark context and keeps it for as long as it lives
// The StopContext must be manually called (just like in Spark)
trait LongLivedExecutor extends Executor[Try[_]] with LazyLogging {

  lazy val sparkContext: SparkContext = configurer { cfg ⇒ new SparkContext(cfg) }

  def execute[A](operation: SparkOperation[A]): Try[A] = {
    implicit lazy val log = logger
    Try {
      logger.debug("Executing operation: " + operation.getClass.getName)
      operation.run(sparkContext)
    }.withErrorLog("Error executing operation")
  }

  def stopContext(): Unit = sparkContext.stop()

} 
Example 63
Source File: ClientExecutorTests.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.client

import org.scalatest._
import springnz.sparkplug.util.Logging

import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
import scala.util.Try

trait ClientExecutableFixture extends BeforeAndAfterEach { this: Suite ⇒

  var executor: ClientExecutor = null

  override def beforeEach() {
    try executor = ClientExecutor.create()
    finally super.beforeEach()
  }

  override def afterEach() {
    try super.afterEach()
    finally {
      executor.shutDown()
      // give it a chance to clean up (shutdown seems to happen asynchronously)
    }
  }
}

class ClientExecutorTests extends WordSpec with ShouldMatchers with Logging with ClientExecutableFixture with Inspectors {
  implicit val ec = scala.concurrent.ExecutionContext.global

  "client executor" should {
    "Calculate a single job" in {
      val future = executor.execute[Any]("springnz.sparkplug.executor.LetterCountPlugin", None)
      val result = Await.result(future, 30.seconds)
      result shouldBe ((2, 2))
    }

    "Handle an error in a job request" in {
      val future = executor.execute[Any]("springnz.sparkplug.executor.InvalidPluginName", None)
      intercept[ClassNotFoundException] {
        Await.result(future, 30.seconds)
      }
    }

    "Still carry on working after a failure" in {
      val futureError = executor.execute[Any]("springnz.sparkplug.executor.InvalidPluginName", None)
      intercept[ClassNotFoundException] {
        Await.result(futureError, 30.seconds)
      }
      val futureOk = executor.execute[Any]("springnz.sparkplug.executor.LetterCountPlugin", None)
      val result = Await.result(futureOk, 30.seconds)
      result shouldBe ((2, 2))
    }

    "Calculate a sequence of job requests in parallel" in {
      val futures: List[Future[Any]] = List.fill(10) { executor.execute[Any]("springnz.sparkplug.executor.LetterCountPlugin", None) }

      implicit val ec = scala.concurrent.ExecutionContext.global
      val sequence: Future[List[Any]] = Future.sequence(futures)
      val results = Await.result(sequence, 30.seconds)

      // the combined results come back as an untyped list
      results shouldBe a[List[_]]

      results.length shouldBe 10

      forAll(results) {
        result ⇒ result shouldBe ((2, 2))
      }
    }

    "Handle immediate timeout" in {
      val future = executor.execute[Any]("springnz.sparkplug.executor.WaitPlugin", None)
      Try {
        Await.result(future, 0.seconds)
      }

    }

  }
}

class ClientExecutorSingleTests extends WordSpec with ShouldMatchers with Logging {
  "client executor" should {
    "Calculate a single job" in {
      implicit val ec = scala.concurrent.ExecutionContext.global
      val future = ClientExecutor[(Long, Long)]("springnz.sparkplug.executor.LetterCountPlugin", None)
      val result = Await.result(future, 30.seconds)
      result shouldBe ((2, 2))
    }

  }
} 
Example 64
Source File: RequestBroker.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.executor

import akka.actor.TypedActor.{ PostStop, PreStart }
import akka.actor._
import org.apache.spark.SparkContext
import springnz.sparkplug.core.SparkPlugException
import springnz.sparkplug.executor.InternalMessageTypes.RoutedRequest
import springnz.sparkplug.executor.MessageTypes._

import scala.util.Try

class RequestBroker(sparkClient: String, postStopAction: ⇒ Unit)(implicit sparkContext: SparkContext)
    extends Actor with PreStart with PostStop with ActorLogging {

  override def preStart() = {
    Try {
      log.info(s"RequestBroker preStart called. Logging Spark Configuration below:")
      // logging Spark Configuration
      val sparkConfDebug = sparkContext.getConf
        .toDebugString
        .lines
        .filterNot(_.contains("password"))
        .mkString("\n")
      log.info(s"Spark-Configuration:\n$sparkConfDebug\n")

      log.info(s"Notifying client at address '$sparkClient' of readiness.")
      if (sparkClient.nonEmpty) {
        log.info(s"Trying to contact sparkClient at $sparkClient")
        val clientActor = context.actorSelection(sparkClient)
        log.info(s"Sending ServerReady to contact sparkClient at $sparkClient")
        clientActor ! ServerReady
      }
      log.info("Finished loading RequestBroker. Ready for action.")
    }.recover {
      case reason ⇒
        log.error(s"Error initialising Request Broker.", reason)
        log.error(s"Sending the ShutDown message.")
        self ! ShutDown
    }
  }

  override def postStop() = {
    log.info("Running postStopAction")
    postStopAction
  }

  type JobRequestInfo = (ActorRef, JobRequest)

  def receive = receiveJobRequests(0, None, Map.empty[String, JobRequestInfo])

  def receiveJobRequests(jobCount: Int, clientRef: Option[ActorRef], processorMap: Map[String, JobRequestInfo]): Receive = {
    case job: JobRequest ⇒
      log.info(s"Creating new jobProcessor for '${job.factoryClassName}'.")
      val processor = context.actorOf(JobProcessor.props, s"jobProcessor-$jobCount")
      context.become(receiveJobRequests(jobCount + 1, clientRef, processorMap.updated(processor.path.toString, (sender, job))))
      context.watch(processor)
      processor ! RoutedRequest(job, sender)

    case ShutDown ⇒
      log.info(s"Shutting down...")
      context.system.shutdown()

    case ClientReady ⇒
      log.info("Received ClientReady message from Client. Deathwatching on client.")
      context.become(receiveJobRequests(jobCount, Some(sender), processorMap))
      context.watch(sender)

    case Terminated(terminatedRef) ⇒
      clientRef.foreach { clientRefInner ⇒
        if (terminatedRef.path.toString == clientRefInner.path.toString) {
          log.info("Client was terminated (or timed out). Sending the shutdown signal.")
          self ! ShutDown
        }
      }
      processorMap.get(terminatedRef.path.toString).foreach {
        case (requestor, job) ⇒
          val message = s"Processor '${terminatedRef.path.toString}' terminated (or timed out) for job $job."
          log.error(message)
          requestor ! JobFailure(job, new SparkPlugException(message))
      }

    case CancelAllJobs ⇒
      log.info(s"Broker cancelling all Spark Jobs...")
      sparkContext.cancelAllJobs()
  }
} 
Example 65
Source File: LetterCountExamples.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.executor

import org.apache.spark.rdd.RDD
import springnz.sparkplug.core.{ SparkPlugin, _ }
import springnz.sparkplug.util.Logging

import scala.util.Try

object LetterCount extends LocalExecutable("LetterCount") with Logging {
  def main(args: Array[String]): Unit = {
    log.info(s"StartLetterCount...")
    val result: Try[(Long, Long)] = executor.execute((new LetterCount)())
    log.info(s"Result of LetterCount.main: $result")
  }
}

class LetterCountPlugin extends LetterCount with SparkPlugin {
  override def apply(input: Option[Any]): SparkOperation[(Long, Long)] = super.apply()
}

class LetterCount extends Logging {

  def apply(): SparkOperation[(Long, Long)] = SparkOperation { ctx ⇒

    val textRDDProvider = SparkOperation[RDD[String]] {
      ctx ⇒ ctx.makeRDD("There is nothing either good or bad, but thinking makes it so".split(' '))
    }

    val nums = for {
      // on-site decision what to plug in - different to VaultEmails for example
      logData ← textRDDProvider
      numAs = logData.filter(_.contains("a")).count()
      numBs = logData.filter(_.contains("b")).count()
    } yield {
      log.info(s"$numAs 'a's, $numBs 'b's")
      (numAs, numBs)
    }

    nums.run(ctx)
  }
}

object LetterCountFunctionStyle extends (() ⇒ SparkOperation[(Long, Long)]) with Logging {

  def apply(): SparkOperation[(Long, Long)] = SparkOperation { ctx ⇒
    val textRDDProvider = SparkOperation[RDD[String]] {
      ctx ⇒ ctx.makeRDD("There is nothing either good or bad, but thinking makes it so".split(' '))
    }

    val nums = for {
      // on-site decision what to plug in - different to VaultEmails for example
      logData ← textRDDProvider
      numAs = logData.filter(_.contains("a")).count()
      numBs = logData.filter(_.contains("b")).count()
    } yield {
      log.info(s"$numAs 'a's, $numBs 'b's")
      (numAs, numBs)
    }

    nums.run(ctx)
  }
} 
Example 66
Source File: ExecutorService.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug.executor

import java.net.{ URLDecoder, URLEncoder }
import java.time.LocalDate

import akka.actor._
import com.typesafe.config.ConfigFactory
import springnz.sparkplug.core._
import springnz.sparkplug.util.Logging

import scala.util.{ Properties, Try }

object Constants {
  val defaultAkkaRemoteConfigSection = "akkaRemote"
  val actorSystemName = "sparkplugExecutorSystem"
  val brokerActorName = "sparkplugRequestBroker"
}

object ExecutorService extends Logging {
  import Constants._

  lazy val defaultRemoteAkkaConfig = ConfigFactory.load.getConfig(s"sparkplug.$defaultAkkaRemoteConfigSection")

  // TODO: proper command line parsing to allow richer config options
  def main(args: Array[String]): Unit = {
    if (args.length < 4)
      throw new IllegalArgumentException(s"Expected at least 4 arguments to ExecutorService. Args: ${args.toList}")
    val appName = args(1)
    val sparkClientPath = args(3)

    log.info(s"Starting Sparkplug ExecutorService: SparkClient = $sparkClientPath: ${LocalDate.now()}")

    val remoteConfig = if (args.length == 6) {
      val urlEncodedConfig = args(5)
      val configString = URLDecoder.decode(urlEncodedConfig, "UTF-8")
      val config = ConfigFactory.parseString(configString)
      log.info(s"Using akka remote config:\n$configString")
      config
    } else {
      log.info(s"Using default akka remote config from config section 'sparkplug.$defaultAkkaRemoteConfigSection'")
      defaultRemoteAkkaConfig
    }

    import scala.collection.JavaConversions._
    def env = System.getenv().toMap
    log.debug(s"Environment:\n $env")

    val system = ActorSystem(actorSystemName, remoteConfig)

    val executorService = new ExecutorService(appName)
    executorService.start(system, sparkClientPath)
    log.info("Terminating the remote application.")
  }
}

class ExecutorService(appName: String, brokerName: String = Constants.brokerActorName) extends LongLivedExecutor with Logging {

  // Note that the SparkConf inherits all its settings from spark-submit
  override val configurer: Configurer = new LocalConfigurer(appName, Properties.envOrNone("SPARK_MASTER"), None)

  def start(system: ActorSystem, sparkClientPath: String): Try[Unit] = {

    val actorOperation = SparkOperation[Unit] { implicit sparkContext ⇒

      def postStopAction() = {
        log.info("Cancelling any jobs (if any are running).")
        sparkContext.cancelAllJobs()
        log.info("Stopping Spark context.")
        sparkContext.stop()
      }

      log.info("Creating requestBroker for ExecutorService.")
      system.actorOf(Props(new RequestBroker(sparkClientPath, postStopAction)), name = brokerName)
    }

    log.info("Executing container operation (everything happens inside this method).")
    val result = execute(actorOperation)
    log.info("Finished executing container operation (everything happens inside this method).")
    result
  }

} 
Example 67
Source File: StdAvroModelFactory.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.factory.avro

import java.io.File

import org.apache.commons.{vfs => vfs1, vfs2}
import com.eharmony.aloha.io.vfs.{Vfs1, Vfs2}
import com.eharmony.aloha.audit.impl.avro.Score
import com.eharmony.aloha.factory.ModelFactory
import org.apache.avro.generic.GenericRecord

import scala.util.Try

object StdAvroModelFactory {

  @deprecated(message = "Prefer StdAvroModelFactory.fromConfig(conf: FactoryConfig)", since = "4.0.1")
  def apply(modelDomainSchemaVfsUrl: String,
            modelCodomainRefInfoStr: String,
            imports: Seq[String] = Nil,
            classCacheDir: Option[File] = None,
            dereferenceAsOptional: Boolean = true,
            useVfs2: Boolean = true): Try[ModelFactory[GenericRecord, Score]] = {

    val vfs = url(modelDomainSchemaVfsUrl, useVfs2)

    vfs.flatMap { u =>
      UrlConfig(
        u,
        modelCodomainRefInfoStr,
        imports,
        classCacheDir,
        dereferenceAsOptional
      )()
    }
  }

  private[this] def url(modelDomainSchemaVfsUrl: String, useVfs2: Boolean) = {
    val u =
      if (useVfs2)
        Try { Vfs2(vfs2.VFS.getManager.resolveFile(modelDomainSchemaVfsUrl)) }
      else Try { Vfs1(vfs1.VFS.getManager.resolveFile(modelDomainSchemaVfsUrl)) }
    FactoryConfig.wrapException(u)
  }
} 
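
A sketch of calling the deprecated apply above; the schema resource, return type and imports mirror the StdAvroModelFactoryTest example that follows:

import scala.util.Try
import org.apache.avro.generic.GenericRecord
import com.eharmony.aloha.audit.impl.avro.Score
import com.eharmony.aloha.factory.ModelFactory

val factory: Try[ModelFactory[GenericRecord, Score]] =
  StdAvroModelFactory(
    modelDomainSchemaVfsUrl = "res:avro/class7.avpr",
    modelCodomainRefInfoStr = "Double",
    imports = Seq("com.eharmony.aloha.feature.BasicFunctions._", "scala.math._"))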
Example 68
Source File: StdAvroModelFactoryTest.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.factory.avro

import com.eharmony.aloha.audit.impl.avro.Score
import com.eharmony.aloha.factory.ModelFactory
import com.eharmony.aloha.io.vfs.Vfs1
import com.eharmony.aloha.models.Model
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.commons.io.IOUtils
import org.junit.Assert.assertEquals
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner

import scala.util.Try

@RunWith(classOf[BlockJUnit4ClassRunner])
class StdAvroModelFactoryTest {
  import StdAvroModelFactoryTest._

  private[this] def record = {
    val r = new GenericData.Record(TheSchema)
    r.put("req_str_1", "smart handsome stubborn")
    r
  }
}

object StdAvroModelFactoryTest {
  private lazy val TheSchema = {
    val is = getClass.getClassLoader.getResourceAsStream(SchemaUrlResource)
    try new Schema.Parser().parse(is) finally IOUtils.closeQuietly(is)
  }

  private val ExpectedResult = 7d

  private val SchemaUrlResource = "avro/class7.avpr"

  private val SchemaUrl = s"res:$SchemaUrlResource"

  private val SchemaFile = new java.io.File(getClass.getClassLoader.getResource(SchemaUrlResource).getFile)

  private val SchemaVfs1FileObject = org.apache.commons.vfs.VFS.getManager.resolveFile(SchemaUrl)

  private val SchemaVfs2FileObject = org.apache.commons.vfs2.VFS.getManager.resolveFile(SchemaUrl)

  private val Imports = Seq("com.eharmony.aloha.feature.BasicFunctions._", "scala.math._")

  private val ReturnType = "Double"

  private val ModelJson =
    """
      |{
      |  "modelType": "Regression",
      |  "modelId": { "id": 0, "name": "" },
      |  "features" : {
      |    "my_attributes": "${req_str_1}.split(\"\\\\W+\").map(v => (s\"=$v\", 1.0))"
      |  },
      |  "weights": {
      |    "my_attributes=handsome": 1,
      |    "my_attributes=smart": 2,
      |    "my_attributes=stubborn": 4
      |  }
      |}
    """.stripMargin
} 
Example 69
Source File: PositiveLabelsFunction.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.dataset.vw.multilabel

import com.eharmony.aloha.AlohaException
import com.eharmony.aloha.dataset.DvProducer
import com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator.{determineLabelNamespaces, LabelNamespaces}
import com.eharmony.aloha.reflect.RefInfo
import com.eharmony.aloha.semantics.compiled.CompiledSemantics
import com.eharmony.aloha.semantics.func.GenAggFunc

import scala.collection.breakOut
import scala.util.{Failure, Success, Try}
import scala.collection.{immutable => sci}


private[multilabel] abstract class PositiveLabelsFunction[A, K: RefInfo] { self: DvProducer =>

  private[multilabel] def positiveLabelsFn(
      semantics: CompiledSemantics[A],
      positiveLabels: String
  ): Try[GenAggFunc[A, sci.IndexedSeq[K]]] =
    getDv[A, sci.IndexedSeq[K]](
      semantics, "positiveLabels", Option(positiveLabels), Option(Vector.empty[K]))

  private[multilabel] def labelNamespaces(nss: List[(String, List[Int])]): Try[LabelNamespaces] = {
    val nsNames: Set[String] = nss.map(_._1)(breakOut)
    determineLabelNamespaces(nsNames) match {
      case Some(ns) => Success(ns)

      // If there are so many VW namespaces that all available Unicode characters are taken,
      // then a memory error will probably already have occurred.
      case None => Failure(new AlohaException(
        "Could not find any Unicode characters to as VW namespaces. Namespaces provided: " +
          nsNames.mkString(", ")
      ))
    }
  }
} 
Example 70
Source File: VwLabeledJson.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.dataset.vw.labeled.json

import com.eharmony.aloha.dataset.json.{Namespace, SparseSpec}
import com.eharmony.aloha.dataset.vw.json.VwJsonLike
import spray.json.DefaultJsonProtocol

import scala.collection.{immutable => sci}
import scala.util.Try


final case class VwLabeledJson(
        imports: sci.Seq[String],
        features: sci.IndexedSeq[SparseSpec],
        namespaces: Option[Seq[Namespace]] = Some(Nil),
        normalizeFeatures: Option[Boolean] = Some(false),
        label: String,
        importance: Option[String] = Some("1"),
        tag: Option[String] = None)
extends VwJsonLike {

    def validateImportance(): Boolean = {
        // valid when absent, or when present and parseable as a non-negative number
        importance.isEmpty || Try {
            importance.get.trim.toDouble
        }.map {
            case d if d >= 0 => true
            case _ => false
        }.getOrElse(false)
    }
}

object VwLabeledJson extends DefaultJsonProtocol {
    implicit val labeledVwJsonFormat = jsonFormat7(VwLabeledJson.apply)
} 
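
A small sketch of constructing and serialising this case class with the format above (empty feature list for brevity; spray-json finds labeledVwJsonFormat via the companion object):

import spray.json._

val spec = VwLabeledJson(
    imports = Vector.empty,
    features = Vector.empty,
    label = "${label}")

spec.validateImportance()         // true: the default importance "1" parses to a non-negative number
println(spec.toJson.compactPrint)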
Example 71
Source File: CovariateProducer.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.dataset

import com.eharmony.aloha.dataset.density.{Dense, Sparse}
import com.eharmony.aloha.dataset.json.CovariateJson
import com.eharmony.aloha.reflect.RefInfo
import com.eharmony.aloha.semantics.compiled.CompiledSemantics
import com.eharmony.aloha.semantics.func.GenAggFunc

import scala.collection.immutable.IndexedSeq
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success, Try}



sealed trait CovariateProducer[@specialized(Double) Density] { self: CompilerFailureMessages =>
    protected[this] def featExtFuncProd[A](successes: IndexedSeq[(String, GenAggFunc[A, Density])]): FeatureExtractorFunction[A, Density]
    protected[this] def refInfoB(): RefInfo[Density]

    protected[this] def getCovariates[A](semantics: CompiledSemantics[A], cj: CovariateJson[Density], defDefault: Option[Density] = None): Try[FeatureExtractorFunction[A, Density]] = {
        // Get a new semantics with the imports changed to reflect the imports from the Json Spec
        // Import of ExecutionContext.Implicits.global is necessary.
        val semanticsWithImports = semantics.copy[A](imports = cj.imports)

        def compile(it: Iterator[json.Spec[Density]], successes: List[(String, GenAggFunc[A, Density])]): Try[FeatureExtractorFunction[A, Density]] = {
            if (!it.hasNext)
                Success{ featExtFuncProd(successes.reverse.toIndexedSeq) }
            else {
                val spec = it.next()
                val f = semanticsWithImports.createFunction[Density](spec.spec, spec.defVal orElse defDefault)(refInfoB())
                f match {
                    case Left(msgs) => Failure { failure(spec.name, msgs) }
                    case Right(success) => compile(it, (spec.name, success) :: successes)
                }
            }
        }

        compile(cj.features.iterator, Nil)
    }
}

trait SparseCovariateProducer extends CovariateProducer[Iterable[(String, Double)]] { self: CompilerFailureMessages =>
    protected[this] final def featExtFuncProd[A](successes: IndexedSeq[(String, GenAggFunc[A, Sparse])]) = SparseFeatureExtractorFunction(successes)
    protected[this] final def refInfoB() = RefInfo[Sparse]
}

trait DenseCovariateProducer extends CovariateProducer[Double] { self: CompilerFailureMessages =>
    protected[this] final def featExtFuncProd[A](successes: IndexedSeq[(String, GenAggFunc[A, Dense])]) = DenseFeatureExtractorFunction(successes)
    protected[this] final def refInfoB() = RefInfo[Dense]
} 
Example 72
Source File: LibSvmLabelRowCreator.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.dataset.libsvm.labeled

import com.eharmony.aloha.dataset.density.Sparse
import com.eharmony.aloha.dataset.libsvm.labeled.json.LibSvmLabeledJson
import com.eharmony.aloha.dataset.libsvm.unlabeled.LibSvmRowCreator
import com.eharmony.aloha.dataset._
import com.eharmony.aloha.semantics.compiled.CompiledSemantics
import com.eharmony.aloha.semantics.func.GenAggFunc
import com.eharmony.aloha.util.Logging
import com.eharmony.aloha.util.hashing.HashFunction
import spray.json.JsValue

import scala.util.Try

class LibSvmLabelRowCreator[-A](
        covariates: FeatureExtractorFunction[A, Sparse],
        label: GenAggFunc[A, String],
        hash: HashFunction,
        numBits: Int = LibSvmRowCreator.DefaultBits)
extends LibSvmRowCreator[A](covariates, hash, numBits)
   with LabelRowCreator[A, CharSequence] {

    override def apply(data: A): (MissingAndErroneousFeatureInfo, CharSequence) = {
        val (missing, iv) = super.apply(data)
        val lab = label(data)
        val sb = new StringBuilder().append(lab).append(" ").append(iv)
        (missing, sb)
    }

    override def stringLabel: GenAggFunc[A, Option[String]] = label.andThenGenAggFunc(Option.apply)
}

object LibSvmLabelRowCreator {
    final case class Producer[A]()
        extends RowCreatorProducer[A, CharSequence, LibSvmLabelRowCreator[A]]
        with RowCreatorProducerName
        with SparseCovariateProducer
        with DvProducer
        with CompilerFailureMessages
        with Logging {

        type JsonType = LibSvmLabeledJson
        def parse(json: JsValue): Try[LibSvmLabeledJson] = Try { json.convertTo[LibSvmLabeledJson] }
        def getRowCreator(semantics: CompiledSemantics[A], jsonSpec: LibSvmLabeledJson): Try[LibSvmLabelRowCreator[A]] = {
            val hash: HashFunction = jsonSpec.salt match {
                case Some(s) => new com.eharmony.aloha.util.hashing.MurmurHash3(s)
                case None    => com.eharmony.aloha.util.hashing.MurmurHash3
            }

            val spec =
                for {
                    label <- getLabel(semantics, jsonSpec)
                    cov <- getCovariates(semantics, jsonSpec)
                    spec = jsonSpec.numBits match {
                        case Some(b) => new LibSvmLabelRowCreator(cov, label, hash, b)
                        case _       => new LibSvmLabelRowCreator(cov, label, hash)
                    }
                } yield {
                    warn(hash.salts)
                    spec
                }

            spec
        }

        private[this] def getLabel(semantics: CompiledSemantics[A], jsonSpec: LibSvmLabeledJson): Try[GenAggFunc[A, String]] =
            getDv(semantics, "label", Option(jsonSpec.label), Option(""))
    }
} 
Example 73
Source File: modelFactoryPlaceholder.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.factory

import java.io.File

import com.eharmony.aloha.factory.ex.AlohaFactoryException
import com.eharmony.aloha.io.StringReadable
import org.apache.commons.{vfs, vfs2}
import spray.json.{JsObject, pimpString}

import scala.util.{Failure, Try}

private[factory] trait ImportedModelPlaceholder {
    def resolveFileContents(): Try[JsObject]
}

private[factory] case class Vfs2ImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            vfs2.VFS.getManager.resolveFile(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve VFS2 file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromVfs2(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for VFS2 file: $file", f) }
        }
    } yield json
}

private[factory] case class Vfs1ImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            vfs.VFS.getManager.resolveFile(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve VFS1 file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromVfs1(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for VFS1 file: $file", f) }
        }
    } yield json
}

private[factory] case class FileImportedModelPlaceholder(fileDescriptor: String) extends ImportedModelPlaceholder {
    def resolveFileContents() = for {
        file <- Try {
            new File(fileDescriptor)
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't resolve file: $fileDescriptor", f) }
        }
        json <- Try {
            StringReadable.fromFile(file).parseJson.asJsObject
        } recoverWith {
            case f => Failure { new AlohaFactoryException(s"Couldn't get JSON for file: $file", f) }
        }
    } yield json
} 
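
A quick sketch of resolving one of these placeholders. The path is a hypothetical example, and the call is only possible from within com.eharmony.aloha.factory because the placeholders are private[factory]:

import scala.util.Try
import spray.json.JsObject

val contents: Try[JsObject] =
    FileImportedModelPlaceholder("/tmp/model.json").resolveFileContents()
// Failures carry an AlohaFactoryException describing which step failed.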
Example 74
Source File: RuntimeClasspathScanning.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.reflect

import com.eharmony.aloha
import org.reflections.Reflections

import scala.reflect.{classTag, ClassTag}
import scala.util.Try

trait RuntimeClasspathScanning {

  // Helpers referenced below. Their bodies are reconstructed here on the assumption that
  // scanning targets Scala objects, which compile to classes whose names end in "$".
  private[this] val objectSuffix: String = "$"
  private[this] def isObject(c: Class[_]): Boolean =
    Option(c.getCanonicalName).exists(_.endsWith(objectSuffix))
  protected[this] def scanObjects[OBJ: ClassTag, A: ClassTag](
      methodName: String,
      packageToSearch: String = aloha.pkgName
  ): Seq[A] = {
    val reflections = new Reflections(packageToSearch)
    import scala.collection.JavaConversions.asScalaSet
    val objects = reflections.getSubTypesOf(classTag[OBJ].runtimeClass).toSeq

    val suffixLength = objectSuffix.length

    objects.flatMap {
      case o if isObject(o) =>
        Try {
          // This may have some classloading issues.
          val classObj = Class.forName(o.getCanonicalName.dropRight(suffixLength))
          classObj.getMethod(methodName).invoke(null) match {
            case a: A => a
            case _ => throw new IllegalStateException()
          }
        }.toOption
      case _ => None
    }
  }
} 
Example 75
Source File: simpleTypeSeq.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.util

import com.eharmony.aloha.reflect.RefInfo
import spray.json._
import scala.util.{Failure, Try}

sealed trait SimpleTypeSeq {
  type A
  def refInfo: RefInfo[A]
  def values: Vector[A]
  require(values.distinct.size == values.size, "Elements must be distinct.")
}

object SimpleTypeSeq {
  implicit object SimpleTypeSeqFormat extends JsonFormat[SimpleTypeSeq] {
    import DefaultJsonProtocol.{vectorFormat, LongJsonFormat, DoubleJsonFormat, BooleanJsonFormat, StringJsonFormat, BigDecimalJsonFormat}

    override def read(json: JsValue): SimpleTypeSeq = readSafe(json).get

    override def write(seq: SimpleTypeSeq): JsValue = seq match {
      case LongSeq(s)    => implicitly[JsonFormat[Vector[Long]]].write(s)
      case DoubleSeq(s)  => implicitly[JsonFormat[Vector[Double]]].write(s)
      case BooleanSeq(s) => implicitly[JsonFormat[Vector[Boolean]]].write(s)
      case StringSeq(s)  => implicitly[JsonFormat[Vector[String]]].write(s)
    }

    private[this] def readSafe(json: JsValue) = json match {
      case jsa@JsArray(values) =>
        Try { jsa.convertTo[Vector[BigDecimal]] } flatMap {
          case a if a.forall(x => x.isValidLong && !x.toString().contains(".")) => Try { LongSeq(a.map(_.toLong)) }

          // TODO: Need to extract to version specific src dirs to avoid deprecation errors.
          // TODO: isValidDouble in 2.10 and isExactDouble in 2.11.
          case a if a.forall(_.isValidDouble)                                   => Try { DoubleSeq(a.map(_.toDouble)) }
          case a                                                                => Failure(new DeserializationException(""))
        } recoverWith {
          case _ => Try { BooleanSeq(jsa.convertTo[Vector[Boolean]]) }
        } recoverWith {
          case _ => Try { StringSeq(jsa.convertTo[Vector[String]]) }
        } recoverWith {
          case _ => Failure(new DeserializationException(s"Couldn't produce SimpleTypeSeq for array: ${errorObj(jsa)}"))
        }
      case x => Failure(new DeserializationException(s"Expected Array, found ${errorObj(json)}"))
    }

    private[this] def errorObj(js: JsValue, size: Int = 50) = {
      val compact = js.compactPrint
      if (compact.length < size) compact else compact.substring(0, size) + "..."
    }
  }
}

case class LongSeq(values: Vector[Long]) extends SimpleTypeSeq {
  type A = Long
  def refInfo = RefInfo[Long]
}

object LongSeq {
  def apply(values: Long*) = new LongSeq(values.toVector)
}

case class DoubleSeq(values: Vector[Double]) extends SimpleTypeSeq {
  type A = Double
  def refInfo = RefInfo[Double]
}

object DoubleSeq {
  def apply(values: Double*) = new DoubleSeq(values.toVector)
}

case class BooleanSeq(values: Vector[Boolean]) extends SimpleTypeSeq {
  type A = Boolean
  def refInfo = RefInfo[Boolean]
}

object BooleanSeq {
  def apply(values: Boolean*) = new BooleanSeq(values.toVector)
}

case class StringSeq(values: Vector[String]) extends SimpleTypeSeq {
  type A = String
  def refInfo = RefInfo[String]
}

object StringSeq {
  def apply(values: String*) = new StringSeq(values.toVector)
} 
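
A sketch of the reading behaviour of the format above (the first successful interpretation wins, in the order long, double, boolean, string):

import spray.json._

"[1, 2, 3]".parseJson.convertTo[SimpleTypeSeq]       // LongSeq(Vector(1, 2, 3))
"[1.5, 2.5]".parseJson.convertTo[SimpleTypeSeq]      // DoubleSeq(Vector(1.5, 2.5))
"[true, false]".parseJson.convertTo[SimpleTypeSeq]   // BooleanSeq(Vector(true, false))
"""["a", "b"]""".parseJson.convertTo[SimpleTypeSeq]  // StringSeq(Vector(a, b))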
Example 76
Source File: optionalHandler.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.semantics.compiled.plugin.csv

import scala.collection.immutable
import scala.util.Try


sealed trait OptionalHandler {
    def produceOption[A](fieldName: String, f: Option[String => A], fields: Array[String], missing: String => Boolean): Option[A]
    def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]]
}

case class GracefulOptionalHandler(indices: Map[String, Int]) extends OptionalHandler {
    def produceOption[A](
            fieldName: String,
            f: Option[String => A],
            fields: Array[String],
            missing: String => Boolean
    ): Option[A] = {
        for {
            g <- f
            i <- indices.get(fieldName)
            field = fields(i) if !missing(field)
            x <- Try { g(field) }.toOption
        } yield x
    }

    def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]] = {
        f.map( g =>
            vals.map( v =>
                if (missing(v))
                    None
                else Try { g(v) }.toOption
            )(scala.collection.breakOut)
        ) getOrElse RepeatedIndexedSeq.fill(vals.length)(None)
    }
}

case class FailFastOptionalHandler(indices: Map[String, Int]) extends OptionalHandler {
    def produceOption[A](
            fieldName: String,
            f: Option[String => A],
            fields: Array[String],
            missing: String => Boolean
    ): Option[A] = {
        f.flatMap{ g =>
            val field = fields(indices(fieldName))
            if (missing(field))
                None
            else Option(g(field))
        }
    }

    def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]] = {
        f.map( g =>
            vals.map( v =>
                if (missing(v))
                    None
                else Some(g(v))
            )(scala.collection.breakOut)
        ) getOrElse RepeatedIndexedSeq.fill(vals.length)(None)
    }
}

case class RepeatedIndexedSeq[+A](length: Int, a: A) extends immutable.IndexedSeq[A] {
    def apply(i: Int): A =
        if (0 <= i && i < length)
            a
        else throw new ArrayIndexOutOfBoundsException(s"index $i not in range 0 ... ${length - 1}")
}

object RepeatedIndexedSeq {
    def fill[A](n: Int)(a: A) = RepeatedIndexedSeq(n, a)
} 
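
A small sketch contrasting the two handlers; the field values and index map are illustrative:

val indices = Map("age" -> 0, "name" -> 1)
val fields  = Array("42", "not-a-number")
val toInt: Option[String => Int] = Some(_.toInt)

GracefulOptionalHandler(indices).produceOption("age", toInt, fields, _.isEmpty)   // Some(42)
GracefulOptionalHandler(indices).produceOption("name", toInt, fields, _.isEmpty)  // None: the parse failure is swallowed
FailFastOptionalHandler(indices).produceOption("name", toInt, fields, _.isEmpty)  // throws NumberFormatException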
Example 77
Source File: AnySemanticsWithoutFunctionCreation.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.models

import com.eharmony.aloha.reflect._
import com.eharmony.aloha.semantics.Semantics
import com.eharmony.aloha.semantics.func.GenFunc0

import scala.util.Try


object AnySemanticsWithoutFunctionCreation extends Semantics[Any] {
  def refInfoA: RefInfo[Any] = RefInfo[Any]
  def accessorFunctionNames: Seq[Nothing] = Nil
  def close(): Unit = {}
  def createFunction[B: RefInfo](codeSpec: String, default: Option[B]): Either[Seq[String], GenFunc0[Any, B]] = {
    val right = Try {
      val long = codeSpec.toLong
      Right(GenFunc0(codeSpec, (a: Any) => long.asInstanceOf[B]))
    }
    right.getOrElse(Left(Seq("createFunction not supported.")))
  }
} 
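
As a sketch: despite its name, this semantics can create one kind of function, constants from numeric literals:

AnySemanticsWithoutFunctionCreation.createFunction[Long]("1234", None).isRight  // true: "1234" parses as a Long
AnySemanticsWithoutFunctionCreation.createFunction[Long]("${f}", None).isLeft   // true: anything non-numeric is rejected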
Example 78
Source File: ModelSerializabilityTestBase.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha

import scala.language.existentials
import com.eharmony.aloha
import com.eharmony.aloha.models.{Model, SubmodelBase}
import org.junit.Assert._
import org.junit.Test
import org.reflections.Reflections

import scala.collection.JavaConversions.asScalaSet
import scala.util.Try
import java.lang.reflect.{Method, Modifier}

import com.eharmony.aloha.util.Logging


abstract class ModelSerializabilityTestBase(pkgs: Seq[String], outFilters: Seq[String])
extends Logging {

  def this() = this(pkgs = Seq(aloha.pkgName), Seq.empty)

  @Test def testSerialization(): Unit = {
    val ref = new Reflections(pkgs:_*)
    val submodels = ref.getSubTypesOf(classOf[SubmodelBase[_, _, _, _]]).toSeq
    val models = ref.getSubTypesOf(classOf[Model[_, _]]).toSeq

    val modelClasses =
      (models ++ submodels).
        filterNot { _.isInterface }.
        filterNot { c =>
          val name = c.getName
          outFilters.exists(name.matches)
        }

    if (modelClasses.isEmpty) {
      fail(s"No models found to test for Serializability in packages: ${pkgs.mkString(",")}")
    }
    else {
      debug {
        modelClasses
          .map(_.getCanonicalName)
          .mkString("Models tested for Serializability:\n\t", "\n\t", "")
      }
    }

    modelClasses.foreach { c =>
      val m = for {
        testClass  <- getTestClass(c.getCanonicalName)
        testMethod <- getTestMethod(testClass)
        method     <- ensureTestMethodIsTest(testMethod)
      } yield method

      m.left foreach fail
    }
  }

  private[this] implicit class RightMonad[L, R](e: Either[L, R]) {
    def flatMap[R1](f: R => Either[L, R1]) = e.right.flatMap(f)
    def map[R1](f: R => R1) = e.right.map(f)
  }

  private[this] def getTestClass(modelClassName: String) = {
    val testName = modelClassName + "Test"
    Try {
      Class.forName(testName)
    } map {
      Right(_)
    } getOrElse Left("No test class exists for " + modelClassName)
  }

  private[this] def getTestMethod(testClass: Class[_]) = {
    val testMethodName = "testSerialization"
    lazy val msg = s"$testMethodName doesn't exist in ${testClass.getCanonicalName}."
    Try {
      Option(testClass.getMethod(testMethodName))
    } map {
      case Some(m) => Right(m)
      case None => Left(msg)
    } getOrElse Left(msg)
  }

  private[this] def ensureTestMethodIsTest(method: Method) = {
    if (!Modifier.isPublic(method.getModifiers))
      Left(s"testSerialization in ${method.getDeclaringClass.getCanonicalName} is not public")
    else if (!method.getDeclaredAnnotations.exists(_.annotationType() == classOf[Test]))
      Left(s"testSerialization in ${method.getDeclaringClass.getCanonicalName} does not have a @org.junit.Test annotation.")
    else if (method.getReturnType != classOf[Void] && method.getReturnType != classOf[Unit])
      Left(s"testSerialization in ${method.getDeclaringClass.getCanonicalName} is not a void function. It returns: ${method.getReturnType}")
    else Right(method)
  }
} 
Example 79
Source File: ScalaKinesisProducer.scala    From kpl-scala   with Apache License 2.0 5 votes vote down vote up
package com.contxt.kinesis

import com.amazonaws.services.kinesis.producer.{ KinesisProducer, KinesisProducerConfiguration, UserRecordResult }
import com.google.common.util.concurrent.ListenableFuture
import com.typesafe.config.{ Config, ConfigFactory }
import java.nio.ByteBuffer
import scala.concurrent._
import scala.language.implicitConversions
import scala.util.Try
import collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global

trait ScalaKinesisProducer {

  def streamId: StreamId

  def send(partitionKey: String, data: ByteBuffer, explicitHashKey: Option[String]): Future[UserRecordResult]

  def shutdown(): Future[Unit]
}

object ScalaKinesisProducer {
  def apply(
    streamName: String,
    kplConfig: KinesisProducerConfiguration,
    config: Config = ConfigFactory.load()
  ): ScalaKinesisProducer = {
    val producerStats = ProducerStats.getInstance(config)
    ScalaKinesisProducer(streamName, kplConfig, producerStats)
  }

  def apply(
    streamName: String,
    kplConfig: KinesisProducerConfiguration,
    producerStats: ProducerStats
  ): ScalaKinesisProducer = {
    val streamId = StreamId(kplConfig.getRegion, streamName)
    val producer = new KinesisProducer(kplConfig)
    new ScalaKinesisProducerImpl(streamId, producer, producerStats)
  }

  private[kinesis] implicit def listenableToScalaFuture[A](listenable: ListenableFuture[A]): Future[A] = {
    val promise = Promise[A]
    val callback = new Runnable {
      override def run(): Unit = promise.tryComplete(Try(listenable.get()))
    }
    listenable.addListener(callback, ExecutionContext.global)
    promise.future
  }
}

private[kinesis] class ScalaKinesisProducerImpl(
  val streamId: StreamId,
  private val producer: KinesisProducer,
  private val stats: ProducerStats
) extends ScalaKinesisProducer {
  import ScalaKinesisProducer.listenableToScalaFuture

  stats.reportInitialization(streamId)

  def send(partitionKey: String, data: ByteBuffer, explicitHashKey: Option[String]): Future[UserRecordResult] = {
    stats.trackSend(streamId, data.remaining) {
      producer.addUserRecord(streamId.streamName, partitionKey, explicitHashKey.orNull, data).map { result =>
        if (!result.isSuccessful) throwSendFailedException(result) else result
      }
    }
  }

  def shutdown(): Future[Unit] = shutdownOnce

  private lazy val shutdownOnce: Future[Unit] = {
    val allFlushedFuture = flushAll()
    val shutdownPromise = Promise[Unit]
    allFlushedFuture.onComplete { _ =>
      shutdownPromise.completeWith(destroyProducer())
    }
    val combinedFuture = allFlushedFuture.zip(shutdownPromise.future).map(_ => ())
    combinedFuture.onComplete(_ => stats.reportShutdown(streamId))
    combinedFuture
  }

  private def throwSendFailedException(result: UserRecordResult): Nothing = {
    val attemptCount = result.getAttempts.size
    val errorMessage = result.getAttempts.asScala.lastOption.map(_.getErrorMessage)
    throw new RuntimeException(
      s"Sending a record to $streamId failed after $attemptCount attempts, last error message: $errorMessage."
    )
  }

  private def flushAll(): Future[Unit] = {
    Future {
      blocking {
        producer.flushSync()
      }
    }
  }

  private def destroyProducer(): Future[Unit] = {
    Future {
      blocking {
        producer.destroy()
      }
    }
  }
} 
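
A minimal send-and-shutdown sketch; the stream name and region are placeholders, and the fluent setRegion setter is assumed from the AWS KPL's KinesisProducerConfiguration:

import java.nio.ByteBuffer
import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration
import scala.concurrent.ExecutionContext.Implicits.global

val producer = ScalaKinesisProducer(
  streamName = "example-stream",
  kplConfig = new KinesisProducerConfiguration().setRegion("us-east-1"))

val sent = producer.send("partition-key", ByteBuffer.wrap("payload".getBytes("UTF-8")), explicitHashKey = None)
sent.flatMap(_ => producer.shutdown())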
Example 80
Source File: Init.scala    From cave   with MIT License 5 votes vote down vote up
package init

import java.net.InetAddress
import java.util.UUID

import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration, Worker}
import com.amazonaws.services.kinesis.metrics.impl.NullMetricsFactory
import com.cave.metrics.data.AwsConfig
import com.cave.metrics.data.influxdb.{InfluxConfiguration, InfluxDataSink}
import com.cave.metrics.data.kinesis.RecordProcessorFactory
import com.typesafe.config.ConfigFactory
import org.apache.commons.logging.LogFactory
import play.api.Play

import scala.util.Try

object Init {

  // Docker should place the stream name in this environment variable
  final val EnvStreamName = "STREAM_NAME"

  // The name of this application for Kinesis Client Library
  final val ApplicationName = "cave-db-worker"

  // CloudWatch Reporter parameters
  final val MetricsNamespace = s"metrics-$ApplicationName"
  final val MetricsBufferTime = 1000L
  final val MetricsBufferSize = 200

  final val ThreadWaitTimeout = 10000L

  private val Log = LogFactory.getLog("db-writer-app")

  val worker = createWorker()
  val workerThread = new Thread(worker)

  def start(): Unit = {
    workerThread.start()
  }

  def shutdown(): Unit = {
    worker.shutdown()
    Try (workerThread.join(ThreadWaitTimeout)) recover {
      case e: Exception =>
        Log.info(s"Caught exception while joining worker thread: $e")
    }
  }

  
  private[this] def createWorker(): Worker = {
    val configuration = Play.current.configuration
    val serviceConfFile = configuration.getString("serviceConf").getOrElse("db-writer-service.conf")
    val kinesisAppName = configuration.getString("appName").getOrElse(ApplicationName)
    val appConfig = ConfigFactory.load(serviceConfFile).getConfig("db-writer")
    val awsConfig = new AwsConfig(appConfig)

    val streamName = System.getenv(EnvStreamName) match {
      case "processed" => awsConfig.processedStreamName
      case _ => awsConfig.rawStreamName
    }

    val workerId = s"${InetAddress.getLocalHost.getCanonicalHostName}:${UUID.randomUUID()}"

    Log.info(s"Running $ApplicationName for stream $streamName as worker $workerId")

    // a connection to the InfluxDB backend
    val influxConfig = appConfig.getConfig("influx")

    new Worker(
      // a factory for record processors
      new RecordProcessorFactory(
        awsConfig,
        new InfluxDataSink(InfluxConfiguration(influxConfig))),

      // a client library instance
      new KinesisClientLibConfiguration(kinesisAppName, streamName, awsConfig.awsCredentialsProvider, workerId)
        .withInitialPositionInStream(InitialPositionInStream.TRIM_HORIZON),

      new NullMetricsFactory)
      // TODO: check out the possibility to use CloudWatch Metrics
      // new CWMetricsFactory(awsConfig.awsCredentialsProvider, MetricsNamespace, MetricsBufferTime, MetricsBufferSize))
  }
} 
Example 81
Source File: Checker.scala    From cave   with MIT License 5 votes vote down vote up
package worker

import java.util.concurrent.Executor

import akka.actor.{Actor, ActorLogging, Status}
import akka.pattern.pipe
import com.cave.metrics.data._
import com.cave.metrics.data.evaluator.{CheckEvaluator, DataFetcher}
import init.Init

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

object Checker {
  type Result = Try[Boolean]

  case class Done(alarm: Result)
  case class Aborted(reason: String)
}

class Checker(check: Check) extends Actor with ActorLogging {

  implicit val exec = context.dispatcher.asInstanceOf[Executor with ExecutionContext]
  val evaluator = new CheckEvaluator(check)
  def fetcher = new DataFetcher(Init.influxClientFactory)

  this run check pipeTo self

  def receive = {
    case alarm: Checker.Result =>
      context.parent ! Checker.Done(alarm)
      stop()

    case x: Status.Failure =>
      context.parent ! Checker.Aborted(x.cause.getMessage)
      stop()
  }

  def stop(): Unit = {
    context stop self
  }

  private[worker] def run(check: Check)(implicit ec: ExecutionContext): Future[Try[Boolean]] = {
    val result = evaluator.evaluate(fetcher)
    result map { v =>
      log.warning("Result of evaluation: " + v)
    }
    result
  }
} 
Example 82
Source File: CheckerSpec.scala    From cave   with MIT License 5 votes vote down vote up
package worker

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestKit
import com.cave.metrics.data.evaluator.DataFetcher
import com.cave.metrics.data.influxdb.InfluxClientFactory
import com.cave.metrics.data.{AlertJsonData, Check}
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Try, Success}

class CheckerSpec extends TestKit(ActorSystem()) with WordSpecLike with BeforeAndAfterAll with AlertJsonData with MockitoSugar {

  override def afterAll() = {
    system.shutdown()
  }

  final val SomeReason = "BOOM!"
  val mockClientFactory = mock[InfluxClientFactory]

  def fakeChecker(check: Check): Props = Props(new Checker(check) {
    override def fetcher = new DataFetcher(mockClientFactory)

    override def run(check: Check)(implicit ec: ExecutionContext): Future[Try[Boolean]] = {
      if (check.schedule.alert.description == AlertDescription) Future.successful(Success(true))
      else if (check.schedule.alert.description == AlertFiveDescription) Future.successful(Success(false))
      else Future.failed(new RuntimeException(SomeReason))
    }
  })


  "A checker" must {
    "send Done(true) if an alarm condition has been detected" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(InsufficientOrders), testActor)), "alarm")

      expectMsg(Checker.Done(alarm = Success(true)))
      watch(checker)
      expectTerminated(checker)
    }

    "send Done(false) if no alarm condition has been detected" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(InsufficientOrdersFive), testActor)), "notAlarm")

      expectMsg(Checker.Done(alarm = Success(false)))
      watch(checker)
      expectTerminated(checker)
    }

    "properly finish in case of error" in {
      val checker = system.actorOf(Props(new StepParent(fakeChecker(OrdersLessThanPredicted), testActor)), "error")

      expectMsg(Checker.Aborted(SomeReason))
      watch(checker)
      expectTerminated(checker)
    }
  }
} 
Example 83
Source File: CheckEvaluator.scala    From cave   with MIT License 5 votes vote down vote up
package com.cave.metrics.data.evaluator

import com.cave.metrics.data.Check
import org.joda.time.DateTime

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

class CheckEvaluator(check: Check) extends AbstractEvaluator(check.schedule.alert.condition) {

  def evaluate(fetcher: DataFetcher)(implicit ec: ExecutionContext): Future[Try[Boolean]] = {
    evaluateRange(clusterName = check.schedule.clusterName,
                  databaseName = check.schedule.databaseName,
                  end = check.timestamp)(fetcher, ec)
  }

  override def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, metricName, metricTags, repeats, delay, end)(ec)

  override def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], duration: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, metricName, metricTags, duration, end)(ec)

  override def getData(clusterName: Option[String], databaseName: String, agg: AggregatedSource, repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, agg.toString, Map.empty[String, String], repeats, delay, end)(ec)
} 
Example 84
Source File: AbstractEvaluator.scala    From cave   with MIT License 5 votes vote down vote up
package com.cave.metrics.data.evaluator

import org.joda.time.DateTime

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

abstract class AbstractEvaluator(conditionStr: String) extends AlertParser {

  private val condition = parseAll(anyAlert, conditionStr) match {
    case Success(SimpleAlert(leftOperand, operator, rightOperand, repeatCount, delay), _) =>
      Left((leftOperand, operator, rightOperand, repeatCount, delay))

    case Success(MissingDataAlert(metricSource, duration), _) =>
      Right((metricSource, duration))

    case _ => sys.error("Unsupported check condition: " + conditionStr)
  }

  def evaluateRange(clusterName: Option[String], databaseName: String, end: DateTime)
                   (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Try[Boolean]] = {
    condition match {
      case Left((left, operator, right, repeats, delay)) =>
        val results = for {
          leftResult <- evaluateSource(clusterName, databaseName, end, left, repeats, delay)(fetcher, ec)
          rightResult <- evaluateSource(clusterName, databaseName, end, right, repeats, delay)(fetcher, ec)
        } yield (leftResult, rightResult)

        results map {
          case (Some(l), Some(r)) =>
            val zipped = l.zip(r)
            implicit val op = operator
            scala.util.Success((zipped.size == repeats) && (zipped forall evaluatePair))

          case _ =>
            scala.util.Failure(new RuntimeException("Failed to evaluate: at least one series does not exist."))
        }

      case Right((metricSrc, duration)) =>
        getData(clusterName, databaseName, metricSrc.metric, metricSrc.tags, duration, end)(fetcher, ec) map {
          case Some(values) =>
            scala.util.Success(values.isEmpty)

          case None => scala.util.Failure(new RuntimeException("Cannot evaluate: series does not exist!"))
        }
    }
  }

  def evaluateSource(clusterName: Option[String], databaseName: String, end: DateTime,
                     source: Source, repeats: Int, delay: FiniteDuration)
                    (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    source match {
      case ValueSource(num) => Future.successful(Some(List.fill(repeats)(num)))
      case MetricSource(name, tags) => getData(clusterName, databaseName, name, tags, repeats, delay, end)(fetcher, ec)
      case a: AggregatedSource => getData(clusterName, databaseName, a, repeats, delay, end)(fetcher, ec)

      case FactoredSource(src, factor) => src match {
        case ValueSource(num) =>
          Future.successful(Some(List.fill(repeats)(num * factor)))

        case MetricSource(name, tags) =>
          getData(clusterName, databaseName, name, tags, repeats, delay, end)(fetcher, ec) map(_.map(_.map(_ * factor)))

        case a: AggregatedSource =>
          getData(clusterName, databaseName, a, repeats, delay, end)(fetcher, ec) map(_.map(_.map(_ * factor)))

        case _ => Future.failed(new RuntimeException("Impossible to evaluate."))
      }
    }

  def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]]

  def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], duration: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]]

  def getData(clusterName: Option[String], databaseName: String, agg: AggregatedSource, repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]]

  def evaluatePair(values: (Double, Double))(implicit op: Operator.Value): Boolean = op match {
    case Operator.LessThan            => values._1 <  values._2
    case Operator.LessThanOrEqual     => values._1 <= values._2
    case Operator.GreaterThan         => values._1 >  values._2
    case Operator.GreaterThanOrEqual  => values._1 >= values._2
    case Operator.Equal               => values._1 == values._2
    case Operator.NotEqual            => values._1 != values._2
  }
} 
Example 85
Source File: KinesisDataSink.scala    From cave   with MIT License 5 votes vote down vote up
package com.cave.metrics.data.kinesis

import java.nio.ByteBuffer

import com.amazonaws.services.kinesis.AmazonKinesisAsyncClient
import com.amazonaws.services.kinesis.model.PutRecordRequest
import com.cave.metrics.data.{AwsConfig, Metric, SeqDataSink}
import org.apache.commons.logging.LogFactory
import play.api.libs.json.Json

import scala.util.Try
import scala.util.control.Exception._
import scala.util.control.NonFatal

class KinesisDataSink(config: AwsConfig, streamName: String) extends SeqDataSink {

  val log = LogFactory.getLog(classOf[KinesisDataSink])

  var client: Option[AmazonKinesisAsyncClient] = None

  
  override def sendMetric(metric: Metric): Unit = {

    def createRequest: PutRecordRequest = {
      val data = Json.toJson(metric).toString()
      log.info(s"Sending $data ...")

      val request = new PutRecordRequest
      request.setStreamName(streamName)
      request.setData(ByteBuffer.wrap(data.getBytes))
      request.setPartitionKey(metric.partitionKey)
      request
    }

    client foreach { c =>
      Try(c.putRecord(createRequest)) recover {
        case NonFatal(e) =>
          log.warn(s"Caught exception while talking to Kinesis: $e")
          throw e
      }
    }
  }
} 
Example 86
Source File: RecordProcessor.scala    From cave   with MIT License 5 votes vote down vote up
package com.cave.metrics.data.kinesis

import java.util.{List => JList}

import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer}
import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownReason
import com.amazonaws.services.kinesis.model.Record
import com.cave.metrics.data._
import org.apache.commons.logging.LogFactory
import play.api.libs.json.Json

import scala.collection.JavaConverters._
import scala.util.{Success, Try}

class RecordProcessor(config: AwsConfig, sink: DataSink) extends IRecordProcessor with ExponentialBackOff {

  private[this] var shardId: String = _
  private var nextCheckpointTimeMillis: Long = _

  private[this] val log = LogFactory.getLog(classOf[RecordProcessor])

  // Back off and retry settings for checkpoint
  override val MaxBackOffTimeInMillis = 10000L
  override val ShouldLogErrors: Boolean = true
  private val NumRetries = 10
  private val CheckpointIntervalInMillis = 1000L

  override def initialize(shardId: String): Unit = {
    this.shardId = shardId
  }

  override def shutdown(check: IRecordProcessorCheckpointer, reason: ShutdownReason): Unit = {
    if (reason == ShutdownReason.TERMINATE) {
      checkpoint(check)
    }
  }

  override def processRecords(records: JList[Record], check: IRecordProcessorCheckpointer): Unit = {
    val metrics = (records.asScala map convert).filter(_.isSuccess)
    if (metrics.size == records.size()) {
      // all metrics successfully converted
      log.info(s"Received $metrics")
      sink.sendMetrics(for (Success(metric) <- metrics) yield metric)
    } else {
      log.error("Failed to parse records into Metric objects.")
    }

    if (System.currentTimeMillis() > nextCheckpointTimeMillis) {
      checkpoint(check)
      nextCheckpointTimeMillis = System.currentTimeMillis() + CheckpointIntervalInMillis
    }
  }

  private[this] def convert(record: Record): Try[Metric] =
    Try (Json.parse(new String(record.getData.array())).as[Metric])

  private[this] def checkpoint(check: IRecordProcessorCheckpointer): Unit = {
    Try {
      retryUpTo(NumRetries) {
        check.checkpoint()
      }
    } recover {
      case e: Exception =>
        log.warn(s"Failed to checkpoint shard $shardId: ${e.getMessage}")
    }
  }
} 
Example 87
Source File: PostgresCacheDataManagerImpl.scala    From cave   with MIT License 5 votes vote down vote up
package com.cave.metrics.data.postgresql

import com.cave.metrics.data._

import scala.slick.driver.PostgresDriver.simple._
import scala.util.Try

class PostgresCacheDataManagerImpl(awsConfig: AwsConfig) extends PostgresDataManagerImpl(awsConfig) with CacheDataManager {

  
  override def getEnabledAlerts(): Try[Map[String, List[Schedule]]] = {
    Try {
      val data = db.withTransaction { implicit session =>

        val result = for {
          ((org, alert), team) <- organizationsTable.filter(_.deletedAt.isEmpty) leftJoin alertsTable.filter( a => a.deletedAt.isEmpty && a.status) on (_.id === _.organizationId) leftJoin teamsTable.filter(_.deletedAt.isEmpty) on (_._2.teamId === _.id)
        } yield (org, team.?, alert.?)

        result.list.map { case (organization, maybeTeam, maybeAlert) =>
          val alert = maybeAlert map { row =>
              Alert(Some(row.id.toString), row.description, row.status.getOrElse(false), row.period, row.condition, row.handbookUrl, Alert.routingFromString(row.routing))
          }

          organization.name -> alert.map(model => Schedule(organization.name, maybeTeam.map(_.name), maybeTeam.map(_.cluster) getOrElse(organization.cluster), organization.notificationUrl, model))
        }
      }

      data.groupBy(_._1).mapValues(_.map(_._2).flatten)
    }
  }
} 
Example 88
Source File: EncodingUtils.scala    From scala-openrtb   with Apache License 2.0 5 votes vote down vote up
package com.powerspace.openrtb.json.util

import com.powerspace.openrtb.json.OpenRtbExtensions.ExtensionRegistry
import io.circe._
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.decoding.ConfiguredDecoder
import io.circe.generic.extras.encoding.ConfiguredAsObjectEncoder
import scalapb.{ExtendableMessage, GeneratedEnumCompanion, UnknownFieldSet}
import shapeless.Lazy

import scala.reflect.ClassTag
import scala.util.Try

object EncodingUtils {

  import PrimitivesUtils._
  import io.circe.generic.extras.semiauto._

  import scala.reflect.runtime.universe.TypeTag
  import scala.reflect.runtime.{currentMirror => cm}

  private implicit val customConfig: Configuration = Configuration.default.withSnakeCaseMemberNames.withDefaults

  def extendedEncoder[Ext <: ExtendableMessage[Ext]](
    implicit encoder: Lazy[ConfiguredAsObjectEncoder[Ext]],
    er: ExtensionRegistry,
    tag: ClassTag[Ext]): Encoder[Ext] =
    er.encoderWithExtensions[Ext](baseEncoder = openRtbEncoder)

  def openRtbEncoder[A](implicit encoder: Lazy[ConfiguredAsObjectEncoder[A]]): Encoder[A] =
    deriveConfiguredEncoder[A](encoder).cleanRtb

  def extendedDecoder[Ext <: ExtendableMessage[Ext]](
    implicit encoder: Lazy[ConfiguredDecoder[Ext]],
    er: ExtensionRegistry,
    tag: ClassTag[Ext]): Decoder[Ext] =
    er.decoderWithExtensions[Ext](baseDecoder = openRtbDecoder)

  def openRtbDecoder[A](implicit decoder: Lazy[ConfiguredDecoder[A]]): Decoder[A] = deriveConfiguredDecoder[A](decoder)

  
  def protobufOneofEncoder[Oneof <: _root_.scalapb.GeneratedOneof](
    partialFunction: PartialFunction[Oneof, Json]): Encoder[Oneof] = { oneOf: Oneof =>
    {
      if (oneOf.isEmpty) Json.Null
      else partialFunction.apply(oneOf)
    }
  }

  implicit val booleanDecoder: Decoder[Boolean] = Decoder.decodeBoolean.prepare(cursor => {
    cursor.withFocus(
      _.asNumber
        .map(
          number =>
            number.toInt
              .map(_.toBoolean)
              .map(Json.fromBoolean)
              getOrElse Json.False
        )
        .getOrElse(Json.False))
  })

} 
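
A sketch of the lenient boolean decoding above, passing the decoder to circe's parser explicitly (it assumes the PrimitivesUtils Int-to-Boolean conversion maps 1 to true and 0 to false):

import io.circe.parser.decode

decode[Boolean]("1")(EncodingUtils.booleanDecoder)       // Right(true)
decode[Boolean]("0")(EncodingUtils.booleanDecoder)       // Right(false)
decode[Boolean]("\"yes\"")(EncodingUtils.booleanDecoder) // Right(false): non-numeric JSON falls back to false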
Example 89
Source File: BadDataHandler.scala    From model-serving-tutorial   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.modelserving.flink.wine

import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.util.Collector

import scala.util.{Failure, Success, Try}

object BadDataHandler {
  def apply[T] = new BadDataHandler[T]
}


class BadDataHandler[T] extends FlatMapFunction[Try[T], T] {
  override def flatMap(t: Try[T], out: Collector[T]): Unit = {
    t match {
      case Success(t) => out.collect(t)
      case Failure(e) => println(s"BAD DATA: ${e.getMessage}")
    }
  }
} 
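A rough sketch of wiring BadDataHandler into a Flink job, assuming the Flink streaming Scala API is available (the pipeline shape and values are illustrative): upstream parsing produces Try values, and the handler keeps successes while logging failures.

import org.apache.flink.streaming.api.scala._
import com.lightbend.modelserving.flink.wine.BadDataHandler
import scala.util.Try

object BadDataHandlerDemo {
  def pipeline(env: StreamExecutionEnvironment): DataStream[Int] = {
    val parsed: DataStream[Try[Int]] =
      env.fromElements("1", "2", "oops", "4").map(s => Try(s.toInt))
    // "oops" is printed as BAD DATA and dropped; 1, 2, 4 flow through
    parsed.flatMap(BadDataHandler[Int])
  }
}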
Example 90
Source File: Sql.scala    From ksql-streams   with Apache License 2.0 5 votes vote down vote up
package com.landoop.kstreams.sql.transform

import com.landoop.sql.Field
import org.apache.calcite.config.Lex
import org.apache.calcite.sql.parser.SqlParser
import org.apache.calcite.sql.{SqlIdentifier, SqlInsert, SqlSelect}

import scala.util.{Failure, Success, Try}

object Sql {
  private val config = SqlParser.configBuilder
    .setLex(Lex.MYSQL)
    .setCaseSensitive(false)
    .setIdentifierMaxLength(250)
    .build

  def parseSelect(sql: String): SelectTransformContext = {
    val withStructure: Boolean = sql.trim.toLowerCase().endsWith("withstructure")
    val query = if (withStructure) {
      sql.trim.dropRight("withstructure".length)
    } else sql

    val parser = SqlParser.create(query, config)
    Try(parser.parseQuery()) match {
      case Failure(e) => throw new IllegalArgumentException(s"Query is not valid. Needs to be `SELECT ... FROM $$sourceTopic`. ${e.getMessage}")
      case Success(select: SqlSelect) =>
        validate(select)
        SelectTransformContext(
          select.getFrom.asInstanceOf[SqlIdentifier].getSimple,
          Field.from(select),
          withStructure)

      case Success(other) =>
        throw new IllegalArgumentException("Invalid statement. Needs to be `SELECT ... FROM $sourceTopic`")
    }
  }

  def validate(select: SqlSelect): Unit = {
    require(select.getFrom.isInstanceOf[SqlIdentifier], s"${select.getFrom} is not valid.")
  }

  def parseInsert(sql: String): TransformContext = {
    val withStructure: Boolean = sql.trim.toLowerCase().endsWith("withstructure")
    val query = if (withStructure) {
      sql.trim.dropRight("withstructure".length)
    } else sql

    val parser = SqlParser.create(query, config)
    Try(parser.parseQuery()) match {
      case Failure(e) => throw new IllegalArgumentException(s"Query is not valid. Needs to be `INSERT INTO A SELECT ... FROM A`. ${e.getMessage}")
      case Success(sqlInsert: SqlInsert) =>
        validate(sqlInsert)
        val target = sqlInsert.getTargetTable.asInstanceOf[SqlIdentifier].getSimple
        val sqlSource = sqlInsert.getSource.asInstanceOf[SqlSelect]
        TransformContext(target,
          sqlSource.getFrom.asInstanceOf[SqlIdentifier].getSimple,
          Field.from(sqlSource),
          withStructure)

      case Success(other) =>
        throw new IllegalArgumentException("Invalid statement. Needs to be `INSERT INTO A SELECT ... FROM A`")
    }
  }

  def validate(insert: SqlInsert): Unit = {
    require(insert != null, "Null instances are invalid")
    require(insert.getTargetTable.isInstanceOf[SqlIdentifier], "Invalid target specified")
    insert.getSource match {
      case select: SqlSelect =>
        validate(select)
      case other => throw new IllegalArgumentException("Invalid source. Needs to be a SELECT ... FROM A")
    }
  }
}

case class TransformContext(target: String, from: String, fields: Seq[Field], withStructure: Boolean) {
  require(target != null && target.nonEmpty, s"'$target' is not valid")
  require(from != null && from.nonEmpty, s"'$from' is not valid")
  require(fields != null && fields.nonEmpty, "You need to specify what fields you want to select")
}


case class SelectTransformContext(from: String, fields: Seq[Field], withStructure: Boolean) {
  require(from != null && from.nonEmpty, s"'$from' is not valid")
  require(fields != null && fields.nonEmpty, "You need to specify what fields you want to select")
} 
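A short usage sketch for the parser above (topic and column names are illustrative; the trailing `withstructure` keyword is stripped before parsing):

val select = Sql.parseSelect("SELECT firstName, lastName FROM customerTopic withstructure")
// select.from == "customerTopic"; select.withStructure == true

val insert = Sql.parseInsert("INSERT INTO targetTopic SELECT id, amount FROM orders")
// insert.target == "targetTopic"; insert.from == "orders"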
Example 91
Source File: JacksonInstances.scala    From circe-jackson   with Apache License 2.0 5 votes vote down vote up
package io.circe.jackson

import cats.Eq
import cats.instances.list._
import cats.instances.map._
import io.circe.{ Json, JsonBigDecimal, JsonBiggerDecimal, JsonDecimal, JsonDouble, JsonFloat, JsonLong, JsonNumber }
import io.circe.Json.{ JArray, JNumber, JObject, JString }
import io.circe.numbers.BiggerDecimal
import io.circe.testing.ArbitraryInstances
import org.scalacheck.Arbitrary
import scala.util.matching.Regex
import scala.util.Try
import java.nio.ByteBuffer

trait JacksonInstances { this: ArbitraryInstances =>

  
  // The two values below were elided in this listing; minimal assumed
  // reconstructions are shown so the pattern match compiles: SigExpPattern
  // captures a number's exponent, and `replacement` stands in for numbers
  // whose exponent Jackson cannot round-trip.
  private[this] val SigExpPattern: Regex = """[^eE]+[eE]([-+]?\d+)""".r
  private[this] val replacement: JsonNumber = JsonLong(0L)

  def cleanNumber(n: JsonNumber): JsonNumber = n.toString match {
    case SigExpPattern(exp) if !Try(exp.toLong).toOption.exists(_ <= Short.MaxValue.toLong) => replacement
    case _ =>
      n match {
        case v @ JsonDecimal(_) => cleanNumber(JsonBiggerDecimal(v.toBiggerDecimal, v.toString))
        case v @ JsonBiggerDecimal(value, _) =>
          value.toBigDecimal.map(BigDecimal(_)).fold(replacement) { d =>
            val fromBigDecimal = BiggerDecimal.fromBigDecimal(d.bigDecimal)

            if (fromBigDecimal == value && d.abs <= BigDecimal(Double.MaxValue)) v
            else JsonBiggerDecimal(fromBigDecimal, fromBigDecimal.toString)
          }
        case v @ JsonBigDecimal(_) => v
        case v @ JsonDouble(_)     => v
        case v @ JsonFloat(_)      => v
        case v @ JsonLong(_)       => v
      }
  }

  def cleanNumbers(json: Json): Json =
    json.mapNumber(cleanNumber).mapArray(_.map(cleanNumbers)).mapObject(_.mapValues(cleanNumbers))

  val arbitraryCleanedJson: Arbitrary[Json] = Arbitrary(Arbitrary.arbitrary[Json].map(cleanNumbers))
} 
Example 92
Source File: Utils.scala    From scala-clippy   with Apache License 2.0 5 votes vote down vote up
package com.softwaremill.clippy

import java.io.{ByteArrayOutputStream, InputStream}
import java.io.Closeable
import scala.util.control.NonFatal
import scala.util.{Failure, Try}

object Utils {

  
  def runNonDaemon(t: => Unit) = {
    val shutdownHook = new Thread() {
      private val lock             = new Object
      @volatile private var didRun = false

      override def run() =
        lock.synchronized {
          if (!didRun) {
            t
            didRun = true
          }
        }
    }

    Runtime.getRuntime.addShutdownHook(shutdownHook)
    try shutdownHook.run()
    finally Runtime.getRuntime.removeShutdownHook(shutdownHook)
  }

  def inputStreamToBytes(is: InputStream): Array[Byte] =
    try {
      val baos = new ByteArrayOutputStream()
      val buf  = new Array[Byte](512)
      var read = 0
      while ({ read = is.read(buf, 0, buf.length); read } != -1) {
        baos.write(buf, 0, read)
      }
      baos.toByteArray
    } finally is.close()

  object TryWith {
    def apply[C <: Closeable, R](resource: => C)(f: C => R): Try[R] =
      Try(resource).flatMap(resourceInstance => {
        try {
          val returnValue = f(resourceInstance)
          Try(resourceInstance.close()).map(_ => returnValue)
        } catch {
          case NonFatal(exceptionInFunction) =>
            try {
              resourceInstance.close()
              Failure(exceptionInFunction)
            } catch {
              case NonFatal(exceptionInClose) =>
                exceptionInFunction.addSuppressed(exceptionInClose)
                Failure(exceptionInFunction)
            }
        }
      })
  }
} 
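A minimal sketch combining the two helpers above: read a file's bytes with guaranteed close semantics (the path is illustrative; inputStreamToBytes also closes the stream itself, and FileInputStream.close is idempotent, so the double close is harmless).

import java.io.FileInputStream
import scala.util.Try
import com.softwaremill.clippy.Utils
import com.softwaremill.clippy.Utils.TryWith

object TryWithDemo {
  val bytes: Try[Array[Byte]] =
    TryWith(new FileInputStream("/tmp/example.bin"))(Utils.inputStreamToBytes)
}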
Example 93
Source File: Template.scala    From scala-clippy   with Apache License 2.0 5 votes vote down vote up
package com.softwaremill.clippy

import java.util.regex.Pattern

import scala.util.Try
import scala.util.matching.Regex

sealed trait Template {
  def v: String
}

case class ExactT(v: String) extends Template {
  override def toString = v
}

case class RegexT(v: String) extends Template {
  lazy val regex                  = Try(new Regex(v)).getOrElse(new Regex("^$"))
  def matches(e: ExactT): Boolean = regex.pattern.matcher(e.v).matches()
  override def toString           = v
}
object RegexT {

  
  def fromPattern(pattern: String): RegexT = {
    val regexp = pattern
      .split("\\*", -1)
      .map(el => if (el != "") Pattern.quote(el) else el)
      .flatMap(el => List(".*", el))
      .tail
      .filter(_.nonEmpty)
      .mkString("")

    RegexT.fromRegex(regexp)
  }

  def fromRegex(v: String): RegexT =
    new RegexT(v)

  def setMatches(rr: Set[RegexT], ee: Set[ExactT]): Boolean =
    if (rr.size != ee.size) false
    else {
      rr.toList.forall { r =>
        ee.exists(r.matches)
      }
    }
} 
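A usage sketch for the wildcard templates above: each '*' in a pattern becomes '.*', and everything else is quoted literally.

object TemplateDemo {
  val t = RegexT.fromPattern("value * is not a member of *")
  val hit = t.matches(ExactT("value foo is not a member of Bar")) // true
  val miss = t.matches(ExactT("something else entirely"))         // false
}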
Example 94
Source File: Futures.scala    From courscala   with Apache License 2.0 5 votes vote down vote up
package org.coursera.common.concurrent

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import scala.util.control.NonFatal

object Futures extends FutureExtractors {

  
  def findMatch[T, U](
      futures: TraversableOnce[Future[T]])
      (pf: PartialFunction[T, U])
      (implicit ec: ExecutionContext): Future[Option[U]] = {

    Future.find(futures)(pf.isDefinedAt).map(_.map(pf))
  }

  def option[T](option: Option[Future[T]])(implicit ec: ExecutionContext): Future[Option[T]] =
    option.map(_.map(Some(_))).getOrElse(Future.successful(None))

  def map[K, V](m: Map[K, Future[V]])(implicit ec: ExecutionContext): Future[Map[K, V]] = {
    val elementFutures = m.map { case (key, valueFuture) =>
      valueFuture.map(key -> _)
    }
    Future.sequence(elementFutures).map(_.toMap)
  }

  object Implicits {

    implicit class FutureOps[T](future: Future[T]) {

      def toTry(implicit ec: ExecutionContext): Future[Try[T]] = {
        future
          .map(Success.apply)
          .recover(PartialFunction(Failure.apply))
      }

    }

  }

} 
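A brief usage sketch for the combinators above (the global execution context is assumed for brevity):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object FuturesDemo {
  // Sequence a Map of futures into a future Map
  val combined: Future[Map[String, Int]] =
    Futures.map(Map("a" -> Future.successful(1), "b" -> Future.successful(2)))

  // Lift an Option of a Future into a Future of an Option
  val lifted: Future[Option[Int]] =
    Futures.option(Some(Future.successful(42)))
}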
Example 95
Source File: RefreshingSideInputExample.scala    From scio   with Apache License 2.0 5 votes vote down vote up
// Example: Demonstrates a streaming job with periodically refreshing side input
// Usage:

// `sbt "scio-examples/runMain com.spotify.scio.examples.extra.RefreshingSideInputExample
// --project=[PROJECT] --runner=[RUNNER] --zone=[ZONE] --input=[PUBSUB_SUBSCRIPTION]"`
package com.spotify.scio.examples.extra

import com.spotify.scio._
import com.spotify.scio.values.WindowOptions
import org.apache.beam.sdk.io.GenerateSequence
import org.apache.beam.sdk.options.StreamingOptions
import org.apache.beam.sdk.transforms.windowing.Window.ClosingBehavior
import org.apache.beam.sdk.transforms.windowing.{AfterPane, Repeatedly}
import org.apache.beam.sdk.values.WindowingStrategy.AccumulationMode
import org.joda.time.{Duration, Instant}
import org.slf4j.LoggerFactory

import scala.util.{Random, Success, Try}


object RefreshingSideInputExample {
  case class LotteryTicket(numbers: Seq[Int])
  case class LotteryResult(
    eventTime: Instant,
    processTime: Instant,
    isWinner: Boolean,
    ticket: Seq[Int],
    winningNumbers: Seq[Int]
  )

  private lazy val logger = LoggerFactory.getLogger(this.getClass)

  private val ticketSize = 5

  def main(cmdlineArgs: Array[String]): Unit = {
    val (sc, args) = ContextAndArgs(cmdlineArgs)
    sc.optionsAs[StreamingOptions].setStreaming(true)

    // An unbounded input that produces a sequence of 5 randomly generated winning lottery numbers,
    // refreshed every 10 seconds. Materialized as a singleton `SideInput`.
    val winningLotteryNumbers = sc
      .customInput(
        "winningLotteryNumbers",
        GenerateSequence
          .from(0)
          .withRate(1, Duration.standardSeconds(10))
      )
      .withFixedWindows(
        duration = Duration.standardSeconds(10),
        offset = Duration.ZERO,
        options = WindowOptions(
          trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(1)),
          accumulationMode = AccumulationMode.DISCARDING_FIRED_PANES,
          closingBehavior = ClosingBehavior.FIRE_IF_NON_EMPTY,
          allowedLateness = Duration.standardSeconds(0)
        )
      )
      .map(_ => Seq.fill(ticketSize)(Random.nextInt(100)))
      // A default is needed in case an empty pane is fired
      .asSingletonSideInput(Seq.fill(ticketSize)(-1))

    // Sample PubSub topic modeling lottery tickets as a comma-separated list of numbers.
    // For example, a message might contain the string "10,7,3,1,9"
    sc.pubsubTopic[String](args("input"))
      .flatMap(toLotteryTicket)
      .withFixedWindows(Duration.standardSeconds(5))
      .withTimestamp
      .withSideInputs(winningLotteryNumbers)
      .map {
        case ((lotteryTicket, eventTime), side) =>
          val currentWinningNumbers = side(winningLotteryNumbers)

          val isWinner = lotteryTicket.numbers == currentWinningNumbers
          val result = LotteryResult(
            eventTime,
            Instant.now(),
            isWinner,
            lotteryTicket.numbers,
            currentWinningNumbers
          )

          logger.info(s"Lottery result: $result")
      } // Can save output to PubSub, BigQuery, etc.

    sc.run()
    ()
  }

  private def toLotteryTicket(message: String): Option[LotteryTicket] =
    Try(LotteryTicket(message.split(",").map(_.toInt))) match {
      case Success(s) if s.numbers.size == ticketSize => Some(s)
      case _ =>
        logger.error(s"Malformed message: $message")
        None
    }
} 
Example 96
Source File: CheckpointTest.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.extra.checkpoint

import java.nio.file.Files

import com.spotify.scio.{ContextAndArgs, ScioMetrics}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.reflect.io.File
import scala.util.Try

object CheckpointMetrics {
  def runJob(checkpointArg: String, tempLocation: String = null): (Long, Long) = {
    val elemsBefore = ScioMetrics.counter("elemsBefore")
    val elemsAfter = ScioMetrics.counter("elemsAfter")

    val (sc, args) = ContextAndArgs(
      Array(s"--checkpoint=$checkpointArg") ++
        Option(tempLocation).map(e => s"--tempLocation=$e")
    )
    sc.checkpoint(args("checkpoint")) {
      sc.parallelize(1 to 10)
        .map { x => elemsBefore.inc(); x }
    }.map { x => elemsAfter.inc(); x }
    val r = sc.run().waitUntilDone()
    (Try(r.counter(elemsBefore).committed.get).getOrElse(0), r.counter(elemsAfter).committed.get)
  }
}

class CheckpointTest extends AnyFlatSpec with Matchers {
  import CheckpointMetrics._

  "checkpoint" should "work on path" in {
    val tmpDir =
      Files.createTempDirectory("checkpoint-").resolve("checkpoint").toString
    runJob(tmpDir) shouldBe ((10L, 10L))
    runJob(tmpDir) shouldBe ((0L, 10L))
    File(tmpDir).deleteRecursively()
    runJob(tmpDir) shouldBe ((10L, 10L))
  }

  it should "work on name/file" in {
    val checkpointName = "c1"
    val tempLocation = Files.createTempDirectory("temp-location-").toString
    runJob(checkpointName, tempLocation) shouldBe ((10L, 10L))
    runJob(checkpointName, tempLocation) shouldBe ((0L, 10L))
    File(s"$tempLocation/$checkpointName").deleteRecursively()
    runJob(checkpointName, tempLocation) shouldBe ((10L, 10L))
  }
} 
Example 97
Source File: IndexAdmin.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.elasticsearch

import org.apache.http.HttpHost
import org.elasticsearch.client._
import org.elasticsearch.client.indices.{CreateIndexRequest, CreateIndexResponse}
import org.elasticsearch.common.xcontent.XContentType

import scala.util.Try

object IndexAdmin {
  private def indicesClient[A](esOptions: ElasticsearchOptions)(f: IndicesClient => A): Try[A] = {
    val client = new RestHighLevelClient(RestClient.builder(esOptions.nodes: _*))

    val result = Try(f(client.indices()))
    client.close()
    result
  }

  
  private def ensureIndex(
    index: String,
    mappingSource: String,
    client: IndicesClient
  ): CreateIndexResponse =
    client.create(
      new CreateIndexRequest(index).source(mappingSource, XContentType.JSON),
      RequestOptions.DEFAULT
    )
} 
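Both helpers above are private, and the public surface of this file is elided in the listing; a sketch of a wrapper that presumably composes them, to be placed inside IndexAdmin:

def ensureIndex(esOptions: ElasticsearchOptions, index: String, mappingSource: String): Try[CreateIndexResponse] =
  indicesClient(esOptions)(client => ensureIndex(index, mappingSource, client))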
Example 98
Source File: IndexAdmin.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.elasticsearch

import java.net.InetSocketAddress

import org.elasticsearch.action.admin.indices.create.CreateIndexResponse
import org.elasticsearch.client.AdminClient
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.common.transport.InetSocketTransportAddress
import org.elasticsearch.common.xcontent.XContentType
import org.elasticsearch.transport.client.PreBuiltTransportClient

import scala.util.Try

object IndexAdmin {
  private def adminClient[A](esOptions: ElasticsearchOptions)(f: AdminClient => A): Try[A] = {
    val settings: Settings =
      Settings.builder.put("cluster.name", esOptions.clusterName).build

    val transportAddresses: Seq[InetSocketTransportAddress] = esOptions.servers
      .map(addr => new InetSocketTransportAddress(addr))

    val client = new PreBuiltTransportClient(settings)
      .addTransportAddresses(transportAddresses: _*)

    val result = Try(f(client.admin()))
    client.close()
    result
  }

  
  private def ensureIndex(
    index: String,
    mappingSource: String,
    client: AdminClient
  ): CreateIndexResponse =
    client
      .indices()
      .prepareCreate(index)
      .setSource(mappingSource, XContentType.JSON)
      .get()
} 
Example 99
Source File: IndexAdmin.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.elasticsearch

import java.net.InetSocketAddress

import org.elasticsearch.action.admin.indices.create.CreateIndexResponse
import org.elasticsearch.client.AdminClient
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.common.transport.TransportAddress
import org.elasticsearch.common.xcontent.XContentType
import org.elasticsearch.transport.client.PreBuiltTransportClient

import scala.util.Try

object IndexAdmin {
  private def adminClient[A](esOptions: ElasticsearchOptions)(f: AdminClient => A): Try[A] = {
    val settings: Settings =
      Settings.builder.put("cluster.name", esOptions.clusterName).build

    val transportAddresses: Seq[TransportAddress] = esOptions.servers
      .map(addr => new TransportAddress(addr))

    val client = new PreBuiltTransportClient(settings)
      .addTransportAddresses(transportAddresses: _*)

    val result = Try(f(client.admin()))
    client.close()
    result
  }

  
  private def ensureIndex(
    index: String,
    mappingSource: String,
    client: AdminClient
  ): CreateIndexResponse =
    client
      .indices()
      .prepareCreate(index)
      .setSource(mappingSource, XContentType.JSON)
      .get()
} 
Example 100
Source File: OverrideTypeProviderFinder.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.bigquery.validation

import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}


object OverrideTypeProviderFinder {
  var typeProvider: String = System.getProperty("override.type.provider", "")

  var provider: OverrideTypeProvider = instance()

  def instance(): OverrideTypeProvider = {
    // Load the class dynamically at compile time and runtime
    val classInstance = Try(
      Class
        .forName(System.getProperty("override.type.provider", ""))
        .getConstructor()
        .newInstance()
        .asInstanceOf[OverrideTypeProvider]
    )
    classInstance match {
      case Success(value)       => value
      case Failure(NonFatal(_)) => new DummyOverrideTypeProvider
    }
  }

  def getProvider: OverrideTypeProvider = {
    val thisInstance = System.getProperty("override.type.provider", "")
    if (typeProvider != thisInstance) {
      typeProvider = thisInstance
      provider = instance()
    }
    provider
  }
} 
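A usage sketch (the provider class name is hypothetical; if it cannot be loaded, instance() falls back to DummyOverrideTypeProvider):

object ProviderDemo extends App {
  System.setProperty("override.type.provider", "com.example.MyTypeProvider") // hypothetical class
  val provider = OverrideTypeProviderFinder.getProvider
  println(provider.getClass.getName)
}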
Example 101
Source File: BigQueryPartitionUtil.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.bigquery

import java.util.regex.Pattern

import com.google.api.services.bigquery.model.TableReference
import com.spotify.scio.bigquery.client.BigQuery
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers

import scala.util.Try

private[bigquery] object BigQueryPartitionUtil {
  // Ported from com.google.cloud.dataflow.sdk.io.BigQueryHelpers

  private[this] val PROJECT_ID_REGEXP = "[a-z][-a-z0-9:.]{4,61}[a-z0-9]"
  private[this] val DATASET_REGEXP = "[-\\w.]{1,1024}"
  private[this] val TABLE_REGEXP = "[-\\w$@]{1,1024}($LATEST)?"
  private[this] val DATASET_TABLE_REGEXP_LEGACY =
    s"((?<PROJECT>$PROJECT_ID_REGEXP):)?(?<DATASET>$DATASET_REGEXP)\\.(?<TABLE>$TABLE_REGEXP)"
  private[this] val DATASET_TABLE_REGEXP_STANDARD =
    s"((?<PROJECT>$PROJECT_ID_REGEXP).)?(?<DATASET>$DATASET_REGEXP)\\.(?<TABLE>$TABLE_REGEXP)"
  private[this] val QUERY_TABLE_SPEC_LEGACY =
    Pattern.compile(s"(?<=\\[)$DATASET_TABLE_REGEXP_LEGACY(?=\\])")
  private[this] val QUERY_TABLE_SPEC_STANDARD =
    Pattern.compile(s"(?<=\\`)$DATASET_TABLE_REGEXP_STANDARD(?=\\`)")

  private def extractTables(sqlQuery: String): Map[String, TableReference] = {
    val b = Map.newBuilder[String, TableReference]
    val m1 = QUERY_TABLE_SPEC_LEGACY.matcher(sqlQuery)
    while (m1.find()) {
      val t = m1.group(0)
      b += (s"[$t]" -> BigQueryHelpers.parseTableSpec(t))
    }
    val m2 = QUERY_TABLE_SPEC_STANDARD.matcher(sqlQuery)
    while (m2.find()) {
      val t = m2.group(0)
      b += (s"`$t`" -> BigQueryHelpers.parseTableSpec(t.replaceFirst("\\.", ":")))
    }
    b.result()
  }

  private def getPartitions(bq: BigQuery, tableRef: TableReference): Set[String] = {
    val prefix = tableRef.getTableId.split('$')(0)
    bq.tables
      .tableReferences(tableRef.getProjectId, tableRef.getDatasetId)
      .filter(_.getTableId.startsWith(prefix))
      .map(_.getTableId.substring(prefix.length))
      .toSet
      // get all table with prefix and filter only the day/date partitioned tables. Current
      // format for date partition is YYYYMMDD, thus all numeric.
      .filter(e => Try(e.toLong).isSuccess)
  }

  def latestQuery(bq: BigQuery, sqlQuery: String): String = {
    val tables =
      extractTables(sqlQuery).filter(_._2.getTableId.endsWith("$LATEST"))
    if (tables.isEmpty) {
      sqlQuery
    } else {
      val overlaps = tables
        .map(t => getPartitions(bq, t._2))
        .reduce(_ intersect _)
      require(
        overlaps.nonEmpty,
        "Cannot find latest common partition for " + tables.keys.mkString(", ")
      )
      val latest = overlaps.max
      tables.foldLeft(sqlQuery) {
        case (q, (spec, _)) =>
          q.replace(spec, spec.replace("$LATEST", latest))
      }
    }
  }

  def latestTable(bq: BigQuery, tableSpec: String): String = {
    val ref = BigQueryHelpers.parseTableSpec(tableSpec)
    if (ref.getTableId.endsWith("$LATEST")) {
      val partitions = getPartitions(bq, ref)
      require(partitions.nonEmpty, s"Cannot find latest partition for $tableSpec")
      tableSpec.replace("$LATEST", partitions.max)
    } else {
      tableSpec
    }
  }
} 
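A usage sketch from within the bigquery package (the table spec is illustrative): the $LATEST suffix resolves to the newest day partition shared by all referenced tables.

def resolveLatest(bq: BigQuery): String =
  BigQueryPartitionUtil.latestTable(bq, "my-project:logs.events$LATEST")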
Example 102
Source File: TableRowSyntax.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.bigquery.syntax

// import com.google.api.services.bigquery.model.{TableRow => GTableRow}
import com.spotify.scio.bigquery.{Date, DateTime, TableRow, Time, Timestamp}
import org.joda.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import scala.jdk.CollectionConverters._

import scala.util.Try


final class TableRowOps(private val r: TableRow) extends AnyVal {
  def getBoolean(name: AnyRef): Boolean =
    this.getValue(name, _.toString.toBoolean, false)

  def getBooleanOpt(name: AnyRef): Option[Boolean] =
    this.getValueOpt(name, _.toString.toBoolean)

  def getLong(name: AnyRef): Long = this.getValue(name, _.toString.toLong, 0L)

  def getLongOpt(name: AnyRef): Option[Long] =
    this.getValueOpt(name, _.toString.toLong)

  def getDouble(name: AnyRef): Double =
    this.getValue(name, _.toString.toDouble, 0.0)

  def getDoubleOpt(name: AnyRef): Option[Double] =
    this.getValueOpt(name, _.toString.toDouble)

  def getString(name: AnyRef): String = this.getValue(name, _.toString, null)

  def getStringOpt(name: AnyRef): Option[String] =
    this.getValueOpt(name, _.toString)

  def getTimestamp(name: AnyRef): Instant =
    this.getValue(name, v => Timestamp.parse(v.toString), null)

  def getTimestampOpt(name: AnyRef): Option[Instant] =
    this.getValueOpt(name, v => Timestamp.parse(v.toString))

  def getDate(name: AnyRef): LocalDate =
    this.getValue(name, v => Date.parse(v.toString), null)

  def getDateOpt(name: AnyRef): Option[LocalDate] =
    this.getValueOpt(name, v => Date.parse(v.toString))

  def getTime(name: AnyRef): LocalTime =
    this.getValue(name, v => Time.parse(v.toString), null)

  def getTimeOpt(name: AnyRef): Option[LocalTime] =
    this.getValueOpt(name, v => Time.parse(v.toString))

  def getDateTime(name: AnyRef): LocalDateTime =
    this.getValue(name, v => DateTime.parse(v.toString), null)

  def getDateTimeOpt(name: AnyRef): Option[LocalDateTime] =
    this.getValueOpt(name, v => DateTime.parse(v.toString))

  def getRepeated(name: AnyRef): Seq[AnyRef] =
    this.getValue(name, x => x.asInstanceOf[java.util.List[AnyRef]].iterator().asScala.toSeq, null)

  def getRecord(name: AnyRef): Map[String, AnyRef] =
    r.get(name).asInstanceOf[java.util.Map[String, AnyRef]].asScala.toMap

  private def getValue[T](name: AnyRef, fn: AnyRef => T, default: T): T = {
    val o = r.get(name)
    if (o == null) {
      default
    } else {
      fn(o)
    }
  }

  private def getValueOpt[T](name: AnyRef, fn: AnyRef => T): Option[T] = {
    val o = r.get(name)
    if (o == null) {
      None
    } else {
      Try(fn(o)).toOption
    }
  }
}

trait TableRowSyntax {
  implicit def bigQueryTableRowOps(tr: TableRow): TableRowOps = new TableRowOps(tr)
} 
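A usage sketch with the syntax implicit in scope, written as if appended to the same file (field names are illustrative):

object TableRowOpsDemo extends TableRowSyntax {
  def readUser(row: TableRow): (Long, Option[String], Instant) =
    (row.getLong("user_id"), row.getStringOpt("email"), row.getTimestamp("created_at"))
}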
Example 103
Source File: BigQueryConfig.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.bigquery.client

import java.nio.file.{Path, Paths}

import com.google.api.services.bigquery.BigqueryScopes
import com.spotify.scio.CoreSysProps
import com.spotify.scio.bigquery.BigQuerySysProps
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.QueryPriority

import scala.util.Try

object BigQueryConfig {

  
  // The cache defaults below were elided in this listing; minimal assumed
  // reconstructions are shown so isCacheEnabled and cacheDirectory compile.
  private[this] val CacheEnabledDefault: Boolean = true

  private[this] val CacheDirectoryDefault: Path =
    Paths.get(sys.props("java.io.tmpdir"), ".bigquery")

  private[this] val PriorityDefault: QueryPriority = QueryPriority.BATCH

  private[this] val DefaultScopes = List(BigqueryScopes.BIGQUERY)

  private[this] val DefaultLocation = "US"

  def location: String = DefaultLocation

  def scopes: Seq[String] = DefaultScopes

  def isCacheEnabled: Boolean =
    BigQuerySysProps.CacheEnabled.valueOption
      .flatMap(x => Try(x.toBoolean).toOption)
      .getOrElse(CacheEnabledDefault)

  def cacheDirectory: Path =
    BigQuerySysProps.CacheDirectory.valueOption.map(Paths.get(_)).getOrElse(CacheDirectoryDefault)

  def connectTimeoutMs: Option[Int] =
    BigQuerySysProps.ConnectTimeoutMs.valueOption.map(_.toInt)

  def readTimeoutMs: Option[Int] =
    BigQuerySysProps.ReadTimeoutMs.valueOption.map(_.toInt)

  def priority: QueryPriority = {
    lazy val isCompilingOrTesting = Thread
      .currentThread()
      .getStackTrace
      .exists { e =>
        e.getClassName.startsWith("scala.tools.nsc.interpreter.") ||
        e.getClassName.startsWith("org.scalatest.tools.")
      }

    BigQuerySysProps.Priority.valueOption.map(_.toUpperCase) match {
      case Some("INTERACTIVE")       => QueryPriority.INTERACTIVE
      case Some("BATCH")             => QueryPriority.BATCH
      case _ if isCompilingOrTesting => QueryPriority.INTERACTIVE
      case _                         => PriorityDefault
    }
  }
} 
Example 104
Source File: Cache.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.bigquery.client

import java.io.File

import com.google.api.services.bigquery.model.{TableReference, TableSchema}
import com.spotify.scio.bigquery.BigQueryUtil
import org.apache.beam.sdk.io.gcp.{bigquery => bq}
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.Files

import scala.util.Try
import org.apache.avro.Schema

private[client] object Cache {
  sealed trait Show[T] {
    def show(t: T): String
  }

  object Show {
    @inline final def apply[T](implicit t: Show[T]): Show[T] = t

    implicit val showTableSchema: Show[TableSchema] = new Show[TableSchema] {
      override def show(t: TableSchema): String = t.toPrettyString()
    }

    implicit val showTableRef: Show[TableReference] = new Show[TableReference] {
      override def show(table: TableReference): String =
        bq.BigQueryHelpers.toTableSpec(table)
    }

    implicit val showAvroSchema: Show[Schema] = new Show[Schema] {
      override def show(t: Schema): String = t.toString()
    }
  }

  sealed trait Read[T] {
    def read(s: String): Option[T]
  }

  object Read {
    @inline final def apply[T](implicit t: Read[T]): Read[T] = t

    implicit val readTableSchema: Read[TableSchema] = new Read[TableSchema] {
      override def read(s: String): Option[TableSchema] =
        Try(BigQueryUtil.parseSchema(s)).toOption
    }

    implicit val readTableRef: Read[TableReference] = new Read[TableReference] {
      override def read(table: String): Option[TableReference] =
        Try(bq.BigQueryHelpers.parseTableSpec(table)).toOption
    }

    implicit val readAvroSchema: Read[Schema] = new Read[Schema] {
      override def read(s: String): Option[Schema] =
        Try {
          new Schema.Parser().parse(s)
        }.toOption
    }
  }

  private[this] def isCacheEnabled: Boolean = BigQueryConfig.isCacheEnabled

  def getOrElse[T: Read: Show](key: String, f: String => File)(method: => T): T =
    if (isCacheEnabled) {
      get(key, f) match {
        case Some(schema) => schema
        case None =>
          val schema = method
          set(key, schema, f)
          schema
      }
    } else {
      method
    }

  def set[T: Show](key: String, t: T, f: String => File): Unit =
    Files
      .asCharSink(f(key), Charsets.UTF_8)
      .write(Show[T].show(t))

  def get[T: Read](key: String, f: String => File): Option[T] =
    Try(scala.io.Source.fromFile(f(key)).mkString).toOption.flatMap(Read[T].read)

  val SchemaCache: String => File = key => cacheFile(key, ".schema.json")

  val TableCache: String => File = key => cacheFile(key, ".table.txt")

  private[this] def cacheFile(key: String, suffix: String): File = {
    val cacheDir = BigQueryConfig.cacheDirectory
    val filename = Hashing.murmur3_128().hashString(key, Charsets.UTF_8).toString + suffix
    val cacheFile = cacheDir.resolve(filename).toFile()
    Files.createParentDirs(cacheFile)
    cacheFile
  }
} 
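A usage sketch from inside the client package (Cache is package-private; the fetch argument is a hypothetical stand-in for the real service call):

def cachedSchema(tableSpec: String, fetch: => TableSchema): TableSchema =
  Cache.getOrElse(tableSpec, Cache.SchemaCache)(fetch)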
Example 105
Source File: ScalaAsyncLookupDoFn.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.transforms

import com.spotify.scio.transforms.BaseAsyncLookupDoFn.{CacheSupplier, NoOpCacheSupplier}

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}


abstract class ScalaAsyncLookupDoFn[A, B, C](
  maxPendingRequests: Int,
  cacheSupplier: CacheSupplier[A, B, _]
) extends BaseAsyncLookupDoFn[A, B, C, Future[B], Try[B]](maxPendingRequests, cacheSupplier)
    with ScalaFutureHandlers[B] {
  def this() {
    this(1000, new NoOpCacheSupplier[A, B])
  }

  def this(maxPendingRequests: Int) {
    this(maxPendingRequests, new NoOpCacheSupplier[A, B])
  }

  override def success(output: B): Try[B] = Success(output)
  override def failure(throwable: Throwable): Try[B] = Failure(throwable)
} 
Example 106
Source File: PubSubAdmin.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.pubsub

import com.google.pubsub.v1.PublisherGrpc.PublisherBlockingStub
import com.google.pubsub.v1.SubscriberGrpc.SubscriberBlockingStub
import com.google.pubsub.v1.{
  GetSubscriptionRequest,
  GetTopicRequest,
  PublisherGrpc,
  SubscriberGrpc,
  Subscription,
  Topic
}
import io.grpc.ManagedChannel
import io.grpc.auth.MoreCallCredentials
import io.grpc.netty.{GrpcSslContexts, NegotiationType, NettyChannelBuilder}
import org.apache.beam.sdk.io.gcp.pubsub.PubsubOptions

import scala.util.Try

object PubSubAdmin {
  private object GrpcClient {
    private def newChannel: ManagedChannel =
      NettyChannelBuilder
        .forAddress("pubsub.googleapis.com", 443)
        .negotiationType(NegotiationType.TLS)
        .sslContext(GrpcSslContexts.forClient.ciphers(null).build)
        .build

    def subscriber[A](pubsubOptions: PubsubOptions)(f: SubscriberBlockingStub => A): Try[A] = {
      val channel = newChannel
      val client = SubscriberGrpc
        .newBlockingStub(channel)
        .withCallCredentials(MoreCallCredentials.from(pubsubOptions.getGcpCredential))

      val result = Try(f(client))
      channel.shutdownNow()
      result
    }

    def publisher[A](pubsubOptions: PubsubOptions)(f: PublisherBlockingStub => A): Try[A] = {
      val channel = newChannel
      val client = PublisherGrpc
        .newBlockingStub(channel)
        .withCallCredentials(MoreCallCredentials.from(pubsubOptions.getGcpCredential))

      val result = Try(f(client))
      channel.shutdownNow()
      result
    }
  }

  
  def subscription(pubsubOptions: PubsubOptions, name: String): Try[Subscription] =
    GrpcClient.subscriber(pubsubOptions) { client =>
      val subRequest = GetSubscriptionRequest.newBuilder().setSubscription(name).build()
      client.getSubscription(subRequest)
    }
} 
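GetTopicRequest and the publisher helper are defined above but unused in this excerpt, which suggests a topic lookup was elided from the listing; a sketch of that counterpart, to be placed inside PubSubAdmin:

def topic(pubsubOptions: PubsubOptions, name: String): Try[Topic] =
  GrpcClient.publisher(pubsubOptions) { client =>
    client.getTopic(GetTopicRequest.newBuilder().setTopic(name).build())
  }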
Example 107
Source File: ScioUtil.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.util

import java.net.URI
import java.util.UUID

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.spotify.scio.ScioContext
import org.apache.beam.sdk.extensions.gcp.options.GcpOptions
import org.apache.beam.sdk.extensions.gcp.util.Transport
import org.apache.beam.sdk.{PipelineResult, PipelineRunner}
import org.slf4j.LoggerFactory

import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

private[scio] object ScioUtil {
  @transient private lazy val log = LoggerFactory.getLogger(this.getClass)
  @transient lazy val jsonFactory = Transport.getJsonFactory

  def isLocalUri(uri: URI): Boolean =
    uri.getScheme == null || uri.getScheme == "file"

  def isRemoteUri(uri: URI): Boolean = !isLocalUri(uri)

  def isLocalRunner(runner: Class[_ <: PipelineRunner[_ <: PipelineResult]]): Boolean = {
    require(runner != null, "Pipeline runner not set!")
    // FIXME: cover Flink, Spark, etc. in local mode
    runner.getName == "org.apache.beam.runners.direct.DirectRunner"
  }

  def isRemoteRunner(runner: Class[_ <: PipelineRunner[_ <: PipelineResult]]): Boolean =
    !isLocalRunner(runner)

  def classOf[T: ClassTag]: Class[T] =
    implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]]

  def getScalaJsonMapper: ObjectMapper =
    new ObjectMapper().registerModule(DefaultScalaModule)

  def addPartSuffix(path: String, ext: String = ""): String =
    if (path.endsWith("/")) s"${path}part-*$ext" else s"$path/part-*$ext"

  def getTempFile(context: ScioContext, fileOrPath: String = null): String = {
    val fop = Option(fileOrPath).getOrElse("scio-materialize-" + UUID.randomUUID().toString)
    val uri = URI.create(fop)
    if ((ScioUtil.isLocalUri(uri) && uri.toString.startsWith("/")) || uri.isAbsolute) {
      fop
    } else {
      val filename = fop
      val tmpDir = if (context.options.getTempLocation != null) {
        context.options.getTempLocation
      } else {
        val m =
          "Specify a temporary location via --tempLocation or PipelineOptions.setTempLocation."
        Try(context.optionsAs[GcpOptions].getGcpTempLocation) match {
          case Success(l) =>
            log.warn(
              "Using GCP temporary location as a temporary location to materialize data. " + m
            )
            l
          case Failure(_) =>
            throw new IllegalArgumentException("No temporary location was specified. " + m)
        }
      }
      tmpDir + (if (tmpDir.endsWith("/")) "" else "/") + filename
    }
  }

  def pathWithShards(path: String): String =
    path.replaceAll("\\/+$", "") + "/part"
} 
Example 108
Source File: AvroSerializer.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.coders.instances.kryo

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.twitter.chill.KSerializer
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.apache.avro.specific.SpecificRecordBase
import org.apache.beam.sdk.coders.AvroCoder

import scala.collection.mutable.{Map => MMap}
import scala.util.Try

private[coders] class GenericAvroSerializer extends KSerializer[GenericRecord] {
  private lazy val cache: MMap[String, AvroCoder[GenericRecord]] = MMap()

  private def getCoder(schemaStr: String): AvroCoder[GenericRecord] =
    cache.getOrElseUpdate(schemaStr, AvroCoder.of(new Schema.Parser().parse(schemaStr)))
  private def getCoder(schemaStr: String, schema: Schema): AvroCoder[GenericRecord] =
    cache.getOrElseUpdate(schemaStr, AvroCoder.of(schema))

  override def write(kryo: Kryo, out: Output, obj: GenericRecord): Unit = {
    val schemaStr = obj.getSchema.toString
    val coder = this.getCoder(schemaStr, obj.getSchema)
    // write schema before every record in case it's not in reader serializer's cache
    out.writeString(schemaStr)
    coder.encode(obj, out)
  }

  override def read(kryo: Kryo, in: Input, cls: Class[GenericRecord]): GenericRecord = {
    val coder = this.getCoder(in.readString())
    coder.decode(in)
  }
}

private[coders] class SpecificAvroSerializer[T <: SpecificRecordBase] extends KSerializer[T] {
  private lazy val cache: MMap[Class[T], AvroCoder[T]] = MMap()

  private def getCoder(cls: Class[T]): AvroCoder[T] =
    cache.getOrElseUpdate(
      cls,
      Try(cls.getConstructor().newInstance().getSchema)
        .map(AvroCoder.of(cls, _))
        .getOrElse(AvroCoder.of(cls))
    )

  override def write(kser: Kryo, out: Output, obj: T): Unit =
    this.getCoder(obj.getClass.asInstanceOf[Class[T]]).encode(obj, out)

  override def read(kser: Kryo, in: Input, cls: Class[T]): T =
    this.getCoder(cls).decode(in)
} 
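A sketch of how these serializers could be hooked into a Kryo instance; in the original they are package-private and registered by scio's own Kryo registrar, so this is for illustration only.

import com.esotericsoftware.kryo.Kryo
import org.apache.avro.generic.GenericData
import org.apache.avro.specific.SpecificRecordBase

object AvroKryoDemo {
  def configure(kryo: Kryo): Unit = {
    // Concrete generic records get the schema-writing serializer
    kryo.register(classOf[GenericData.Record], new GenericAvroSerializer)
    // All specific records fall back to the class-keyed serializer
    kryo.addDefaultSerializer(classOf[SpecificRecordBase], classOf[SpecificAvroSerializer[Nothing]])
  }
}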
Example 109
Source File: Pretty.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.testing

import org.apache.avro.generic.GenericRecord
import org.apache.avro.specific.SpecificRecordBase
import scala.jdk.CollectionConverters._
import com.spotify.scio.{registerSysProps, SysProp}
import scala.util.Try

@registerSysProps
object PrettySysProps {
  val PrettyPrint =
    SysProp("tests.prettyprint.colors", "Should pretty printed values be rendered with colors")
}

object Pretty {
  import pprint.Tree
  import fansi.{Color, Str}

  private def renderFieldName(n: String) =
    Tree.Lazy(ctx => List(Color.LightBlue(n).toString).iterator)

  private def renderGenericRecord: PartialFunction[GenericRecord, Tree] = {
    case g =>
      val renderer =
        new pprint.Renderer(
          printer.defaultWidth,
          printer.colorApplyPrefix,
          printer.colorLiteral,
          printer.defaultIndent
        )
      def render(tree: Tree): Str =
        Str.join(renderer.rec(tree, 0, 0).iter.toSeq: _*)
      Tree.Lazy { ctx =>
        val fields =
          for {
            f <- g.getSchema().getFields().asScala
          } yield Str.join(
            render(renderFieldName(f.name)),
            ": ",
            render(treeifyAvro(g.get(f.name())))
          )
        List(
          Color.LightGray("{ ").toString +
            fields.reduce((a, b) => Str.join(a, ", ", b)) +
            Color.LightGray(" }")
        ).iterator
      }
  }

  private def renderSpecificRecord: PartialFunction[SpecificRecordBase, Tree] = {
    case x =>
      val fs =
        for {
          f <- x.getSchema().getFields().asScala
        } yield Tree.Infix(renderFieldName(f.name), "=", treeifyAvro(x.get(f.name())))
      Tree.Apply(x.getClass().getSimpleName(), fs.iterator)
  }

  private def treeifyAvro: PartialFunction[Any, Tree] = {
    case x: SpecificRecordBase =>
      renderSpecificRecord(x)
    case g: GenericRecord =>
      renderGenericRecord(g)
    case x =>
      printer.treeify(x)
  }

  private val handlers: PartialFunction[Any, Tree] = {
    case x: GenericRecord => treeifyAvro(x)
  }

  private val useColors =
    PrettySysProps.PrettyPrint.valueOption
      .flatMap(x => Try(x.toBoolean).toOption)
      .getOrElse {
        // Crude test to check if the terminal seems to support colors
        (System.console() != null) && (System.getenv().get("TERM") != null)
      }

  val printer =
    if (useColors) {
      pprint.PPrinter(
        additionalHandlers = handlers
      )
    } else {
      pprint.PPrinter(
        additionalHandlers = handlers,
        colorLiteral = fansi.Attrs.Empty,
        colorApplyPrefix = fansi.Attrs.Empty
      )
    }
} 
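A usage sketch: the printer renders any value, with the custom tree rendering kicking in for Avro records.

println(Pretty.printer(Map("id" -> 1, "tags" -> List("a", "b"))))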
Example 110
Source File: JodaSerializerTest.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.coders.instances.kryo

import com.spotify.scio.coders.{CoderTestUtils, KryoAtomicCoder, KryoOptions}
import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime, LocalTime}
import org.scalacheck._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatestplus.scalacheck.Checkers

import scala.jdk.CollectionConverters._
import scala.util.Try

class JodaSerializerTest extends AnyFlatSpec with Checkers {
  // TODO: remove this once https://github.com/scalatest/scalatest/issues/1090 is addressed
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  implicit val dateTimeArb = Arbitrary {
    for {
      year <- Gen.choose(-292275054, 292278993)
      month <- Gen.choose(1, 12)
      maxDayOfMonth <- Try {
        Gen.const(new LocalDateTime(year, month, 1, 0, 0).dayOfMonth().getMaximumValue)
      }.getOrElse(Gen.fail)
      day <- Gen.choose(1, maxDayOfMonth)
      hour <- Gen.choose(0, 23)
      minute <- Gen.choose(0, 59)
      second <- Gen.choose(0, 59)
      ms <- Gen.choose(0, 999)
      tz <- Gen.oneOf(DateTimeZone.getAvailableIDs.asScala.toSeq)
      attempt <- Try {
        val ldt = new DateTime(year, month, day, hour, minute, second, ms, DateTimeZone.forID(tz))
        Gen.const(ldt)
      }.getOrElse(Gen.fail)
    } yield attempt
  }

  implicit val localDateTimeArb = Arbitrary {
    Arbitrary.arbitrary[DateTime].map(_.toLocalDateTime)
  }

  implicit val localTimeArb = Arbitrary {
    Arbitrary.arbitrary[LocalDateTime].map(_.toLocalTime)
  }

  implicit val localDateArb = Arbitrary {
    Arbitrary.arbitrary[LocalDateTime].map(_.toLocalDate)
  }

  val coder = new KryoAtomicCoder[Any](KryoOptions())

  def roundTripProp[T](value: T): Prop = Prop.secure {
    CoderTestUtils.testRoundTrip(coder, value)
  }

  "KryoAtomicCoder" should "roundtrip LocalDate" in {
    check(roundTripProp[LocalDate] _)
  }

  it should "roundtrip LocalTime" in {
    check(roundTripProp[LocalTime] _)
  }

  it should "roundtrip LocalDateTime" in {
    check(roundTripProp[LocalDateTime] _)
  }

  it should "roundtrip DateTime" in {
    check(roundTripProp[DateTime] _)
  }
} 
Example 111
Source File: YamlFile.scala    From sope   with Apache License 2.0 5 votes vote down vote up
package com.sope.etl.yaml

import com.fasterxml.jackson.databind.JsonMappingException
import com.sope.etl.transform.exception.YamlDataTransformException
import com.sope.etl.transform.model.Failed
import com.sope.etl.utils.RedactUtil
import com.sope.etl.yaml.YamlParserUtil._
import com.sope.utils.Logging

import scala.util.{Failure, Success, Try}


// The enclosing class was elided in this listing; a minimal abstract wrapper is
// reconstructed below so that the member references in deserialize resolve.
abstract class YamlFile[T](modelClass: Class[T]) extends Logging {

  protected def text: String
  protected def redactedText: String
  protected def getYamlFileName: String
  protected def getParseErrorMessage(lineNr: Int, columnNr: Int): String

  def deserialize: T = Try {
    val yamlStr = text
    logInfo(s"Parsing $getYamlFileName YAML file :-\n $redactedText")
    parseYAML(yamlStr, modelClass)
  } match {
    case Success(t) => logInfo(s"Successfully parsed $getYamlFileName YAML File"); t
    case Failure(e) => e match {
      case e: JsonMappingException =>
        Option(e.getLocation) match {
          case Some(location) =>
            val errorMessage = getParseErrorMessage(location.getLineNr, location.getColumnNr)
            logError(errorMessage + s"\n${e.getMessage}")
          case None => e.getCause match {
            case YamlDataTransformException(_, failures) =>
              failures.foreach {
                case Failed(msg, line, index) =>
                  val errorMessage = getParseErrorMessage(line, index)
                  logError(errorMessage + s"\n$msg")
              }
            case _ =>
          }
        }
        throw e
      case _ => throw e
    }
  }

} 
Example 112
Source File: CommonClient.scala    From twitter4s   with Apache License 2.0 5 votes vote down vote up
package com.danielasfregola.twitter4s.http.clients

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpEntity, HttpRequest, HttpResponse}
import akka.stream.Materializer
import com.danielasfregola.twitter4s.exceptions.{Errors, TwitterException}
import com.danielasfregola.twitter4s.http.serializers.JsonSupport
import com.typesafe.scalalogging.LazyLogging
import org.json4s.native.Serialization

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Try

private[twitter4s] trait CommonClient extends JsonSupport with LazyLogging {

  def withLogRequest: Boolean
  def withLogRequestResponse: Boolean

  protected def connection(implicit request: HttpRequest, system: ActorSystem) = {
    val scheme = request.uri.scheme
    val host = request.uri.authority.host.toString
    val port = request.uri.effectivePort
    if (scheme == "https") Http().outgoingConnectionHttps(host, port)
    else Http().outgoingConnection(host, port)
  }

  protected def unmarshal[T](requestStartTime: Long, f: HttpResponse => Future[T])(implicit request: HttpRequest,
                                                                                   response: HttpResponse,
                                                                                   materializer: Materializer) = {
    implicit val ec = materializer.executionContext
    if (withLogRequestResponse) logRequestResponse(requestStartTime)

    if (response.status.isSuccess) f(response)
    else parseFailedResponse(response).flatMap(Future.failed)
  }

  protected def parseFailedResponse(response: HttpResponse)(implicit materializer: Materializer) = {
    implicit val ec = materializer.executionContext
    response.entity.toStrict(50 seconds).map { sink =>
      val body = sink.data.utf8String
      val errors = Try {
        Serialization.read[Errors](body)
      } getOrElse Errors(body)
      TwitterException(response.status, errors)
    }
  }

  // TODO - logRequest, logRequestResponse customisable?
  def logRequest(implicit request: HttpRequest, materializer: Materializer): HttpRequest = {
    implicit val ec = materializer.executionContext
    logger.info(s"${request.method.value} ${request.uri}")
    if (logger.underlying.isDebugEnabled) {
      for {
        requestBody <- toBody(request.entity)
      } yield logger.debug(s"${request.method.value} ${request.uri} | $requestBody")
    }
    request
  }

  def logRequestResponse(requestStartTime: Long)(implicit request: HttpRequest,
                                                 materializer: Materializer): HttpResponse => HttpResponse = {
    response =>
      implicit val ec = materializer.executionContext
      val elapsed = System.currentTimeMillis - requestStartTime
      logger.info(s"${request.method.value} ${request.uri} (${response.status}) | ${elapsed}ms")
      if (logger.underlying.isDebugEnabled) {
        for {
          responseBody <- toBody(response.entity)
        } yield
          logger.debug(
            s"${request.method.value} ${request.uri} (${response.status}) | ${response.headers.mkString(", ")} | $responseBody")
      }
      response
  }

  private def toBody(entity: HttpEntity)(implicit materializer: Materializer): Future[String] = {
    implicit val ec = materializer.executionContext
    entity.toStrict(5 seconds).map(_.data.decodeString("UTF-8"))
  }
} 
Example 113
Source File: Examples.scala    From scala-tutorials   with MIT License 5 votes vote down vote up
package com.baeldung.scala.exceptionhandling

import scala.util.{Try, Success, Failure}
import scala.util.control.Exception._

object Examples {
  import CalculatorExceptions._
  def tryCatch(a: Int, b: Int): Int = {
    try {
      return Calculator.sum(a, b)
      // println(s"${a} + ${b} = ${result}")
    } catch {
      case e: IntOverflowException    => -1
      case e: NegativeNumberException => -2
    } finally {
      // This block will always be invoked
      println("Calculation done!")
    }
  }

  def trySuccessFailure(a: Int, b: Int): Try[Int] = Try {
    Calculator.sum(a, b)
  }

  def catchObjects(a: Int, b: Int): Try[Int] = allCatch.withTry {
    Calculator.sum(a, b)
  }

  val myCustomCatcher = catching(classOf[NegativeNumberException])

  def customCatchObjects(a: Int, b: Int): Try[Int] = myCustomCatcher.withTry {
    Calculator.sum(a, b)
  }
} 
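A usage sketch, assuming Calculator.sum rejects negative inputs and integer overflow as its exception names suggest:

object ExceptionHandlingDemo extends App {
  println(Examples.tryCatch(1, 2))                 // 3, after printing "Calculation done!"
  println(Examples.trySuccessFailure(1, -2))       // Failure(NegativeNumberException)
  println(Examples.catchObjects(Int.MaxValue, 1))  // Failure(IntOverflowException)
  println(Examples.customCatchObjects(1, -2))      // Failure(NegativeNumberException)
}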
Example 114
Source File: VectorEndpoint.scala    From spark-vector   with Apache License 2.0 5 votes vote down vote up
package com.actian.spark_vector.datastream

import scala.util.Try

import com.actian.spark_vector.util.Logging
import com.actian.spark_vector.vector.VectorJDBC


// The enclosing companion object was elided in this listing; a minimal wrapper
// is reconstructed below, with assumed stand-ins for the elided SQL text and
// row-parsing factory.
object VectorEndpoint extends Logging {

  private val getVectorEndPointSql: String =
    "select * from iivwtable_datastreams" // assumed query text, elided in the listing

  // Parses one result row into an endpoint; the real implementation is elided.
  def apply(row: Seq[Any], ingresHost: String): Option[VectorEndpoint] = ???

  def fromDataStreamsTable(cxn: VectorJDBC): IndexedSeq[VectorEndpoint] = {
    logDebug(s"Running sql query ${getVectorEndPointSql} to get the datastream endpoints' info.")
    val resultSet = cxn.query(getVectorEndPointSql)
    val ret = resultSet
      .map(VectorEndpoint(_, cxn.getIngresHost))
      .flatten
    logDebug(s"Got the following VectorEndPoints from the datastreams table: ${ret.map(_.toString).mkString(",")}")
    ret.toIndexedSeq
  }
} 
Example 115
Source File: DataGens.scala    From spark-vector   with Apache License 2.0 5 votes vote down vote up
package com.actian.spark_vector

import java.math.BigDecimal
import java.{ sql => jsql }
import java.util.Calendar

import scala.collection.Seq
import scala.util.Try

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalacheck.Gen

import com.actian.spark_vector.colbuffer.util.MillisecondsInDay
import java.math.RoundingMode

object DataGens {
  import com.actian.spark_vector.DataTypeGens._
  import org.scalacheck.Arbitrary._
  import org.scalacheck.Gen._
  import scala.collection.JavaConverters._

  val DefaultMaxRows = 500

  val booleanGen: Gen[Boolean] = arbitrary[Boolean]

  val byteGen: Gen[Byte] = arbitrary[Byte]

  val shortGen: Gen[Short] = arbitrary[Short]

  val intGen: Gen[Int] = arbitrary[Int]

  val longGen: Gen[Long] = arbitrary[Long]

  // FIXME allow arbitrary doubles (and filter externally for vector tests)
  val floatGen: Gen[Float] = arbitrary[Float].map(f => if (f.abs > 1e-38) f else 0.0f)

  // FIXME allow arbitrary doubles (and filter externally for vector tests)
  val doubleGen: Gen[Double] = for {
    neg <- arbitrary[Boolean]
    digits <- listOfN(12, choose(0, 9))
  } yield s"${if (neg) "-" else ""}1.${digits.mkString("")}".toDouble

  val decimalGen: Gen[BigDecimal] = arbitrary[scala.BigDecimal].retryUntil(bd =>
    bd.scale <= 12 && bd.scale >= 0 && bd.precision <= 26 &&
    Try { new BigDecimal(bd.toString) }.isSuccess).map(bd => new BigDecimal(bd.toString))

  private val dateValueGen: Gen[Long] =
    choose(-3600L * 1000 * 24 * 100000L, 3600L * 1000 * 24 * 100000L)

  // @note normalize getTime so that we don't have diffs more than 1 day in between our {JDBC,Spark}results
  val dateGen: Gen[jsql.Date] = dateValueGen.map(d => new jsql.Date(d / MillisecondsInDay * MillisecondsInDay))

  val timestampGen: Gen[jsql.Timestamp] = for (ms <- dateValueGen) yield new jsql.Timestamp(ms)

  // FIXME allow empty strings (and filter externally for vector tests)
  // @note we do not allow invalid UTF8 chars to be generated (from D800 to DFFF incl)
  val stringGen: Gen[String] =
    listOfN(choose(1, 512).sample.getOrElse(1), arbitrary[Char]).map(_.mkString).map( s => s.filter(c => Character.isDefined(c) && c != '\u0000' && (c < '\uD800' || c > '\uDFFF')) )

  def valueGen(dataType: DataType): Gen[Any] = dataType match {
    case BooleanType => booleanGen
    case ByteType => byteGen
    case ShortType => shortGen
    case IntegerType => intGen
    case LongType => longGen
    case FloatType => floatGen
    case DoubleType => doubleGen
    case TimestampType => timestampGen
    case DateType => dateGen
    case StringType => stringGen
    case _: DecimalType => decimalGen
    case _ => throw new Exception("Invalid data type.")
  }

  def nullableValueGen(field: StructField): Gen[Any] = {
    val gen = valueGen(field.dataType)
    if (field.nullable) frequency(1 -> gen, 10 -> const(null)) else gen
  }

  def rowGen(schema: StructType): Gen[Row] =
    sequence(schema.fields.map(f => nullableValueGen(f))).map(l => Row.fromSeq(l.asScala)) // TODO Huh? Why ju.ArrayList?!?

  def dataGenFor(schema: StructType, maxRows: Int): Gen[Seq[Row]] = for {
    numRows <- choose(1, maxRows)
    rows <- listOfN(numRows, rowGen(schema))
  } yield rows

  case class TypedData(dataType: StructType, data: Seq[Row])

  val dataGen: Gen[TypedData] = for {
    schema <- schemaGen
    data <- dataGenFor(schema, DefaultMaxRows)
  } yield TypedData(schema, data)
  
  val allDataGen: Gen[TypedData] = for {
    schema <- allTypesSchemaGen
    data <- dataGenFor(schema, DefaultMaxRows)
  } yield TypedData(schema, data)
  
} 
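A usage sketch: draw one random schema together with matching rows (sample can be None if generation fails, hence the foreach).

object DataGensDemo extends App {
  DataGens.dataGen.sample.foreach { case DataGens.TypedData(schema, rows) =>
    println(s"${rows.size} rows for schema ${schema.simpleString}")
  }
}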
Example 116
Source File: TwitterSinkConnector.scala    From kafka-connect-twitter   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import java.util

import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.SinkConnector

import scala.collection.JavaConverters._
import scala.util.{Failure, Try}

class TwitterSinkConnector extends SinkConnector with Logging {
  private var configProps: util.Map[String, String] = null

  
  // taskClass and taskConfigs were elided in this listing; minimal assumed
  // reconstructions are shown so the connector satisfies the SinkConnector API.
  override def taskClass(): Class[_ <: Task] = classOf[TwitterSinkTask]

  override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] =
    (1 to maxTasks).map(_ => configProps).toList.asJava

  override def start(props: util.Map[String, String]): Unit = {
    log.info(s"Starting Twitter sink task with ${props.toString}.")
    configProps = props
    Try(new TwitterSinkConfig(props)) match {
      case Failure(f) => throw new ConnectException("Couldn't start TwitterSinkConnector due to configuration error.", f)
      case _ =>
    }
  }

  override def stop(): Unit = {}
  override def version(): String = ""
} 
Example 117
Source File: TwitterSourceConnector.scala    From kafka-connect-twitter   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import java.util
import org.apache.kafka.connect.connector.{Task, Connector}
import org.apache.kafka.connect.errors.ConnectException
import scala.collection.JavaConverters._
import scala.util.{Failure, Try}


// The class declaration and task wiring were elided in this listing; a minimal
// reconstruction is shown (TwitterSourceTask is the assumed task class).
class TwitterSourceConnector extends Connector with Logging {
  private var configProps: util.Map[String, String] = null

  override def taskClass(): Class[_ <: Task] = classOf[TwitterSourceTask]

  override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] =
    (1 to maxTasks).map(_ => configProps).toList.asJava

  override def start(props: util.Map[String, String]): Unit = {
    log.info(s"Starting Twitter source task with ${props.toString}.")
    configProps = props
    Try(new TwitterSourceConfig(props)) match {
      case Failure(f) => throw new ConnectException("Couldn't start Twitter source due to configuration error: "
          + f.getMessage, f)
      case _ =>
    }
  }

  override def stop() = {}
  override def version(): String = ""
} 
Example 118
Source File: TestSinkTask.scala    From kafka-connect-twitter   with Apache License 2.0 5 votes vote down vote up
package com.eneco.trading.kafka.connect.twitter

import org.apache.kafka.connect.sink.SinkRecord
import scala.collection.JavaConverters._
import scala.util.{Success, Try}

class TestSinkTask extends TestTwitterBase {
  test("Strings put to to Task are tweeted") {
    val sinkTask = new TwitterSinkTask()
    val myTestTweet = "I tweet, ergo sum."
    sinkTask.writer = Some(new SimpleTwitterWriter {
      //TODO: use DI?
      def updateStatus(s: String): Try[Long] = {
        s shouldEqual myTestTweet
        Success(5)
      }
    })
    val sr = new SinkRecord("topic", 5, null, null, null, myTestTweet, 123)
    sinkTask.put(Seq(sr).asJava)
  }

} 
Example 119
Source File: TapirJsonJsoniter.scala    From tapir   with Apache License 2.0 5 votes
package sttp.tapir.json.jsoniter

import com.github.plokhotnyuk.jsoniter_scala.core.JsonValueCodec
import sttp.tapir.Codec.JsonCodec
import sttp.tapir.DecodeResult.{Error, Value}
import sttp.tapir.{EndpointIO, Schema, Validator, anyFromUtf8StringBody}
import com.github.plokhotnyuk.jsoniter_scala.core._

import scala.util.{Failure, Success, Try}

trait TapirJsonJsoniter {
  def jsonBody[T: JsonValueCodec: Schema: Validator]: EndpointIO.Body[String, T] = anyFromUtf8StringBody(jsoniterCodec[T])

  implicit def jsoniterCodec[T: JsonValueCodec: Schema: Validator]: JsonCodec[T] =
    sttp.tapir.Codec.json { s =>
      Try(readFromString[T](s)) match {
        case Failure(error) => Error(s, error)
        case Success(v)     => Value(v)
      }
    } { t => writeToString[T](t) }
} 
Example 120
Source File: TapirJsonuPickle.scala    From tapir   with Apache License 2.0 5 votes
package sttp.tapir.json.upickle

import scala.util.{Try, Success, Failure}
import sttp.tapir._
import sttp.tapir.Codec.JsonCodec
import sttp.tapir.DecodeResult.{Error, Value}
import upickle.default.{ReadWriter, read, write}

trait TapirJsonuPickle {

  def jsonBody[T: ReadWriter: Schema: Validator]: EndpointIO.Body[String, T] = anyFromUtf8StringBody(readWriterCodec[T])

  implicit def readWriterCodec[T: ReadWriter: Schema: Validator]: JsonCodec[T] =
    Codec.json[T] { s =>
      Try(read[T](s)) match {
        case Success(v) => Value(v)
        case Failure(e) => Error("upickle decoder failed", e)
      }
    } { t => write(t) }
} 
Example 121
Source File: TapirJsonSpray.scala    From tapir   with Apache License 2.0 5 votes
package sttp.tapir.json.spray

import spray.json._
import sttp.tapir.Codec.JsonCodec
import sttp.tapir.DecodeResult.{Error, Value}
import sttp.tapir.SchemaType._
import sttp.tapir._

import scala.util.{Failure, Success, Try}

trait TapirJsonSpray {
  def jsonBody[T: JsonFormat: Schema: Validator]: EndpointIO.Body[String, T] = anyFromUtf8StringBody(jsonFormatCodec[T])

  implicit def jsonFormatCodec[T: JsonFormat: Schema: Validator]: JsonCodec[T] =
    Codec.json { s =>
      Try(s.parseJson.convertTo[T]) match {
        case Success(v) => Value(v)
        case Failure(e) => Error("spray json decoder failed", e)
      }
    } { t => t.toJson.toString() }

  implicit val schemaForSprayJsValue: Schema[JsValue] = Schema(
    SProduct(
      SObjectInfo("spray.json.JsValue"),
      List.empty
    )
  )
} 
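
A usage sketch for the trait above. Person and its spray format are illustrative, and automatic Schema/Validator derivation for case classes is assumed for this tapir version:

import spray.json.DefaultJsonProtocol._
import sttp.tapir._
import sttp.tapir.json.spray._

case class Person(name: String, age: Int)
implicit val personFormat = jsonFormat2(Person)

// A parse failure inside Try surfaces as DecodeResult.Error, so a server
// can answer with a 400 instead of throwing.
val echo = endpoint.post.in(jsonBody[Person]).out(jsonBody[Person])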
Example 122
Source File: MonadError.scala    From tapir   with Apache License 2.0 5 votes
package sttp.tapir.monad

import scala.concurrent.{ExecutionContext, Future}
import scala.language.higherKinds
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

trait MonadError[F[_]] {
  def unit[T](t: T): F[T]
  def map[T, T2](fa: F[T])(f: T => T2): F[T2]
  def flatMap[T, T2](fa: F[T])(f: T => F[T2]): F[T2]

  def error[T](t: Throwable): F[T]
  protected def handleWrappedError[T](rt: F[T])(h: PartialFunction[Throwable, F[T]]): F[T]
  def handleError[T](rt: => F[T])(h: PartialFunction[Throwable, F[T]]): F[T] = {
    Try(rt) match {
      case Success(v)                     => handleWrappedError(v)(h)
      case Failure(e) if h.isDefinedAt(e) => h(e)
      case Failure(e)                     => error(e)
    }
  }
}

object MonadError {
  def recoverErrors[I, E, O, F[_]](
      f: I => F[O]
  )(implicit eClassTag: ClassTag[E], eIsThrowable: E <:< Throwable): MonadError[F] => I => F[Either[E, O]] = { implicit monad => i =>
    import sttp.tapir.monad.syntax._
    monad.handleError(f(i).map(Right(_): Either[E, O])) {
      case e if eClassTag.runtimeClass.isInstance(e) => (Left(e.asInstanceOf[E]): Either[E, O]).unit
    }
  }
}

class FutureMonadError(implicit ec: ExecutionContext) extends MonadError[Future] {
  override def unit[T](t: T): Future[T] = Future.successful(t)
  override def map[T, T2](fa: Future[T])(f: (T) => T2): Future[T2] = fa.map(f)
  override def flatMap[T, T2](fa: Future[T])(f: (T) => Future[T2]): Future[T2] = fa.flatMap(f)
  override def error[T](t: Throwable): Future[T] = Future.failed(t)
  override protected def handleWrappedError[T](rt: Future[T])(h: PartialFunction[Throwable, Future[T]]): Future[T] = rt.recoverWith(h)
} 
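
A minimal sketch of handleError with the Future instance: Try(rt) catches exceptions thrown while constructing the wrapped value, while failures inside the Future go through recoverWith:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

val monad = new FutureMonadError
val recovered: Future[Int] =
  monad.handleError(Future.failed[Int](new RuntimeException("boom"))) {
    case _: RuntimeException => Future.successful(0)
  } // completes with 0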
Example 123
Source File: EncodeOutputs.scala    From tapir   with Apache License 2.0 5 votes
package sttp.tapir.server.internal

import java.nio.charset.Charset

import sttp.model.{HeaderNames, StatusCode}
import sttp.tapir.internal.{Params, ParamsAsAny, SplitParams}
import sttp.tapir.{CodecFormat, EndpointIO, EndpointOutput, Mapping, RawBodyType, StreamingEndpointIO}

import scala.util.Try

class EncodeOutputs[B](encodeOutputBody: EncodeOutputBody[B]) {
  def apply(output: EndpointOutput[_], value: Params, ov: OutputValues[B]): OutputValues[B] = {
    output match {
      case s: EndpointOutput.Single[_]                => applySingle(s, value, ov)
      case s: EndpointIO.Single[_]                    => applySingle(s, value, ov)
      case EndpointOutput.Pair(left, right, _, split) => applyPair(left, right, split, value, ov)
      case EndpointIO.Pair(left, right, _, split)     => applyPair(left, right, split, value, ov)
      case EndpointOutput.Void()                      => throw new IllegalArgumentException("Cannot encode a void output!")
    }
  }

  private def applyPair(
      left: EndpointOutput[_],
      right: EndpointOutput[_],
      split: SplitParams,
      params: Params,
      ov: OutputValues[B]
  ): OutputValues[B] = {
    val (leftParams, rightParams) = split(params)
    apply(right, rightParams, apply(left, leftParams, ov))
  }

  private def applySingle(output: EndpointOutput.Single[_], value: Params, ov: OutputValues[B]): OutputValues[B] = {
    def encoded[T]: T = output._mapping.asInstanceOf[Mapping[T, Any]].encode(value.asAny)
    output match {
      case EndpointIO.Empty(_, _)                   => ov
      case EndpointOutput.FixedStatusCode(sc, _, _) => ov.withStatusCode(sc)
      case EndpointIO.FixedHeader(header, _, _)     => ov.withHeader(header.name -> header.value)
      case EndpointIO.Body(rawValueType, codec, _)  => ov.withBody(encodeOutputBody.rawValueToBody(encoded, codec.format, rawValueType))
      case EndpointIO.StreamBodyWrapper(StreamingEndpointIO.Body(codec, _, charset)) =>
        ov.withBody(encodeOutputBody.streamValueToBody(encoded, codec.format, charset))
      case EndpointIO.Header(name, _, _) =>
        encoded[List[String]].foldLeft(ov) { case (ovv, headerValue) => ovv.withHeader((name, headerValue)) }
      case EndpointIO.Headers(_, _)           => encoded[List[sttp.model.Header]].foldLeft(ov)((ov2, h) => ov2.withHeader((h.name, h.value)))
      case EndpointIO.MappedPair(wrapped, _)  => apply(wrapped, ParamsAsAny(encoded), ov)
      case EndpointOutput.StatusCode(_, _, _) => ov.withStatusCode(encoded[StatusCode])
      case EndpointOutput.OneOf(mappings, _) =>
        val enc = encoded[Any]
        val mapping = mappings
          .find(mapping => mapping.appliesTo(enc))
          .getOrElse(throw new IllegalArgumentException(s"No status code mapping for value: $enc, in output: $output"))
        apply(mapping.output, ParamsAsAny(enc), mapping.statusCode.map(ov.withStatusCode).getOrElse(ov))
      case EndpointOutput.MappedPair(wrapped, _) => apply(wrapped, ParamsAsAny(encoded), ov)
    }
  }
}

case class OutputValues[B](body: Option[B], headers: Vector[(String, String)], statusCode: Option[StatusCode]) {
  def withBody(b: B): OutputValues[B] = {
    if (body.isDefined) {
      throw new IllegalArgumentException("Body is already defined")
    }

    copy(body = Some(b))
  }

  def withHeader(h: (String, String)): OutputValues[B] = copy(headers = headers :+ h)

  def withStatusCode(sc: StatusCode): OutputValues[B] = copy(statusCode = Some(sc))

  def contentLength: Option[Long] =
    headers
      .collectFirst {
        case (k, v) if HeaderNames.ContentLength.equalsIgnoreCase(k) => v
      }
      .flatMap(v => Try(v.toLong).toOption)
}
object OutputValues {
  def empty[B]: OutputValues[B] = OutputValues[B](None, Vector.empty, None)
}

trait EncodeOutputBody[B] {
  def rawValueToBody(v: Any, format: CodecFormat, bodyType: RawBodyType[_]): B
  def streamValueToBody(v: Any, format: CodecFormat, charset: Option[Charset]): B
} 
Example 124
Source File: HivePartitionFetcher.scala    From flamy   with Apache License 2.0 5 votes
package com.flaminem.flamy.exec.hive

import com.flaminem.flamy.conf.FlamyContext
import com.flaminem.flamy.model._
import com.flaminem.flamy.model.names.{SchemaName, TableName, TablePartitionName}
import com.flaminem.flamy.model.partitions.{PartitionWithInfo, TablePartitioningInfo}
import com.flaminem.flamy.utils.logging.Logging
import org.apache.commons.configuration.ConfigurationException

import scala.util.Try


// The enclosing companion object was elided from this snippet; a plausible
// reconstruction (Logging supplies the logger used below):
object HivePartitionFetcher extends Logging {

  def apply(context: FlamyContext): HivePartitionFetcher = {
    val fetcherType = context.HIVE_META_FETCHER_TYPE.getProperty.toLowerCase
    logger.debug(f"Getting new HiveMetaDataFetcher of type $fetcherType")
    fetcherType match {
      case "direct" =>
        new DirectHivePartitionFetcher(context)
      case "client" =>
        new ClientHivePartitionFetcher(context)
      case "default" =>
        Try(new DirectHivePartitionFetcher(context))
        .getOrElse(new ClientHivePartitionFetcher(context))
      case _ => throw new ConfigurationException(f"${context.HIVE_META_FETCHER_TYPE.propertyKey} is not well defined.\n" +
        f"Developers: this error should have been prevented by a Configuration Validator")
    }
  }

} 
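
The "default" branch uses Try as a fallback combinator: attempt the direct fetcher and, if its constructor throws, fall back to the client one. The same idiom in isolation (a hypothetical helper):

def firstThatWorks[T](primary: => T, fallback: => T): T =
  Try(primary).getOrElse(fallback) // fallback is evaluated only if primary throws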
Example 125
Source File: ItemName.scala    From flamy   with Apache License 2.0 5 votes
package com.flaminem.flamy.model.names

import com.flaminem.flamy.model.exceptions.FlamyException
import org.rogach.scallop.ValueConverter

import scala.collection.TraversableLike
import scala.collection.generic.CanBuildFrom
import scala.language.{higherKinds, implicitConversions}
import scala.reflect.runtime.universe._
import scala.util.{Failure, Success, Try}


// The enclosing companion object was elided from this snippet; a plausible
// reconstruction:
object ItemName {

  def tryParse(s: String): Try[ItemName] = {
    Try{
      SchemaName.parse(s)
      .orElse{
        TableName.parse(s)
      }
      .orElse{
        TablePartitionName.parse(s)
      }
      .getOrElse {
        throw new IllegalArgumentException("Wrong item name : " + s)
      }
    }
  }

  def tryParse(s: String, allowedTypes: Set[Class[_]]): Try[ItemName] = {
    tryParse(s).flatMap {
      case item if allowedTypes.contains(item.getClass) => Success(item)
      case item =>
        Failure(
          new IllegalArgumentException(
            s"Item $item is a ${item.getClass.getSimpleName}, " +
            s"but only the following item types are allowed: ${allowedTypes.map{_.getSimpleName}.mkString("[", ", ", "]")}"
          )
        )
    }
  }

  implicit def fromStringTraversableLike[T[Z] <: TraversableLike[Z, T[Z]]]
  (l: T[String])(implicit bf: CanBuildFrom[T[String], ItemName, T[ItemName]]) : T[ItemName] = {
    l.map{tryParse(_).get}
  }

  private def fromArgs(args: Seq[String]): Either[String, Option[List[ItemName]]] = {
    val tries: Seq[Try[ItemName]] = args.map{tryParse}
    if(tries.forall{_.isSuccess}){
      Right(Some(tries.map{_.get}.toList))
    }
    else {
      val firstFailureIndex = tries.indexWhere(_.isFailure)
      Left(s"Could not parse the item name ${args(firstFailureIndex)}")
    }
  }

  implicit val scallopConverterList: ValueConverter[List[ItemName]] = {
    new ValueConverter[List[ItemName]] {
      override def parse(s: List[(String, List[String])]): Either[String, Option[List[ItemName]]] = {
        s match {
          case l if l.nonEmpty => fromArgs(l.flatMap{_._2})
          case Nil => Right(None)
        }
      }
      override val tag: TypeTag[List[ItemName]] = typeTag[List[ItemName]]
      override val argType = org.rogach.scallop.ArgType.LIST
    }
  }

} 
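
A usage sketch (the item-name syntax is illustrative); tryParse reports the parse error as a Failure instead of throwing:

import scala.util.{Failure, Success}

ItemName.tryParse("my_schema.my_table") match {
  case Success(item) => println(s"parsed $item as ${item.getClass.getSimpleName}")
  case Failure(e)    => println(s"rejected: ${e.getMessage}")
}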
Example 126
Source File: streamDMJob.scala    From streamDM   with Apache License 2.0 5 votes
package org.apache.spark.streamdm

import org.apache.spark._
import org.apache.spark.streamdm.tasks.Task
import org.apache.spark.streaming._
import com.github.javacliparser.ClassOption

import scala.util.Try


object streamDMJob {

  def main(args: Array[String]) {

    //configuration and initialization of model
    val conf = new SparkConf().setAppName("streamDM")

    var paramsArgs = args.clone()
    var batchInterval: Int = 1000
    if(args.length > 0){
      val firstArg = args(0)
      if(Try(firstArg.toInt).isSuccess){
        if(firstArg.toInt > 0 && firstArg.toInt < Int.MaxValue){
          batchInterval = firstArg.toInt
        }
        paramsArgs = paramsArgs.drop(1)
      }
    }

    val ssc = new StreamingContext(conf, Milliseconds(batchInterval))

    //run task
    val string = if (paramsArgs.length > 0) paramsArgs.mkString(" ")
    else "EvaluatePrequential"
    val task:Task = ClassOption.cliStringToObject(string, classOf[Task], null)
    task.run(ssc)

    //start the loop
    ssc.start()
    ssc.awaitTermination()
  }
} 
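
The argument handling above uses Try(firstArg.toInt).isSuccess to probe whether the first argument is a batch interval. The same guard in isolation:

// Try turns NumberFormatException into a boolean test instead of a crash.
def isIntArg(s: String): Boolean = scala.util.Try(s.toInt).isSuccess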
Example 127
Source File: Tar.scala    From libisabelle   with Apache License 2.0 5 votes
package info.hupel.isabelle.setup

import java.net.URL
import java.nio.file._
import java.nio.file.attribute.PosixFilePermissions

import scala.util.Try

import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarArchiveInputStream}
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
import org.apache.commons.lang3.SystemUtils


object Tar {

  val execPermissions = PosixFilePermissions.fromString("rwxr-xr-x")

  def download(url: URL): Try[TarArchiveInputStream] =
    Try(new TarArchiveInputStream(new GzipCompressorInputStream(url.openStream())))

  def extractTo(path: Path, tar: TarArchiveInputStream): Try[Path] = Try {
    def next() = Option(tar.getNextTarEntry())

    @annotation.tailrec
    def go(entry: Option[TarArchiveEntry], paths: List[Path]): List[Path] = entry match {
      case None =>
        paths.reverse
      case Some(entry) =>
        val name = entry.getName
        val subpath = path.resolve(name).normalize

        if (subpath.startsWith(path) && !Files.exists(subpath, LinkOption.NOFOLLOW_LINKS)) {
          Files.createDirectories(subpath.getParent)
          if (entry.isDirectory)
            Files.createDirectory(subpath)
          else if (entry.isSymbolicLink)
            Files.createSymbolicLink(subpath, Paths.get(entry.getLinkName))
          else if (entry.isLink)
            Files.createLink(subpath, path.resolve(Paths.get(entry.getLinkName)))
          else if (entry.isFile) {
            Files.copy(tar, subpath)
            if (!SystemUtils.IS_OS_WINDOWS && (entry.getMode % 2 == 1))
              Files.setPosixFilePermissions(subpath, execPermissions)
          }
          else
            sys.error("unknown tar file entry")
        }
        else
          sys.error("malicious tar file or file already exists")

        val p = if (entry.isDirectory) List(subpath) else Nil

        go(next(), p ::: paths)
    }

    go(next(), Nil).foldLeft(List.empty[Path]) { (roots, path) =>
      if (roots.exists(path.startsWith))
        roots
      else
        path :: roots
    } match {
      case List(root) => root
      case _ => sys.error("untarring created more than one root directory")
    }
  }

} 
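
Because download and extractTo both return Try, they compose in a for-comprehension and the first failing step short-circuits. URL and target directory below are illustrative:

import java.net.URL
import java.nio.file.{Path, Paths}
import scala.util.Try

val extracted: Try[Path] = for {
  tar  <- Tar.download(new URL("https://example.com/archive.tar.gz"))
  root <- Tar.extractTo(Paths.get("/tmp/extract-target"), tar)
} yield root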
Example 128
Source File: FutureCodec.scala    From aws-lambda-scala   with MIT License 5 votes
package io.github.mkotsur.aws.codecs

import java.io.ByteArrayOutputStream
import java.nio.charset.Charset

import io.circe.Encoder
import io.github.mkotsur.aws.handler.CanEncode
import io.github.mkotsur.aws.proxy.ProxyResponse
import io.circe.generic.auto._
import io.circe.syntax._
import cats.syntax.either.catsSyntaxEither

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.{Failure, Success, Try}

private[aws] trait FutureCodec {
  implicit def canEncodeFuture[I: Encoder](implicit canEncode: Encoder[I]) =
    CanEncode.instance[Future[I]]((os, responseEither, ctx) => {
      (for {
        response     <- responseEither.toTry
        futureResult <- Try(Await.result(response, ctx.getRemainingTimeInMillis millis))
        json         <- Try(canEncode(futureResult).noSpaces.getBytes)
        _            <- Try(os.write(json))
      } yield {
        ()
      }) match {
        case Success(v) => Right(v)
        case Failure(e) => Left(e)
      }
    })

  implicit def canEncodeProxyResponse[T](implicit canEncode: CanEncode[T]) = CanEncode.instance[ProxyResponse[T]](
    (output, proxyResponseEither, ctx) => {

      def writeBody(bodyOption: Option[T]): Either[Throwable, Option[String]] =
        bodyOption match {
          case None => Right(None)
          case Some(body) =>
            val os     = new ByteArrayOutputStream()
            val result = canEncode.writeStream(os, Right(body), ctx)
            os.close()
            result.map(_ => Some(os.toString()))
        }

      val proxyResponseOrError = for {
        proxyResponse <- proxyResponseEither
        bodyOption    <- writeBody(proxyResponse.body)
      } yield
        ProxyResponse[String](
          proxyResponse.statusCode,
          proxyResponse.headers,
          bodyOption
        )

      val response = proxyResponseOrError match {
        case Right(proxyResponse) =>
          proxyResponse
        case Left(e) =>
          ProxyResponse[String](
            500,
            Some(Map("Content-Type" -> s"text/plain; charset=${Charset.defaultCharset().name()}")),
            Some(e.getMessage)
          )
      }

      output.write(response.asJson.noSpaces.getBytes)

      Right(())
    }
  )
} 
Example 129
Source File: Lambda.scala    From aws-lambda-scala   with MIT License 5 votes
package io.github.mkotsur.aws.handler

import java.io.{InputStream, OutputStream}

import com.amazonaws.services.lambda.runtime.{Context, RequestStreamHandler}
import io.circe.generic.auto._
import io.github.mkotsur.aws.codecs._
import io.github.mkotsur.aws.proxy.{ProxyRequest, ProxyResponse}
import org.slf4j.LoggerFactory
import cats.syntax.either.catsSyntaxEither
import io.github.mkotsur.aws.handler.Lambda.HandleResult

import scala.language.{higherKinds, postfixOps}
import scala.util.{Failure, Success, Try}

object Lambda extends AllCodec with ProxyRequestCodec {

  type Handle[I, O]    = (I, Context) => HandleResult[O]
  type HandleResult[O] = Either[Throwable, O]
  type Proxy[I, O]     = Lambda[ProxyRequest[I], ProxyResponse[O]]

  object Proxy {
    type Handle[I, O]    = (ProxyRequest[I], Context) => HandleResult[O]
    type HandleResult[O] = Either[Throwable, ProxyResponse[O]]

    private type CanDecodeProxyRequest[A] = CanDecode[ProxyRequest[A]]
    private type CanEncodeProxyRequest[A] = CanEncode[ProxyResponse[A]]

    def instance[I: CanDecodeProxyRequest, O: CanEncodeProxyRequest](
        doHandle: Proxy.Handle[I, O]): Lambda[ProxyRequest[I], ProxyResponse[O]] =
      new Lambda.Proxy[I, O] {
        override protected def handle(i: ProxyRequest[I], c: Context) = doHandle(i, c)
      }
  }

  def instance[I: CanDecode, O: CanEncode](doHandle: Handle[I, O]) =
    new Lambda[I, O] {
      override protected def handle(i: I, c: Context): Either[Throwable, O] = {
        super.handle(i, c)
        doHandle(i, c)
      }
    }

  type ReadStream[I]       = InputStream => Either[Throwable, I]
  type ObjectHandler[I, O] = I => Either[Throwable, O]
  type WriteStream[O]      = (OutputStream, Either[Throwable, O], Context) => Either[Throwable, Unit]

  private val logger = LoggerFactory.getLogger(getClass)

}

abstract class Lambda[I: CanDecode, O: CanEncode] extends RequestStreamHandler {

  
  // Restored (plausibly) from the elided part of this snippet: the typed
  // handler that subclasses or Lambda.instance override; the default body is
  // a placeholder, not necessarily the library's exact message.
  protected def handle(i: I, c: Context): Either[Throwable, O] =
    Left(new NotImplementedError("Please implement the method handle(i: I, c: Context)"))

  final def handle(input: InputStream, output: OutputStream, context: Context): Unit =
    handleRequest(input, output, context)

  // This function will ultimately be used as the external handler
  final def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
    val read = implicitly[CanDecode[I]].readStream(input)
    val handled = read.flatMap { input =>
      Try(handle(input, context)) match {
        case Success(v) => v
        case Failure(e) =>
          Lambda.logger.error(s"Error while executing lambda handler: ${e.getMessage}", e)
          Left(e)
      }
    }
    val written = implicitly[CanEncode[O]].writeStream(output, handled, context)
    output.close()
    written.left.foreach(e => throw e)
  }

} 
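
A minimal handler sketch on top of the class above (Ping/Pong and the reversal logic are illustrative; the circe codecs are derived generically):

import com.amazonaws.services.lambda.runtime.Context
import io.circe.generic.auto._
import io.github.mkotsur.aws.handler.Lambda
import io.github.mkotsur.aws.handler.Lambda._

case class Ping(inputMsg: String)
case class Pong(outputMsg: String)

class PingPongHandler extends Lambda[Ping, Pong] {
  override protected def handle(ping: Ping, context: Context) =
    Right(Pong(ping.inputMsg.reverse))
}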
Example 130
Source File: Timed.scala    From scala-common   with Apache License 2.0 5 votes
package com.softwaremill.benchmark

import scala.util.{Random, Success, Try}

object Timed {

  def timed[T](b: => T): (T, Long) = {
    val start = System.currentTimeMillis()
    val r     = b
    (r, System.currentTimeMillis() - start)
  }

  private def defaultWarmup(tests: List[PerfTest]): Unit = {
    println("Warmup")
    for (test <- tests) {
      val (result, time) = timed { test.run() }
      println(f"${test.name}%-25s $result%-25s ${time / 1000.0d}%4.2fs")
    }

    println("---")
  }

  def runTests(
      tests: List[(String, () => String)],
      repetitions: Int
  ): Unit = {
    val testInstances = tests.map {
      case (nameStr, block) =>
        new PerfTest {
          override def name: String = nameStr

          override def run(): Try[String] = Success(block())
        }
    }
    runTests(testInstances, repetitions)
  }

  def runTests[T <: PerfTest](
      tests: List[T],
      repetitions: Int,
      warmup: List[T] => Unit = defaultWarmup _
  ): Unit = {
    val allTests = Random.shuffle(List.fill(repetitions)(tests).flatten)
    warmup(tests)
    println(s"Running ${allTests.size} tests")

    val rawResults = for (test <- allTests) yield {
      test.warmup()
      val name = test.name
      val (result, time) = timed {
        test.run()
      }
      result.foreach { rStr =>
        println(f"$name%-25s $rStr%-25s ${time / 1000.0d}%4.2fs")
      }
      result.map(r => name -> time)
    }
    val successfulRawResults = rawResults.filter(_.isSuccess).map(_.get)

    val results: Map[String, (Double, Double)] = successfulRawResults
      .groupBy(_._1)
      .map { case (name, nameWithTimes) =>
        val times = nameWithTimes.map(_._2)
        val count  = times.size
        val mean   = times.sum.toDouble / count
        val dev    = times.map(t => (t - mean) * (t - mean))
        val stddev = Math.sqrt(dev.sum / count)
        name -> (mean, stddev)
      }

    println("---")
    println("Averages (name,  mean, stddev)")
    results.toList.sortBy(_._2._1).foreach {
      case (name, (mean, stddev)) =>
        println(f"$name%-25s ${mean / 1000.0d}%4.2fs $stddev%4.2fms")
    }
  }

} 
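
A usage sketch with the tuple-based overload (the workloads are illustrative):

Timed.runTests(
  List(
    ("sum",     () => (1 to 1000000).sum.toString),
    ("product", () => (1L to 20L).product.toString)
  ),
  repetitions = 5
)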
Example 131
Source File: FutureTrySpec.scala    From scala-common   with Apache License 2.0 5 votes
import com.softwaremill.futuretry._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.matchers.must.Matchers

import scala.concurrent.duration.Duration
import scala.concurrent.{Future, Await, Promise}
import scala.util.{Failure, Success, Try}

class FutureTrySpec extends AnyFlatSpec with Matchers with TableDrivenPropertyChecks with ScalaFutures {

  import scala.concurrent.ExecutionContext.Implicits.global

  "tried" must "convert a successful result into a Success" in {
    val p = Promise[String]
    p.complete(Try("a"))

    val transformedFuture = p.future.tried

    transformedFuture.futureValue must be(Success("a"))
  }

  it must "convert an exceptional result into a Failure" in {
    val p = Promise[String]
    val exception = new RuntimeException("blah")
    p.complete(Try(throw exception))

    val transformedFuture = p.future.tried

    transformedFuture.futureValue must be(Failure(exception))
  }

  "transform" must "correctly transform between all Try variants in" in {
    val exception = new RuntimeException("bloh")

    val scenarios = Table[Try[String], Try[String] => Try[String], Try[String]] (
      ("original value", "transform", "expected output"),
      (Success("a"), identity[Try[String]], Success("a")),
      (Failure(exception), (x: Try[String]) => x match { case Failure(e) => Success(e.toString); case _ => ??? }, Success(exception.toString)),
      (Success("a"), (x: Try[String]) => x match { case Success(_) => Failure(exception); case _ => ??? }, Failure(exception)),
      (Failure(exception), identity[Try[String]], Failure(exception))
    )

    forAll(scenarios) {
      (orgValue, f, output) =>
        {
          val p = Promise[String]
          p.complete(orgValue)

          val transformedFuture = p.future.transformTry(f)

          transformedFuture.tried.futureValue must be(output)
        }
    }
  }

} 
Example 132
Source File: CallbackTracer.scala    From cassandra-util   with Apache License 2.0 5 votes
package com.protectwise.cql.tracing

import com.protectwise.cql.tracing.CQLTracer.CQLTraceFork

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Success, Failure, Try}

class CallbackTracer(callback: CQLTrace[_, _] => Unit) extends CQLTracer {

  class CallbackTracerFork[D](data: D) extends CQLTraceFork {
    def apply[R](f: => R): R = {
      val startTime = System.currentTimeMillis()
      val result = Try(f)
      callback(CQLTrace(data, startTime, System.currentTimeMillis(), result))

      result match {
        case Success(r) => r
        case Failure(e) => throw e
      }
    }

    def apply[R](f: Future[R])(implicit ec: ExecutionContext): Future[R] = {
      val startTime = System.currentTimeMillis()
      f andThen { case result =>
        callback(CQLTrace(data, startTime, System.currentTimeMillis(), result))
      }
    }
  }

  def apply[D](data: D) = new CallbackTracerFork(data)

} 
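
A minimal sketch of the synchronous path: the callback sees the Try-wrapped outcome plus start and end timestamps, and the traced block's result (or exception) is passed through unchanged:

val tracer = new CallbackTracer(trace => println(trace))
val two: Int = tracer("example-query") { 1 + 1 } // prints the CQLTrace and returns 2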
Example 133
Source File: JsonFormats.scala    From pipelines-examples   with Apache License 2.0 5 votes
package pipelines.examples.sensordata

import java.time.Instant
import java.util.UUID

import scala.util.Try

import spray.json._

trait UUIDJsonSupport extends DefaultJsonProtocol {
  implicit object UUIDFormat extends JsonFormat[UUID] {
    def write(uuid: UUID) = JsString(uuid.toString)

    def read(json: JsValue): UUID = json match {
      case JsString(uuid) ⇒ Try(UUID.fromString(uuid)).getOrElse(deserializationError(s"Expected valid UUID but got '$uuid'."))
      case other          ⇒ deserializationError(s"Expected UUID as JsString, but got: $other")
    }
  }
}

trait InstantJsonSupport extends DefaultJsonProtocol {
  implicit object InstantFormat extends JsonFormat[Instant] {
    def write(instant: Instant) = JsNumber(instant.toEpochMilli)

    def read(json: JsValue): Instant = json match {
      case JsNumber(value) ⇒ Instant.ofEpochMilli(value.toLong)
      case other           ⇒ deserializationError(s"Expected Instant as JsNumber, but got: $other")
    }
  }
}

object MeasurementsJsonSupport extends DefaultJsonProtocol {
  implicit val measurementFormat = jsonFormat3(Measurements.apply)
}

object SensorDataJsonSupport extends DefaultJsonProtocol with UUIDJsonSupport with InstantJsonSupport {
  import MeasurementsJsonSupport._
  implicit val sensorDataFormat = jsonFormat3(SensorData.apply)
} 
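
A round-trip sketch for the UUID format above; an invalid string becomes a spray-json deserializationError instead of an uncaught IllegalArgumentException, thanks to the Try:

import java.util.UUID
import spray.json._

object JsonSupport extends UUIDJsonSupport
import JsonSupport._

val id   = UUID.randomUUID()
val json = JsString(id.toString)
assert(json.convertTo[UUID] == id)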
Example 134
Source File: Newton.scala    From Scalaprof   with GNU General Public License v2.0 5 votes
package edu.neu.coe.csye._7200

import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}


case class Newton(f: Double => Double, dfbydx: Double => Double) {

  private def step(x: Double, y: Double) = x - y / dfbydx(x)

  def solve(tries: Int, threshold: Double, initial: Double): Try[Double] = {
    @tailrec def inner(r: Double, n: Int): Try[Double] = {
      val y = f(r)
      if (math.abs(y) < threshold) Success(r)
      else if (n == 0) Failure(new Exception("failed to converge"))
      else inner(step(r, y), n - 1)
    }

    inner(initial, tries)
  }
} 
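
A worked usage: f(x) = x * x - 2 with derivative 2 * x converges to the square root of 2 from an initial guess of 1.0:

import scala.util.Try

val sqrt2: Try[Double] =
  Newton(x => x * x - 2, x => 2 * x)
    .solve(tries = 20, threshold = 1e-10, initial = 1.0) // Success(1.4142135...)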
Example 135
Source File: NumberPredicate.scala    From Scalaprof   with GNU General Public License v2.0 5 votes
package com.phasmid.hedge_fund.rules

import scala.util.Try
import scala.util._

import scala.util.Failure


case class NumberPredicate(variable: String, operator: Operator[Double], value: Double) extends Predicate {

  def apply(candidate: Candidate): Either[Throwable, Boolean] = candidate(variable) match {
    case Some(x) => Try { operate(x, operator, value) } match {
      case Success(v) => Right(v)
      case Failure(f) => Left(f)
    }
    case _ => Left(new Exception(s"variable $variable not found in $candidate"))
  }

  // CONSIDER Moving this into Operator class
  def operate(x: Any, operator: Operator[Double], value: Double): Boolean = {
    x match {
      case y: Double => operator(y, value)
      case y: Int => operator(y, value)
      case y: String => operator(y.toDouble, value)
      case _ => throw new Exception(s"variable $variable cannot be for operator $operator")
    }
  }
}

object NumberPredicate {
  def apply(variable: String, operator: String, value: Double): NumberPredicate =
    new NumberPredicate(variable, Operator.createNumeric(operator), value)
  def apply(variable: String, operator: Operator[Double], value: String): NumberPredicate =
    new NumberPredicate(variable, operator, value.toDouble)
  def apply(variable: String, operator: String, value: String): NumberPredicate =
    apply(variable, Operator.createNumeric(operator), value)
  def apply(predicate: String): NumberPredicate = {
    val rPredicate = """^\s*(\w+)\s*([=<>]{1,2})\s*(-?[0-9]+\.?[0-9]*)\s*$""".r
    predicate match {
      case rPredicate(v, o, n) => apply(v, o, n)
      case _ => throw new Exception(s"predicate: $predicate is malformed")
    }
  }
} 
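
A construction sketch: the regex in apply accepts forms like the one below, and applying the predicate to a Candidate then yields Right(Boolean) or Left(error):

val cheap: NumberPredicate = NumberPredicate("price < 100")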
Example 136
Source File: StringPredicate.scala    From Scalaprof   with GNU General Public License v2.0 5 votes
package com.phasmid.hedge_fund.rules

import scala.util.Try
import scala.util._

import scala.util.Failure


case class StringPredicate(variable: String, operator: Operator[String], value: String) extends Predicate {

  def apply(candidate: Candidate): Either[Throwable, Boolean] = candidate(variable) match {
    case Some(x) => Try { operator(x.toString, value) } match {
      case Success(v) => Right(v)
      case Failure(f) => Left(f)
    }
    case _ => Left(new Exception(s"variable $variable not found in $candidate"))
  }
}

object StringPredicate {
  def apply(variable: String, operator: String, value: String): StringPredicate =
    new StringPredicate(variable, Operator.createText(operator), value)
} 
Example 137
Source File: ExpressionParserNumeric.scala    From Scalaprof   with GNU General Public License v2.0 5 votes
package edu.neu.coe.scala.parse

import edu.neu.coe.scala.numerics.Rational
import scala.util.Try
import edu.neu.coe.scala.numerics.Fuzzy


abstract class ExpressionParserNumeric[T : Numeric](implicit num: Numeric[T]) extends ExpressionParser[T] { self =>

  def div: (T,T)=>T = if (num.isInstanceOf[Fractional[T]]) (num.asInstanceOf[Fractional[T]]).div else throw new IllegalArgumentException("div method unavailable")
  def one: T = num.one
  def zero: T = num.zero
  def negate: (T)=>T = num.negate
  def plus: (T,T)=>T = num.plus
  def times: (T,T)=>T = num.times
}

object DoubleExpressionParser extends ExpressionParserNumeric[Double] {
  def apply(s: String): Try[Double] = Try(s.toDouble)
}
object IntExpressionParser extends ExpressionParserNumeric[Int] {
  def apply(s: String): Try[Int] = Try(s.toInt)
}
object RationalExpressionParser extends ExpressionParserNumeric[Rational] {
  def apply(s: String): Try[Rational] = Try(Rational.apply(s))
}
//object FuzzyExpressionParser extends ExpressionParserNumeric[Fuzzy] {
//  def apply(s: String): Try[Fuzzy] = Try(Fuzzy.apply(s))
//} 
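
Usage sketches for the concrete parsers (the Rational literal format is an assumption):

DoubleExpressionParser("3.1415")  // Success(3.1415)
IntExpressionParser("forty-two")  // Failure(java.lang.NumberFormatException)
RationalExpressionParser("22/7")  // Success(...), assuming Rational accepts this form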
Example 138
Source File: GraphQlClient.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.graphql

import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import cool.graph.akkautil.SingleThreadedActorSystem
import play.api.libs.json.{JsPath, JsValue, Json, Reads}

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

trait GraphQlClient {
  def sendQuery(query: String): Future[GraphQlResponse]
}

object GraphQlClient {
  private implicit lazy val actorSystem       = SingleThreadedActorSystem("graphql-client")
  private implicit lazy val actorMaterializer = ActorMaterializer()(actorSystem)
  private implicit lazy val akkaHttp          = Http()(actorSystem)

  def apply(uri: String, headers: Map[String, String] = Map.empty): GraphQlClient = {
    GraphQlClientImpl(uri, headers, akkaHttp)
  }
}

case class GraphQlResponse(status: Int, body: String) {
  def bodyAs[T](path: String)(implicit reads: Reads[T]): Try[T] = {
    def jsPathForElements(pathElements: Seq[String], current: JsPath = JsPath()): JsPath = {
      if (pathElements.isEmpty) {
        current
      } else {
        jsPathForElements(pathElements.tail, current \ pathElements.head)
      }
    }
    val jsPath      = jsPathForElements(path.split('.'))
    val actualReads = jsPath.read(reads)
    jsonBody.map(_.as(actualReads))
  }

  val is2xx: Boolean = status >= 200 && status <= 299
  val is200: Boolean = status == 200
  val is404: Boolean = status == 404

  def isSuccess: Boolean = deserializedBody match {
    case Success(x) => x.errors.isEmpty && is200
    case Failure(e) => false
  }

  def isFailure: Boolean       = !isSuccess
  def firstError: GraphQlError = deserializedBody.get.errors.head

  private lazy val deserializedBody: Try[GraphQlResponseJson] = {
    for {
      body     <- jsonBody
      response <- Try { body.as(JsonReaders.graphqlResponseReads) }
    } yield response
  }

  lazy val jsonBody: Try[JsValue] = Try(Json.parse(body))
}

case class GraphQlResponseJson(data: JsValue, errors: Seq[GraphQlError])
case class GraphQlError(message: String, code: Int)

object JsonReaders {
  import play.api.libs.functional.syntax._
  import play.api.libs.json._

  implicit lazy val graphqlErrorReads = Json.reads[GraphQlError]
  implicit lazy val graphqlResponseReads = (
    (JsPath \ "data").read[JsValue] and
      (JsPath \ "errors").readNullable[Seq[GraphQlError]].map(_.getOrElse(Seq.empty))
  )(GraphQlResponseJson.apply _)
} 
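
A usage sketch (endpoint URL and JSON path are illustrative); bodyAs keeps the whole extraction inside Try, so a malformed body or a missing path becomes a Failure rather than an exception:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try

val client = GraphQlClient("http://localhost:8080/graphql")
client.sendQuery("{ viewer { id } }").foreach { response =>
  val id: Try[String] = response.bodyAs[String]("data.viewer.id")
  println(id)
}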
Example 139
Source File: MetricsManager.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.metrics

import akka.actor.ActorSystem
import com.timgroup.statsd.{NonBlockingStatsDClient, StatsDClient}
import cool.graph.akkautil.SingleThreadedActorSystem

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}


trait MetricsManager {

  def serviceName: String

  // System used to periodically flush the state of individual gauges
  implicit lazy val gaugeFlushSystem: ActorSystem = SingleThreadedActorSystem(s"$serviceName-gauges")

  lazy val errorHandler = CustomErrorHandler()

  protected val baseTagsString: String = {
    if (sys.env.isDefinedAt("METRICS_PREFIX")) {
      Try {
        val instanceID  = Await.result(InstanceMetadata.fetchInstanceId(), 5.seconds)
        val containerId = ContainerMetadata.fetchContainerId()
        val region      = sys.env.getOrElse("AWS_REGION", "no_region")
        val env         = sys.env.getOrElse("METRICS_PREFIX", "local")

        s"env=$env,region=$region,instance=$instanceID,container=$containerId"
      } match {
        case Success(baseTags) => baseTags
        case Failure(err)      => errorHandler.handle(new Exception(err)); ""
      }
    } else {
      ""
    }
  }

  protected val client: StatsDClient = {
    // As we don't have an 'env' ENV var (prod, dev) this variable suppresses failing metrics output locally / during testing
    if (sys.env.isDefinedAt("METRICS_PREFIX")) {
      new NonBlockingStatsDClient("", Integer.MAX_VALUE, new Array[String](0), errorHandler, StatsdHostLookup())
    } else {
      println("[Metrics] Warning, Metrics can't initialize - no metrics will be recorded.")
      DummyStatsDClient()
    }
  }

  // Gauges DO NOT support custom metric tags per occurrence, only hardcoded custom tags during definition!
  def defineGauge(name: String, predefTags: (CustomTag, String)*): GaugeMetric = GaugeMetric(s"$serviceName.$name", baseTagsString, predefTags, client)
  def defineCounter(name: String, customTags: CustomTag*): CounterMetric       = CounterMetric(s"$serviceName.$name", baseTagsString, customTags, client)
  def defineFlushingCounter(name: String, customTags: CustomTag*)              = FlushingCounterMetric(s"$serviceName.$name", baseTagsString, customTags, client)
  def defineTimer(name: String, customTags: CustomTag*): TimerMetric           = TimerMetric(s"$serviceName.$name", baseTagsString, customTags, client)

  def shutdown: Unit = Await.result(gaugeFlushSystem.terminate(), 10.seconds)
} 
Example 140
Source File: TryExtensions.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.utils.`try`

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

object TryExtensions {
  implicit class TryExtensions[T](val theTry: Try[T]) extends AnyVal {
    def toFuture: Future[T] = theTry match {
      case Success(value)     => Future.successful(value)
      case Failure(exception) => Future.failed(exception)
    }
  }
}

object TryUtil {
  def sequence[T](trys: Vector[Try[T]]): Try[Vector[T]] = {
    val successes = trys.collect { case Success(x)     => x }
    val failures  = trys.collect { case f @ Failure(_) => f }
    if (successes.length == trys.length) {
      Success(successes)
    } else {
      failures.head.asInstanceOf[Try[Vector[T]]]
    }
  }
} 
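
A usage sketch of both helpers (assuming the package imports below):

import scala.concurrent.Future
import scala.util.Try
import cool.graph.utils.`try`.TryUtil
import cool.graph.utils.`try`.TryExtensions._

val all: Try[Vector[Int]] = TryUtil.sequence(Vector(Try(1), Try(2), Try(3)))
// Success(Vector(1, 2, 3)); otherwise the first Failure is returned as-is.

val asFuture: Future[Int] = Try(42).toFuture // Future.successful(42)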
Example 141
Source File: Consumers.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.rabbit

import com.rabbitmq.client.{AMQP, DefaultConsumer, Envelope, Channel => RabbitChannel}
import cool.graph.bugsnag.{BugSnagger, MetaData}

import scala.util.{Failure, Try}

case class DeliveryConsumer(channel: Channel, f: Delivery => Unit)(implicit bugsnagger: BugSnagger) extends DefaultConsumer(channel.rabbitChannel) {

  override def handleDelivery(consumerTag: String, envelope: Envelope, properties: AMQP.BasicProperties, body: Array[Byte]): Unit = {
    val delivery = Delivery(body, envelope, properties)
    Try {
      f(delivery)
    } match {
      case Failure(e) =>
        val bodyAsString = Try(new String(body)).getOrElse("Message Bytes could not be converted into a String.")
        val metaData     = Seq(MetaData("Rabbit", "messageBody", bodyAsString))
        bugsnagger.report(e, metaData)
      case _ => {} // NO-OP
    }
  }
} 
Example 142
Source File: PlainRabbit.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.rabbit

import java.util.concurrent.{Executors, ThreadFactory}

import scala.util.Try
import com.rabbitmq.client.{ConnectionFactory, Channel => RabbitChannel}
import cool.graph.bugsnag.BugSnagger

object PlainRabbit {
  def connect(name: String, amqpUri: String, numberOfThreads: Int, qos: Option[Int])(implicit bugSnag: BugSnagger): Try[RabbitChannel] = Try {

    val threadFactory: ThreadFactory = Utils.newNamedThreadFactory(name)
    val factory = {
      val f       = new ConnectionFactory()
      val timeout = sys.env.getOrElse("RABBIT_TIMEOUT_MS", "500").toInt
      f.setUri(amqpUri)
      f.setConnectionTimeout(timeout)
      f.setExceptionHandler(RabbitExceptionHandler(bugSnag))
      f.setThreadFactory(threadFactory)
      f.setAutomaticRecoveryEnabled(true)
      f
    }
    val executor   = Executors.newFixedThreadPool(numberOfThreads, threadFactory)
    val connection = factory.newConnection(executor)
    val theQos     = qos.orElse(sys.env.get("RABBIT_CHANNEL_QOS").map(_.toInt)).getOrElse(500)
    val chan       = connection.createChannel()
    chan.basicQos(theQos)
    chan
  }
} 
Example 143
Source File: CompileSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph

import cool.graph.bugsnag.BugSnaggerMock

import scala.util.{Failure, Success, Try}

object CompileSpec {
  import cool.graph.rabbit.Import._
  import cool.graph.rabbit.Import.ExchangeTypes._
  import cool.graph.rabbit.Import.Bindings._

  implicit val bugsnag = BugSnaggerMock
  val amqpUri          = "amqp://localhost"
  val queueName        = "some-name"

  // Consume with 1 consumer
  for {
    channel  <- Rabbit.channel(queueName, amqpUri, consumerThreads = 1)
    queue    <- channel.queueDeclare(queueName, durable = false, autoDelete = true)
    exchange <- channel.exchangeDeclare("some-exchange", durable = false)
    _        <- queue.bindTo(exchange, FanOut)
    _ <- queue.consume { delivery =>
          // do something with the delivery and ack afterwards
          println(delivery.body)
          queue.ack(delivery)
        }
  } yield ()

  // Consume with multiple consumers
  val numberOfConsumers = 4
  for {
    channel  <- Rabbit.channel(queueName, amqpUri, consumerThreads = numberOfConsumers)
    queue    <- channel.queueDeclare(queueName, durable = false, autoDelete = true)
    exchange <- channel.exchangeDeclare("some-exchange", durable = false)
    _        <- queue.bindTo(exchange, Bindings.FanOut)
    _ <- queue.consume(numberOfConsumers) { delivery =>
          // do something with the delivery and ack afterwards
          println(delivery.body)
          queue.ack(delivery)
        }
  } yield ()

  // Publishing
  def setupMyCustomExchange: Exchange = {
    val exchange: Try[Exchange] = for {
      channel  <- Rabbit.channel(queueName, amqpUri, consumerThreads = 1)
      exchange <- channel.exchangeDeclare("some-exchange", durable = false)
    } yield exchange
    exchange match {
      case Success(x) =>
        x
      case Failure(e) =>
        // maybe do something to retry. A naive way could look like this:
        Thread.sleep(1000)
        setupMyCustomExchange
    }
  }
  val exchange = setupMyCustomExchange
  exchange.publish("routingKey", "some message")

  // Publish to a Queue
  def setupMyQueue: Queue = {
    val queue: Try[Queue] = for {
      channel <- Rabbit.channel(queueName, amqpUri, consumerThreads = 1)
      queue   <- channel.queueDeclare("my-queue", durable = false, autoDelete = true)
    } yield queue
    queue match {
      case Success(x) =>
        x
      case Failure(e) =>
        // maybe do something to retry. A naive way could look like this:
        Thread.sleep(1000)
        setupMyQueue
    }
  }
  val queue = setupMyQueue
  queue.publish("some message")
} 
Example 144
Source File: Utils.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.messagebus.utils

import scala.io.Source
import scala.util.{Failure, Success, Try}

object Utils {

  val dockerContainerID: String = {
    Try {
      val source   = Source.fromFile("/etc/hostname")
      val hostname = try { source.mkString.trim } finally source.close()

      hostname
    } match {
      case Success(hostname) => hostname
      case Failure(err)      => println("Warning: Unable to read hostname from /etc/hostname"); ""
    }
  }
} 
Example 145
Source File: RabbitPlainQueueConsumer.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.messagebus.queue.rabbit

import cool.graph.bugsnag.BugSnagger
import cool.graph.messagebus.Conversions.ByteUnmarshaller
import cool.graph.messagebus.QueueConsumer.ConsumeFn
import cool.graph.messagebus.queue.BackoffStrategy
import cool.graph.messagebus.{ConsumerRef, QueueConsumer}
import cool.graph.rabbit.Bindings.RoutingKey
import cool.graph.rabbit.Import.Queue
import cool.graph.rabbit.{Consumer, Delivery, Exchange}

import scala.collection.mutable.ArrayBuffer
import scala.util.{Failure, Success, Try}


case class RabbitPlainQueueConsumer[T](
    queueName: String,
    exchange: Exchange,
    backoff: BackoffStrategy,
    autoDelete: Boolean = true,
    onShutdown: () => Unit = () => {},
    routingKey: Option[String] = None
)(implicit val bugSnagger: BugSnagger, unmarshaller: ByteUnmarshaller[T])
    extends QueueConsumer[T] {
  import scala.concurrent.ExecutionContext.Implicits.global

  private val consumers = ArrayBuffer[Consumer]()

  val queue: Queue = (for {
    queue <- exchange.channel.queueDeclare(queueName, durable = false, autoDelete = autoDelete)
    _ = routingKey match {
      case Some(rk) => queue.bindTo(exchange, RoutingKey(rk))
      case _        =>
    }
  } yield queue) match {
    case Success(q) => q
    case Failure(e) => sys.error(s"Unable to declare queue: $e")
  }

  override def withConsumer(fn: ConsumeFn[T]): ConsumerRef = {
    val consumer = queue.consume { delivery =>
      val payload = parsePayload(queue, delivery)
      fn(payload).onComplete {
        case Success(_)   => queue.ack(delivery)
        case Failure(err) => queue.nack(delivery, requeue = true); println(err)
      }
    } match {
      case Success(c) => c
      case Failure(e) => sys.error(s"Unable to declare consumer: $e")
    }

    RabbitConsumerRef(Seq(consumer))
  }

  def parsePayload(queue: Queue, delivery: Delivery): T = {
    Try { unmarshaller(delivery.body) } match {
      case Success(parsedPayload) =>
        parsedPayload

      case Failure(err) =>
        println(s"[Plain Consumer] Discarding message, invalid message body: $err")
        queue.ack(delivery)
        throw err
    }
  }

  override def shutdown: Unit = {
    println(s"[Plain Consumer] Stopping...")
    consumers.foreach { c =>
      c.unsubscribe.getOrElse(s"[Plain Consumer] Warn: Unable to unbind consumer: $c")
    }
    println(s"[Plain Consumer] Stopping... Done.")

    onShutdown()
  }
} 
Example 146
Source File: InMemoryMessageBusTestKits.scala    From graphcool-framework   with Apache License 2.0 5 votes
package cool.graph.messagebus.testkits.spechelpers

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import cool.graph.messagebus.testkits.{InMemoryPubSubTestKit, InMemoryQueueTestKit}

import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

class InMemoryMessageBusTestKits(system: ActorSystem) extends TestKit(system) {
  implicit val actorSystem                     = system
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  def withPubSubTestKit[T](checkFn: (InMemoryPubSubTestKit[T]) => Unit)(implicit tag: ClassTag[T]): Unit = {
    val testKit = InMemoryPubSubTestKit[T]()

    Try { checkFn(testKit) } match {
      case Success(_) => testKit.shutdown()
      case Failure(e) => testKit.shutdown(); throw e
    }
  }

  def withQueueTestKit[T](checkFn: (InMemoryQueueTestKit[T]) => Unit)(implicit tag: ClassTag[T]): Unit = {
    val testKit = InMemoryQueueTestKit[T]()

    Try { checkFn(testKit) } match {
      case Success(_) => testKit.shutdown()
      case Failure(e) => testKit.shutdown(); throw e
    }
  }

  def shutdownTestKit: Unit = {
    materializer.shutdown()
    shutdown(verifySystemShutdown = true)
  }
} 
Example 147
Source File: Reader.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.core.components.PartitionManager

import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.actor.Props
import akka.actor.Terminated
import akka.cluster.pubsub.DistributedPubSubMediator.SubscribeAck
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator
import com.raphtory.core.analysis.API.Analyser
import com.raphtory.core.components.PartitionManager.Workers.ReaderWorker
import com.raphtory.core.model.communication._
import com.raphtory.core.storage.EntityStorage
import com.raphtory.core.utils.Utils
import com.twitter.util.Eval

import scala.collection.parallel.mutable.ParTrieMap
import scala.util.Try

class Reader(
    id: Int,
    test: Boolean,
    managerCountVal: Int,
    storage: ParTrieMap[Int, EntityStorage],
    workerCount: Int = 10
) extends Actor
        with ActorLogging {

  implicit var managerCount: Int = managerCountVal

  // Id which refers to the partitions position in the graph manager map
  val managerId: Int = id

  val mediator: ActorRef = DistributedPubSub(context.system).mediator

  mediator ! DistributedPubSubMediator.Put(self)
  mediator ! DistributedPubSubMediator.Subscribe(Utils.readersTopic, self)

  var readers: ParTrieMap[Int, ActorRef] = new ParTrieMap[Int, ActorRef]()

  for (i <- 0 until workerCount) {
    log.debug("Initialising [{}] worker children for Reader [{}}.", workerCount, managerId)

    // create threads for writing
    val child = context.system.actorOf(
            Props(new ReaderWorker(managerCount, managerId, i, storage(i))).withDispatcher("reader-dispatcher"),
            s"Manager_${id}_reader_$i"
    )

    context.watch(child)
    readers.put(i, child)
  }

  override def preStart(): Unit =
    log.debug("Reader [{}] is being started.", managerId)

  override def receive: Receive = {
    case ReaderWorkersOnline()     => sender ! ReaderWorkersACK()
    case req: AnalyserPresentCheck => processAnalyserPresentCheckRequest(req)
    case req: UpdatedCounter       => processUpdatedCounterRequest(req)
    case SubscribeAck              =>
    case Terminated(child) =>
      log.warning(s"ReaderWorker with path [{}] belonging to Reader [{}] has died.", child.path, managerId)
    case x => log.warning(s"Reader [{}] received unknown [{}] message.", managerId, x)
  }

  def processAnalyserPresentCheckRequest(req: AnalyserPresentCheck): Unit = {
    log.debug(s"Reader [{}] received [{}] request.", managerId, req)

    val className   = req.className
    val classExists = Try(Class.forName(className))

    classExists.toEither.fold(
            { _: Throwable =>
              log.debug("Class [{}] was not found within this image.", className)

              sender ! ClassMissing()
            }, { _: Class[_] =>
              log.debug(s"Class [{}] exists. Proceeding.", className)

              sender ! AnalyserPresent()
            }
    )
  }

  def processUpdatedCounterRequest(req: UpdatedCounter): Unit = {
    log.debug("Reader [{}] received [{}] request.", managerId, req)

    managerCount = req.newValue
    readers.foreach(x => x._2 ! UpdatedCounter(req.newValue))
  }
} 
Example 148
Source File: S3MigrationHandlerBase.scala    From flyway-awslambda   with MIT License 5 votes
package crossroad0201.aws.flywaylambda

import java.text.SimpleDateFormat
import java.util.Date

import com.amazonaws.services.lambda.runtime.Context
import com.amazonaws.services.s3.AmazonS3
import crossroad0201.aws.flywaylambda.deploy.{FlywayDeployment, S3SourceFlywayDeployer}
import crossroad0201.aws.flywaylambda.migration.{FlywayMigrator, MigrationInfo, MigrationResult}
import spray.json.DefaultJsonProtocol

import scala.util.Try

object MigrationResultProtocol extends DefaultJsonProtocol {
  import spray.json._

  implicit val DateFormat = new RootJsonFormat[Date] {
    override def write(value: Date): JsValue = if (value == null) JsNull else JsString(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(value))
    override def read(json: JsValue): Date = ???
  }
  implicit val migrationInfoFormat = jsonFormat6(MigrationInfo.apply)
  implicit val migrationResultFormat = jsonFormat5(MigrationResult.apply)
}

trait S3MigrationHandlerBase extends FlywayMigrator {

  type ResultJson = String
  type ResultStoredPath = String

  protected def migrate(bucketName: String, prefix: String, flywayConfFileName: String = "flyway.conf")(implicit context: Context, s3Client: AmazonS3): Try[ResultJson] = {
    val logger = context.getLogger

    def resultJson(result: MigrationResult): ResultJson = {
      import MigrationResultProtocol._
      import spray.json._

      result.toJson.prettyPrint
    }

    def storeResult(deployment: FlywayDeployment, result: MigrationResult): ResultStoredPath = {
      val jsonPath = s"${deployment.sourcePrefix}/migration-result.json"
      s3Client.putObject(deployment.sourceBucket, jsonPath, resultJson(result))
      jsonPath
    }

    for {
      // Deploy Flyway resources.
      d <- new S3SourceFlywayDeployer(s3Client, bucketName, prefix, flywayConfFileName).deploy
      _ = {
        logger.log(
          s"""--- Flyway configuration ------------------------------------
             |flyway.url      = ${d.url}
             |flyway.user     = ****
             |flyway.password = ****
             |
             |SQL locations   = ${d.location}
             |SQL files       = ${d.sqlFiles.mkString(", ")}
             |-------------------------------------------------------------
              """.stripMargin)
      }

      // Migrate DB.
      r = migrate(d)
      _ = {
        logger.log(s"${r.message}!. ${r.appliedCount} applied.")
        r.infos.foreach { i =>
          logger.log(s"Version=${i.version}, Type=${i.`type`}, State=${i.state} InstalledAt=${i.installedAt} ExecutionTime=${i.execTime} Description=${i.description}")
        }
      }

      // Store migration result.
      storedPath = storeResult(d, r)
      _ = logger.log(s"Migration result stored to $bucketName/$storedPath.")

    } yield resultJson(r)
  }

} 
Example 149
Source File: FlywayMigrator.scala    From flyway-awslambda   with MIT License 5 votes
package crossroad0201.aws.flywaylambda.migration

import crossroad0201.aws.flywaylambda.deploy.FlywayDeployment
import org.flywaydb.core.Flyway

import scala.util.{Failure, Success, Try}

trait FlywayMigrator {

  def migrate(deployment: FlywayDeployment) = {
    val flyway = new Flyway

    val appliedCount = Try {
      flyway.setDataSource(
        deployment.url,
        deployment.user,
        deployment.password
      )
      flyway.setLocations(deployment.location)

      deployment.options.map(_.apply(flyway))

      flyway.migrate
    }

    val migrationInfos = Try {
      flyway.info.all
    }

    (appliedCount, migrationInfos) match {
      case (Success(c), Success(is)) => MigrationResult.success(deployment.url, c, is.map(MigrationInfo(_)))
      case (Success(c), Failure(e)) => MigrationResult.failure(deployment.url, e, Seq())
      case (Failure(e), Success(is)) => MigrationResult.failure(deployment.url, e, is.map(MigrationInfo(_)))
      case (Failure(e1), Failure(e2)) => MigrationResult.failure(deployment.url, e1, Seq())
    }
  }

} 
Example 150
Source File: InvokeMigrationHandler.scala    From flyway-awslambda   with MIT License 5 votes
package crossroad0201.aws.flywaylambda

import java.io.{BufferedOutputStream, InputStream, OutputStream, PrintWriter}

import com.amazonaws.regions.{Region, Regions}
import com.amazonaws.services.lambda.runtime.{Context, RequestStreamHandler}
import com.amazonaws.services.s3.{AmazonS3, AmazonS3Client}

import scala.io.{BufferedSource, Codec}
import scala.util.{Failure, Success, Try}

class InvokeMigrationHandler extends RequestStreamHandler with S3MigrationHandlerBase {
  type BucketName = String
  type Prefix = String
  type ConfFileName = String

  override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
    def parseInput: Try[(BucketName, Prefix, ConfFileName)] = Try {
      import spray.json._
      import DefaultJsonProtocol._

      val json = new BufferedSource(input)(Codec("UTF-8")).mkString
      val jsObj = JsonParser(json).toJson.asJsObject
      jsObj.getFields(
        "bucket_name",
        "prefix"
      ) match {
        case Seq(JsString(b), JsString(p)) => {
          jsObj.getFields(
            "flyway_conf"
          ) match {
            case Seq(JsString(c)) => (b, p, c)
            case _ => (b, p, "flyway.conf")
          }
        }
        case _ => throw new IllegalArgumentException(s"Missing require key [bucketName, prefix]. - $json")
      }
    }

    val logger = context.getLogger

    implicit val s3Client: AmazonS3 = new AmazonS3Client().withRegion(Region.getRegion(Regions.fromName(sys.env("AWS_REGION"))))

    (for {
      i <- parseInput
      _ = { logger.log(s"Flyway migration start. by invoke lambda function(${i._1}, ${i._2}, ${i._3}).") }
      r <- migrate(i._1, i._2, i._3)(context, s3Client)
    } yield r) match {
      case Success(r) =>
        logger.log(r)
        val b = r.getBytes("UTF-8")
        val bout = new BufferedOutputStream(output)
        bout.write(b)
        bout.flush()
      case Failure(e) =>
        e.printStackTrace()
        val w = new PrintWriter(output)
        w.write(e.toString)
        w.flush()
    }
  }

} 
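
For reference, parseInput above expects an invocation payload of the following shape (the bucket and prefix values here are hypothetical; "flyway_conf" is optional and defaults to "flyway.conf"):

{
  "bucket_name": "my-migration-bucket",
  "prefix": "migrations/prod",
  "flyway_conf": "flyway.conf"
}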
Example 151
Source File: S3SourceFlywayDeployer.scala    From flyway-awslambda   with MIT License 5 votes vote down vote up
package crossroad0201.aws.flywaylambda.deploy

import java.nio.file.{Files, Path, Paths}
import java.util.{Properties => JProperties}

import com.amazonaws.services.lambda.runtime.Context
import com.amazonaws.services.s3.AmazonS3
import com.amazonaws.services.s3.model.S3ObjectSummary

import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.util.Try

class S3SourceFlywayDeployer(s3Client: AmazonS3, srcBucketName: String, srcPrefix: String, flywayConfFileName: String) extends FlywayDeployer {

  def deploy(implicit context: Context): Try[FlywayDeployment] = Try {
    val logger = context.getLogger

    val tmpDir = Files.createDirectories(Paths.get("/tmp", context.getAwsRequestId))
    Files.createDirectories(Paths.get(tmpDir.toString, srcPrefix))

    @tailrec
    def deployInternal(objects: List[S3ObjectSummary], acc: (Option[JProperties], ListBuffer[Path])): (Option[JProperties], Seq[Path]) = {
      def loadConf(key: String) = {
        val o = s3Client.getObject(srcBucketName, key)
        val props = new JProperties
        props.load(o.getObjectContent)
        logger.log(s"Flyway configuration loaded. s3://$srcBucketName/$key")
        (Some(props), acc._2)
      }
      def createDir(key: String) = {
        val dir = Files.createDirectories(Paths.get(tmpDir.toString, key))
        logger.log(s"Dir created. $dir")
        acc
      }
      def createSqlFile(key: String) = {
        val o = s3Client.getObject(srcBucketName, key)
        val file = Paths.get(tmpDir.toString, key)
        val fileSize = Files.copy(o.getObjectContent, file)
        logger.log(s"SQL file created. $file($fileSize Byte)")
        acc._2 += file
        acc
      }

      objects match {
        case Nil => (acc._1, acc._2)
        case x :: xs =>
          val _acc = x.getKey match {
            case key if key.endsWith(flywayConfFileName) => loadConf(key)
            case key if key.endsWith("/") => createDir(key)
            case key if key.endsWith(".sql") => createSqlFile(key)
            case _ => acc
          }
          deployInternal(xs, _acc)
      }
    }

    val objectSummaries = {
      val objects = s3Client.listObjects(srcBucketName, srcPrefix)
      objects.getObjectSummaries.asScala.toList.sortWith { (x, y) =>
        x.getKey.compareTo(y.getKey) < 1
      }
    }

    logger.log(s"Deploying Flyway resources from $srcBucketName/$srcPrefix... ${objectSummaries.map(_.getKey).mkString(", ")}")

    deployInternal(objectSummaries, (None, ListBuffer())) match {
      case (Some(conf), sqlFiles) =>
        FlywayDeployment(
          srcBucketName,
          srcPrefix,
          conf,
          s"filesystem:${Paths.get(tmpDir.toString, srcPrefix).toString}",
          sqlFiles)
      case _ => throw new IllegalStateException(s"$flywayConfFileName does not exists.")
    }
  }

} 
Example 152
Source File: InterpolatorTest.scala    From contextual   with Apache License 2.0 5 votes vote down vote up
package contextual.tests

import scala.util.{Failure, Success, Try}

import contextual.Interpolator
import contextual.tests.InterpolatorTest.{BadMatch, BadParse, Successful, TestResult}

trait InterpolatorTest[I <: Interpolator] {
  type Example = (() => I#Output, I#Output)
  def testName: String = this.getClass.getSimpleName
  def interpolator: I
  def examples: List[Example]
  def eq(x: I#Output, y: I#Output): Boolean = x == y

  def testExamples: (List[TestResult], List[TestResult]) = {
    examples.map { case (thunk, expected) =>
      Try(thunk()) match {
        case Success(result) if eq(result, expected) => Successful
        case Success(result) => BadMatch(expected, result)
        case Failure(_) => BadParse(expected)
      }
    }.partition {
      case Successful => true
      case _ => false
    }
  }

  def runTests(): Unit = {
    println(s"Running tests for $testName")
    val (succ, fail) = testExamples
    fail.foreach { f =>
      println(s"Test Failed: $f")
    }
    println(s"Results: ${succ.size} successes and ${fail.size} failures")
  }
}

object InterpolatorTest {
  sealed trait TestResult
  case object Successful extends TestResult
  case class BadParse[T](expected: T) extends TestResult
  case class BadMatch[T](expected: T, result: T) extends TestResult
} 
Example 153
Source File: TestExample.scala    From akka-typed-persistence   with Apache License 2.0 5 votes vote down vote up
package com.nokia.ntp.ct
package persistence
package testkit

import scala.util.Try

import org.scalatest.FlatSpecLike

import akka.testkit.TestKit
import akka.typed._

import cats.implicits._

class TestExample extends TestKit(akka.actor.ActorSystem()) with FlatSpecLike { spec =>

  sealed trait MyMsg
  case class Add(n: Int, replyTo: ActorRef[Long]) extends MyMsg
  case object Snap extends MyMsg
  case object Stop extends MyMsg
  case class ReadSeqNr(replyTo: ActorRef[Long]) extends MyMsg

  sealed trait MyEv
  case class Incr(amount: Int) extends MyEv

  sealed case class MyState(ctr: Long) {
    def update(ev: MyEv): MyState = ev match {
      case Incr(n) => this.copy(ctr = ctr + n)
    }
  }

  object MyState {
    implicit val mngd: Update[MyState, MyEv] =
      Update.instance(_ update _)
  }

  val name = "TestExample"

  val b = PersistentActor.immutable[MyMsg, MyEv, MyState](
    MyState(ctr = 0),
    _ => name
  ) { state => p => {
      case Add(n, r) =>
        for {
          st <- p.apply(Incr(n))
        } yield {
          r ! st.ctr
          st
        }
      case Snap =>
        p.snapshot
      case Stop =>
        p.stop
      case ReadSeqNr(r) =>
        for {
          seqNr <- p.lastSequenceNr
          _ = r ! seqNr
        } yield state
    }
    }

  val ti = new TestInterpreter(name, b, ActorSystem.wrap(this.system)) {
    override def assert(b: Boolean, msg: String = ""): Try[Unit] =
      Try(spec.assert(b, msg))
    override def fail(msg: String): Nothing =
      spec.fail(msg)
  }

  "It" should "work" in {
    ti.check(for {
      _ <- ti.expect[Long](ReadSeqNr, 0L)
      _ <- ti.expect[Long](Add(3, _), 3L)
      _ <- ti.expect[Long](ReadSeqNr, 1L)
      _ <- ti.expect[Long](Add(2, _), 5L)
      _ <- ti.expect[Long](ReadSeqNr, 2L)
      _ <- ti.expectSt(_.ctr, 5L)
      _ <- ti.message(Stop)
      _ <- ti.expectStop
    } yield ())
  }
} 
Example 154
Source File: ElasticsearchIntegrationTest.scala    From elasticsearch-client   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.elasticsearch.restlastic

import com.sumologic.elasticsearch.restlastic.RestlasticSearchClient.ReturnTypes
import com.sumologic.elasticsearch.restlastic.dsl.Dsl._
import org.junit.runner.RunWith
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Span}
import org.scalatest.{BeforeAndAfterAll, Suite}
import org.scalatestplus.junit.JUnitRunner

import scala.util.{Random, Try}



@RunWith(classOf[JUnitRunner])
trait ElasticsearchIntegrationTest extends BeforeAndAfterAll with ScalaFutures {
  this: Suite =>
  private val indexPrefix = "test-index"

  def restClient: RestlasticSearchClient

  val IndexName = s"$indexPrefix-${math.abs(Random.nextLong())}"

  protected def createIndices(cnt: Int = 1): IndexedSeq[Index] = {
    (1 to cnt).map(idx => {
      val index = dsl.Dsl.Index(s"${IndexName}-${idx}")
      val analyzerName = Name("keyword_lowercase")
      val lowercaseAnalyzer = Analyzer(analyzerName, Keyword, Lowercase)
      val notAnalyzed = Analyzer(Name("not_analyzed"), Keyword)
      val analyzers = Analyzers(
        AnalyzerArray(lowercaseAnalyzer, notAnalyzed),
        FilterArray(),
        NormalizerArray(Normalizer(Name("lowercase"), Lowercase)))
      val indexSetting = IndexSetting(12, 1, analyzers, 30)
      val indexFut = restClient.createIndex(index, Some(indexSetting))
      indexFut.futureValue
      index
    })
  }

  override def beforeAll(): Unit = {
    super.beforeAll()
    Try(delete(Index(s"$indexPrefix*")))
  }

  override def afterAll(): Unit = {
    Try(delete(Index(s"$indexPrefix*")))
    super.afterAll()
  }

  private def delete(index: Index): ReturnTypes.RawJsonResponse = {
    implicit val patienceConfig = PatienceConfig(scaled(Span(1500, Millis)), scaled(Span(15, Millis)))
    restClient.deleteIndex(index).futureValue
  }
} 
Example 155
Source File: StringUtils.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import scala.util.{Success, Try}

object StringUtils {

  def isUnsignedInt(s: String) = s match {
    case UnsignedIntString(_) => true
    case _ => false
  }

  object UnsignedIntString {
    def unapply(s: String): Option[Int] = Try(s.toInt) match {
      case Success(value) if value >= 0 => Some(value)
      case _ => None
    }
  }
} 
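
A REPL-style sketch of the extractor above:

import com.programmaticallyspeaking.ncd.infra.StringUtils._

"42" match {
  case UnsignedIntString(n) => s"index $n" // matches: "index 42"
  case _                    => "not an unsigned int"
}

isUnsignedInt("-1")  // false: parses, but is negative
isUnsignedInt("abc") // false: does not parse at all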
Example 156
Source File: DelayedFuture.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import java.util.concurrent.{Executors, TimeUnit}

import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{CanAwait, ExecutionContext, Future, Promise}
import scala.util.Try

class CancellableFuture[+A](fut: Future[A], canceller: () => Unit) extends Future[A] {
  override def onComplete[U](f: (Try[A]) => U)(implicit executor: ExecutionContext): Unit = fut.onComplete(f)
  override def isCompleted: Boolean = fut.isCompleted
  override def value: Option[Try[A]] = fut.value
  override def transform[S](f: (Try[A]) => Try[S])(implicit executor: ExecutionContext): Future[S] = fut.transform(f)
  override def transformWith[S](f: (Try[A]) => Future[S])(implicit executor: ExecutionContext): Future[S] = fut.transformWith(f)
  override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { fut.ready(atMost); this }
  override def result(atMost: Duration)(implicit permit: CanAwait): A = fut.result(atMost)

  def cancel(): Unit = canceller()
}

object DelayedFuture {
  // Shared scheduler: a `def` here would create (and leak) a new thread pool on every call.
  private val executor = Executors.newSingleThreadScheduledExecutor()

  def apply[R](delay: FiniteDuration)(fun: => R)(implicit executionContext: ExecutionContext): CancellableFuture[R] = {
    val resultPromise = Promise[R]()
    var isCancelled = false
    executor.schedule(new Runnable {
      override def run(): Unit = {
        if (!isCancelled)
          resultPromise.completeWith(Future(fun))
      }
    }, delay.toMillis, TimeUnit.MILLISECONDS)
    def cancel(): Unit = isCancelled = true
    new CancellableFuture[R](resultPromise.future, () => cancel())
  }
} 
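
A usage sketch (the computation and the sleep are only there to make the example observable):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object DelayedFutureDemo extends App {
  // Runs the thunk after two seconds; calling cancel() before the delay
  // elapses means this task never completes the underlying promise.
  val delayed = DelayedFuture(2.seconds) { 40 + 2 }
  delayed.onComplete(r => println(s"result: $r")) // result: Success(42)
  // delayed.cancel()
  Thread.sleep(3000) // keep the JVM alive long enough to observe the result
}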
Example 157
Source File: ExecutorProxy.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import java.lang.reflect.{InvocationHandler, InvocationTargetException, Method}
import java.util.concurrent.Executor
import java.util.concurrent.atomic.AtomicInteger

import org.slf4s.Logging

import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise, TimeoutException}
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

class ExecutorProxy(executor: Executor) {
  import scala.collection.JavaConverters._

  def createFor[A <: AnyRef : ClassTag](instance: A): A = {
    val clazz = implicitly[ClassTag[A]].runtimeClass
    java.lang.reflect.Proxy.newProxyInstance(clazz.getClassLoader, Array(clazz), new Handler(instance)).asInstanceOf[A]
  }

  class Handler(instance: AnyRef) extends InvocationHandler with Logging {
    import scala.concurrent.ExecutionContext.Implicits._
    private val className = instance.getClass.getName

    private val idGen = new AtomicInteger(0)
    private var awaitingCalls = Map[Int, String]()

    override def invoke(proxy: scala.Any, method: Method, args: Array[AnyRef]): AnyRef = {
      val resultPromise = Promise[AnyRef]()

      val before = System.nanoTime()

      val id = idGen.getAndIncrement()
      val argss = Option(args).getOrElse(Array.empty)
      val desc = s"$method(${argss.mkString(", ")})[$id]"
      log.trace(s"Waiting to execute: $desc")

      // Snapshot of waiting calls prior to submitting to the executor
      val waitingCallsAtEntry = awaitingCalls

      executor.execute(() => {
        log.trace(s"Execute: $id")
        Try(method.invoke(instance, args: _*)) match {
          case Success(f: Future[_]) => resultPromise.completeWith(f.asInstanceOf[Future[AnyRef]])
          case Success(result) => resultPromise.success(result)
          case Failure(t: InvocationTargetException) => resultPromise.failure(t.getCause)
          case Failure(t) => resultPromise.failure(t)
        }
      })

      resultPromise.future.onComplete { _ =>
        val methodName = method.getName
        val millis = (System.nanoTime() - before).nanos.toMillis
        log.trace(s"Elapsed time for $className.$methodName = $millis ms")
      }

      if (classOf[Future[_]].isAssignableFrom(method.getReturnType)) resultPromise.future
      else {
        // Update with this call
        awaitingCalls += (id -> desc)
        //TODO: Configurable timeout
        try Await.result(resultPromise.future, 30.seconds) catch {
          case _: TimeoutException =>
            val other = waitingCallsAtEntry.values
            val sb = new StringBuilder(s"Timed out waiting for '$desc' to complete. Calls at entry: ${other.mkString("'", "', '", "'")}. Stack:\n")
            appendStackTraces(sb)
            log.debug(sb.toString())
            throw new TimeoutException(s"Timed out waiting for '$desc' to complete.")
        } finally {
          // Done with this call
          awaitingCalls -= id
          log.trace(s"Done: $id")
        }
      }
    }

    private def appendStackTraces(sb: StringBuilder): Unit = {
      Thread.getAllStackTraces.asScala.foreach { tup =>
        sb.append("\n> THREAD ").append(tup._1.getName).append("\n")
        tup._2.foreach(ste => sb.append("  ").append(ste).append("\n"))
      }
    }
  }
} 
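
A minimal sketch of using the proxy; Counter is a hypothetical single-method trait (any trait that compiles to a plain interface works, since the proxy is built with java.lang.reflect.Proxy):

import java.util.concurrent.Executors

object ExecutorProxyDemo extends App {
  trait Counter { def next(): Int }

  val impl = new Counter { private var i = 0; def next(): Int = { i += 1; i } }
  val executor = Executors.newSingleThreadExecutor()

  // Every call on `proxied` is marshalled onto `executor`; because next() does
  // not return a Future, the caller blocks until the result is available.
  val proxied = new ExecutorProxy(executor).createFor[Counter](impl)
  println(proxied.next()) // 1
  println(proxied.next()) // 2
  executor.shutdown()
}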
Example 158
Source File: FileReader.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.infra

import java.io.File
import java.nio.charset.Charset
import java.nio.file.Files

import scala.util.Try

trait FileReader {
  def read(file: File, charset: Charset): Try[String]
}


class FileSystemFileReader extends FileReader {
  override def read(file: File, charset: Charset): Try[String] = {
    Try {
      val bytes = Files.readAllBytes(file.toPath)
      new String(bytes, charset)
    }
  }
} 
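
A REPL-style sketch (the path is hypothetical; a missing file or bad encoding comes back as a Failure rather than a thrown exception):

import java.io.File
import java.nio.charset.StandardCharsets
import scala.util.{Failure, Success}

new FileSystemFileReader().read(new File("/tmp/notes.txt"), StandardCharsets.UTF_8) match {
  case Success(text) => println(text)
  case Failure(t)    => println(s"could not read: $t")
}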
Example 159
Source File: VersionExtractor.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import com.programmaticallyspeaking.ncd.host.ScriptVersion
import com.sun.jdi.ReferenceType

import scala.util.Try

object VersionExtractor {
  def extract(referenceType: ReferenceType): ScriptVersion =
    extract(referenceType.name())

  def extract(name: String): ScriptVersion = {
    val parts = name.split('$')
    // Part 1 is: "jdk/nashorn/internal/scripts/Script"
    // Part 2 may be: "Recompilation" or compilation ID, but only if ID > 0
    // Part 3 is compilation ID if part 2 is "Recompilation"
    val isRecompilation = parts.lift(1).contains("Recompilation")
    val compilationIdPart = if (isRecompilation) 2 else 1
    // Wrap in Try to handle missing ID, e.g.: jdk.nashorn.internal.scripts.Script$\^eval\_
    val compilationId = Try(parts.lift(compilationIdPart).map(_.toInt).getOrElse(0)).getOrElse(0)
    ScriptVersion(compilationId, !isRecompilation)
  }
} 
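
Two REPL-style probes, grounded in the name format described in the comments above:

VersionExtractor.extract("jdk/nashorn/internal/scripts/Script$Recompilation$2$foo")
// => ScriptVersion(2, false) -- recompilation with compilation ID 2

VersionExtractor.extract("jdk/nashorn/internal/scripts/Script$foo")
// => ScriptVersion(0, true)  -- first compilation; the missing ID falls back to 0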
Example 160
Source File: ServerStarter.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.chrome.net

import scala.util.{Failure, Random, Success, Try}

trait ServerStarter[TServer] {

  def startServer(port: Int): TServer

  def startServer(): (TServer, Int) = {
    val r = new Random()
    var lastFailure: Throwable = null
    for (i <- 1 to 20) {
      val port = 50000 + r.nextInt(5000)
      Try(startServer(port)) match {
        case Success(server) => return (server, port)
        case Failure(t) => lastFailure = t
      }
    }
    throw new RuntimeException("Failed to start the server", lastFailure)
  }
} 
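
A sketch of plugging a concrete server into the trait; EchoServer is hypothetical, and in real use startServer(port) would throw (e.g. a BindException) when the port is taken, which is exactly what the random-port loop above relies on:

class EchoServer(val port: Int) // stand-in for a real server bound to the port

object EchoStarter extends ServerStarter[EchoServer] {
  def startServer(port: Int): EchoServer = new EchoServer(port)
}

object EchoDemo extends App {
  val (server, boundPort) = EchoStarter.startServer() // tries random ports in [50000, 55000)
  println(s"started on $boundPort")
}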
Example 161
Source File: DomainMethodArgumentFactory.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.chrome.domains

import com.programmaticallyspeaking.ncd.chrome.net.Protocol
import com.programmaticallyspeaking.ncd.infra.ObjectMapping

import scala.util.{Failure, Success, Try}

object DomainMethodArgumentFactory {
  def create(message: Protocol.IncomingMessage): AnyRef = {
    message.method.split('.').toList match {
      case domain :: method :: Nil =>
        privCreate(domain, method, message.params)

      case _ =>
        throw new IllegalArgumentException("Malformed method: " + message.method)
    }
  }

  private def isEnableOrDisable(method: String) = method == "enable" || method == "disable"

  private def privCreate(aDomain: String, method: String, params: Map[String, Any]): AnyRef = {
    var domain = aDomain
    val hasParams = params != null

    // Special method handling - enable and disable are generic (unless there are parameters)
    if (isEnableOrDisable(method) && !hasParams) domain = "Domain"

    val className = DomainMethodArgumentFactory.getClass.getPackage.getName + "." + domain + "$" + method
    val maybeClasses = lookupClasses(className)
    val caseObjectClass = maybeClasses.caseObject.getOrElse(throw rejection(domain, method, "the domain and/or method are unknown"))

    if (hasParams) {
      // Create a case class instance
      maybeClasses.caseClass match {
        case Some(caseClass) =>
          ObjectMapping.fromMap(params, caseClass).asInstanceOf[AnyRef]
        case None =>
          // There are arguments, so we ended up here in the case class branch. But the method refers to a case object.
          throw rejection(domain, method, "there are arguments")
      }
    } else {
      // Return case class or case object instance.
      maybeClasses.caseClass match {
        case Some(caseClass) =>
          ObjectMapping.fromMap(Map.empty, caseClass).asInstanceOf[AnyRef]
        case None =>
          // Creating a new instance of the case class isn't stable, since whether or not we get the correct instance
          // depends on if the case object has been initialized. Instead, we read the value of the MODULE$ field.
          val field = caseObjectClass.getField("MODULE$")
          field.get(null)
      }
    }
  }

  private def rejection(domain: String, method: String, reason: String): Exception =
    new IllegalArgumentException(s"Cannot create domain method $domain.$method because $reason")

  private def lookupClasses(className: String): Classes =
    Classes(lookupClass(className), lookupClass(className + "$"))

  private def lookupClass(name: String): Option[Class[_]] = Try(Class.forName(name)) match {
    case Success(clazz) => Some(clazz)
    case Failure(t) => None
  }

  case class Classes(caseClass: Option[Class[_]], caseObject: Option[Class[_]])
} 
Example 162
Source File: ValueNodeExtractor.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.host

import com.programmaticallyspeaking.ncd.host.types.{ObjectPropertyDescriptor, Undefined}

import scala.collection.mutable.ArrayBuffer
import scala.util.{Failure, Success, Try}

trait ObjectInteraction {
  def getOwnProperties(objectId: ObjectId): Seq[(String, ObjectPropertyDescriptor)]

  def invokePropertyGetter(objectId: ObjectId, getter: FunctionNode): Try[ValueNode]
}

class ScriptHostBasedObjectInteraction(scriptHost: ScriptHost) extends ObjectInteraction {
  override def getOwnProperties(objectId: ObjectId): Seq[(String, ObjectPropertyDescriptor)] =
    scriptHost.getObjectProperties(objectId, onlyOwn = true, onlyAccessors = false)

  override def invokePropertyGetter(objectId: ObjectId, getter: FunctionNode): Try[ValueNode] = {
    scriptHost.callFunctionOn(StackFrame.TopId, None, "function (g, o) { return g.call(o); }", Seq(getter.objectId, objectId))
  }
}

class ValueNodeExtractor(objectInteraction: ObjectInteraction) {
  import com.programmaticallyspeaking.ncd.infra.StringUtils._

  def extract(v: ValueNode): Any = extract(v, Set.empty)

  private def propertyList(oid: ObjectId, observedObjectIds: Set[ObjectId]): Seq[(String, Any)] = {
    val props = objectInteraction.getOwnProperties(oid)
    props.map { e =>
      val propName = e._1
      e._2.value match {
        case Some(vn) =>
          propName -> extract(vn, observedObjectIds + oid)
        case None =>
          propName -> (e._2.getter match {
            case Some(g: FunctionNode) =>
              objectInteraction.invokePropertyGetter(oid, g) match {
                case Success(vn) => extract(vn, observedObjectIds + oid)
                case Failure(t) => s"<Error calling getter for property '$propName' of object '${oid.id}': ${t.getMessage}>" // TODO!! Handle better!?
              }
            case _ =>
              s"<Error: Unrecognized property '$propName' of object '${oid.id}'>" //TODO: What here?
          })
      }
    }
  }

  private def extract(v: ValueNode, observedObjectIds: Set[ObjectId]): Any = v match {
    case SimpleValue(value) if value == Undefined => null
    case SimpleValue(value) => value
    case lzy: LazyNode => extract(lzy.resolve(), observedObjectIds)
    case ArrayNode(_, _, oid) if observedObjectIds.contains(oid) => s"<Error: cycle detected for array '${oid.id}'>"
    case ArrayNode(size, _, oid) =>
      val propList = propertyList(oid, observedObjectIds)
      val array = new Array[Any](size)
      propList.foreach {
        case (UnsignedIntString(idx), value) if idx < size => array(idx) = value
        case _ =>
      }
      array
    case ObjectNode(_, oid) if observedObjectIds.contains(oid) => s"<Error: cycle detected for object '${oid.id}'>"
    case ObjectNode(_, oid) =>
      // Unsure how Chrome does it when there is a Symbol and a string key with the same string rep...
      propertyList(oid, observedObjectIds).toMap
    case EmptyNode => null
    case DateNode(stringRep, _) => stringRep
    case FunctionNode(name, _, _) => s"<function $name() {}>"
    case ErrorValue(data, _, _, _) => s"<${data.name}: ${data.message}>"
    // Don't know why I don't get a pattern match warning even though ValueNode is sealed. Is it because
    // LazyNode isn't sealed?
  }
} 
Example 163
Source File: EvaluateTestFixture.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.UnitTest
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}

import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Promise}
import scala.util.{Failure, Success, Try}

class EvaluateTestFixture extends UnitTest with NashornScriptHostTestFixture with ScalaFutures with IntegrationPatience {

  override implicit val executionContext: ExecutionContext = ExecutionContext.global

  type Tester = (ScriptHost, Seq[StackFrame]) => Unit

  protected def evaluateInScript(script: String, unknownEventHandler: (ScriptEvent) => Unit = _ => {})(testers: Tester*): Unit = {
    assert(script.contains("debugger;"), "Script must contain a 'debugger' statement")
    assert(testers.nonEmpty, "Must have at least one tester")
    val testerQueue = mutable.Queue[Tester](testers: _*)
    val donePromise = Promise[Unit]()
    val observer = Observer.from[ScriptEvent] {
      case bp: HitBreakpoint =>
        val host = getHost
        val next = testerQueue.dequeue()
        Try(next(host, bp.stackFrames)) match {
          case Success(_) =>
            host.resume()
            if (testerQueue.isEmpty) donePromise.success(())

          case Failure(t) =>
            donePromise.failure(t)
        }
      case x => unknownEventHandler(x)
    }
    observeAndRunScriptAsync(script, observer)(_ => donePromise.future)
  }
} 
Example 164
Source File: BreakpointTestFixture.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.UnitTest

import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.{Failure, Success, Try}

class BreakpointTestFixture extends UnitTest with NashornScriptHostTestFixture {

  override implicit val executionContext: ExecutionContext = ExecutionContext.global

  type Tester = (ScriptHost, HitBreakpoint) => Any

  protected def waitForBreakpoints(script: String, hostSetup: (NashornScriptHost) => Unit = (_) => {})(testers: Tester*): Unit = {
    assert(script.contains("debugger;"), "Script must contain a 'debugger' statement")
    assert(testers.nonEmpty, "Must have at least one tester")
    val testerQueue = mutable.Queue[Tester](testers: _*)
    val donePromise = Promise[Unit]()
    val observer = Observer.from[ScriptEvent] {
      case bp: HitBreakpoint =>
        val host = getHost
        val next = testerQueue.dequeue()
        val isDone = testerQueue.isEmpty
        Try(next(host, bp)) match {
          case Success(f: Future[_]) =>
            f.onComplete {
              case Success(_) =>
                host.resume()
                if (isDone) donePromise.success(())

              case Failure(t) =>
                // host.resume()
                donePromise.tryFailure(t)
            }

          case Success(_) =>
            host.resume()
//            if (testerQueue.isEmpty) donePromise.success(())
            if (isDone) donePromise.success(())

          case Failure(t) =>
            donePromise.failure(t)
        }
    }
    observeAndRunScriptAsync(script, observer, hostSetup)(_ => donePromise.future)
  }

  protected def waitForBreakpoint(script: String, hostSetup: (NashornScriptHost) => Unit = (_) => {})(tester: Tester): Unit = {
    waitForBreakpoints(script, hostSetup)(tester)
  }

  protected def waitForBreakpointThenEvent(script: String, hostSetup: (NashornScriptHost) => Unit = (_) => {})
                                          (tester: (ScriptHost, HitBreakpoint) => Unit)
                                          (eventHandler: PartialFunction[ScriptEvent, Unit]): Unit = {
    assert(script.contains("debugger;"), "Script must contain a 'debugger' statement")
    val eventPromise = Promise[Unit]()
    val stackframesPromise = Promise[HitBreakpoint]()
    val observer = Observer.from[ScriptEvent] {
      case bp: HitBreakpoint => stackframesPromise.trySuccess(bp)
      case other =>
        if (eventHandler.isDefinedAt(other)) {
          eventPromise.complete(Try(eventHandler.apply(other)))
        }
    }
    observeAndRunScriptAsync(script, observer, hostSetup) { host =>
      stackframesPromise.future.flatMap(bp => {
        try tester(host, bp) finally host.resume()
        eventPromise.future
      })
    }
  }

  protected def waitForEvent(script: String, hostSetup: (NashornScriptHost) => Unit = (_) => {})(tester: PartialFunction[ScriptEvent, Unit]): Unit = {
    val eventPromise = Promise[Unit]()
    val observer = Observer.from[ScriptEvent] {
      case ev =>
        if (tester.isDefinedAt(ev)) {
          eventPromise.complete(Try(tester.apply(ev)))
        }

    }
    observeAndRunScriptAsync(script, observer, hostSetup) { host =>
      eventPromise.future
    }
  }
} 
Example 165
Source File: CallFunctionOnTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.host.types.{ExceptionData, Undefined}
import org.scalatest.prop.TableDrivenPropertyChecks

import scala.concurrent.Promise
import scala.util.{Failure, Success, Try}

class CallFunctionOnTest extends EvaluateTestFixture with TableDrivenPropertyChecks {

  private def testSuccess[A](tr: Try[A])(tester: (A) => Unit): Unit = tr match {
    case Success(value) => tester(value)
    case Failure(t) => fail(t)
  }

  def testObjectValue(script: String, objName: String)(f: ObjectId => Unit): Unit = {
    evaluateInScript(script)({ (host, stackframes) =>
      host.evaluateOnStackFrame(stackframes.head.id, objName) match {
        case Success(cn: ComplexNode) => f(cn.objectId)
        case Success(other) => fail("Unexpected evaluate result: " + other)
        case Failure(t) => fail("Error", t)
      }
    })
  }

  def testObjectValue(f: ObjectId => Unit): Unit = {
    val script =
      """
        |function fun() {
        |  var obj = { value: 42 };
        |  debugger;
        |  obj.toString();
        |}
        |fun();
      """.stripMargin
    testObjectValue(script, "obj")(f)
  }

  "callFunctionOn" - {

    "works for access to 'this'" in {
      val funcDecl = "function () { return this.value; }"
      testObjectValue { objId =>
        val retVal = getHost.callFunctionOn(StackFrame.TopId, Some(objId), funcDecl, Seq.empty)

        retVal should be(Success(SimpleValue(42)))
      }
    }

    "works with argument" in {
      val funcDecl = "function (x) { return x.value; }"
      testObjectValue { objId =>
        val retVal = getHost.callFunctionOn(StackFrame.TopId, None, funcDecl, Seq(objId))

        retVal should be(Success(SimpleValue(42)))
      }
    }

    "can access Object in a strict mode function" in {
      val script =
        """
          |function fun() {
          |  'use strict';
          |  var obj = { value: 99 };
          |  debugger;
          |  obj.toString();
          |}
          |fun();
        """.stripMargin
      testObjectValue(script, "obj") { objId =>
        getHost.callFunctionOn(StackFrame.TopId, None, "function (x) { return Object.getOwnPropertyNames(x); }", Seq(objId)) match {
          case Success(an: ArrayNode) =>
            an.size should be (1)
          case Success(other) => fail("Unexpected callFunctionOn result: " + other)
          case Failure(t) => fail("Error", t)
        }
      }
    }
  }
} 
Example 166
Source File: RunningJava9.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn.java9

import com.programmaticallyspeaking.ncd.nashorn.NashornScriptHostTestFixture

import scala.concurrent.duration._
import scala.util.Try

object RunningJava9 {
  def java9Home: Option[String] = {
    if (isJava9) None
    else Option(System.getenv("JAVA9_HOME")).orElse(Some("c:\\Program files\\Java\\jdk-9"))
  }

  private lazy val isJava9 = {
    Try(Class.forName("jdk.nashorn.internal.runtime.Symbol")).isSuccess
  }
}

trait RunningJava9 { self: NashornScriptHostTestFixture =>
  // Java 9 on Travis is slow to start, increase the timeout a bit
  override val runVMTimeout: FiniteDuration = 16.seconds
  protected override def javaHome: Option[String] = RunningJava9.java9Home
} 
Example 167
Source File: MapBasedObjectInteraction.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.testing

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.host.types.{ObjectPropertyDescriptor, PropertyDescriptorType}

import scala.util.{Failure, Try}

class MapBasedObjectInteraction(data: Map[ObjectId, Map[String, ValueNode]]) extends ObjectInteraction {

  override def getOwnProperties(objectId: ObjectId): Seq[(String, ObjectPropertyDescriptor)] = data.get(objectId) match  {
    case Some(objectData) =>
      objectData.map { e =>
        e._1 -> ObjectPropertyDescriptor(PropertyDescriptorType.Data, false, true, false, true, Some(e._2), None, None)
      }.toSeq
    case None => throw new IllegalArgumentException("Unknown object ID: " + objectId.id)
  }

  override def invokePropertyGetter(objectId: ObjectId, getter: FunctionNode): Try[ValueNode] =
    Failure(new UnsupportedOperationException("MapBasedObjectInteraction doesn't support function invocation"))
} 
Example 168
Source File: Fakes.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.testing

import java.io.File
import java.net.URL

import com.programmaticallyspeaking.ncd.chrome.net.FilePublisher
import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.{Observable, SerializedSubject}

import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.util.Try

object FakeFilePublisher extends FilePublisher {
  override def publish(file: File): URL = new URL("http://localhost/no/such/file")
}

object FakeScriptHost extends ScriptHost {
  val eventSubject = new SerializedSubject[ScriptEvent]

  override def evaluateOnStackFrame(stackFrameId: String, expression: String): Try[ValueNode] = Try(notImpl)
  override def removeBreakpointById(id: String): Unit = notImpl
  override def resume(): Unit = notImpl
  override def reset(): Unit = notImpl
  override def findScript(id: ScriptIdentity): Option[Script] = None
  override def events: Observable[ScriptEvent] = eventSubject
  override def scripts: Seq[Script] = Seq.empty
  override def setBreakpoint(id: ScriptIdentity, location: ScriptLocation, options: BreakpointOptions): Breakpoint = notImpl
  override def getBreakpointLocations(id: ScriptIdentity, from: ScriptLocation, to: Option[ScriptLocation]): Seq[ScriptLocation] = Seq.empty
  override def step(stepType: StepType): Unit = notImpl
  override def pauseOnBreakpoints(): Unit = notImpl
  override def ignoreBreakpoints(): Unit = notImpl
  override def getObjectProperties(objectId: ObjectId, onlyOwn: Boolean, onlyAccessors: Boolean): Seq[(String, types.ObjectPropertyDescriptor)] = Seq.empty
  override def pauseOnExceptions(pauseType: ExceptionPauseType): Unit = notImpl
  
  private def notImpl: Nothing = throw new UnsupportedOperationException("FakeScriptHost is not complete")

  override def restartStackFrame(stackFrameId: String): Seq[StackFrame] = notImpl

  override def startProfiling(samplingInterval: FiniteDuration): Unit = notImpl
  override def stopProfiling(): ProfilingData = notImpl

  override def pauseAtNextStatement(): Unit = notImpl

  override def setSkipAllPauses(skip: Boolean): Unit = notImpl

  override def compileScript(script: String, url: String, persist: Boolean): Future[Option[Script]] = Future.failed(notImpl)

  override def runCompiledScript(scriptId: String): Try[ValueNode] = notImpl

  override def warnings: Seq[String] = Seq.empty

  override def callFunctionOn(stackFrameId: String, thisObject: Option[ObjectId], functionDeclaration: String, arguments: Seq[ObjectId]): Try[ValueNode] = Try(notImpl)
} 
Example 169
Source File: MiniDatabase.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.minidatabase

import scala.io.Source
import scala.util.{Failure, Success, Try}


object MiniDatabase {
  def load(wo: Option[String]): List[Entry] = {
    val sy = Try(wo match {
      case Some(w) => Source.fromFile(w)
      case None => Source.fromResource("minidatabase.csv")
    })
    sy match {
      case Success(s) =>
        println(s)
        val result = s.getLines.toList.map(e => Entry(e.split(",")))
        s.close()
        result
      case Failure(x) =>
        System.err.println(x.getLocalizedMessage)
        Nil
    }
  }

  def measure(height: Height): String = height match {
    case Height(8, _) => "giant"
    case Height(7, _) => "very tall"
    case Height(6, _) => "tall"
    case Height(5, _) => "normal"
    case Height(_, _) => "short"
  }

  def main(args: Array[String]): Unit = {
    val wo = args.headOption
    val db = load(wo)
    print(db)
  }
}

case class Entry(name: Name, social: Social, dob: Date, height: Height, weight: Int)

case class Height(feet: Int, in: Int) {
  def inches: Int = feet * 12 + in
}

object Entry {
  def apply(name: String, social: String, dob: String, height: String, weight: String): Entry =
    Entry(Name(name), Social(social), Date(dob), Height(height), weight.toInt)

  def apply(entry: Seq[String]): Entry = apply(entry.head, entry(1), entry(2), entry(3), entry(4))
}

object Height {
  private val rHeightFtIn = """^\s*(\d+)\s*(?:ft|\')(\s*(\d+)\s*(?:in|\"))?\s*$""".r
  private val rHeightFt = """^\s*(\d+)\s*(?:ft|\')$""".r

  def apply(ft: String, in: String) = new Height(ft.toInt, in.toInt)

  def apply(ft: Int) = new Height(ft, 0)

  def apply(height: String): Height = height match {
    case rHeightFt(ft) => Height(ft.toInt)
    case rHeightFtIn(ft, _, in) => Height(ft, in)
    case _ => throw new IllegalArgumentException(height)
  }
}

case class Name(first: String, middle: String, last: String)

case class Social(area: Int, group: Int, serial: Int)

case class Date(year: Int, month: Int, day: Int)

object Name {
  private val rName3 = """^(\w+)\s+(\w.*)\s+(\w+)$""".r
  private val rName2 = """^(\w+)\s+(\w+)$""".r
  private val rName1 = """^(\w+)$""".r

  def apply(name: String): Name = ???
}

object Date {
  private val rDate1 = """^(\w+)\s+(\d+)\w\w\s(\d{4})$""".r
  private val rDate2 = """^(\d+)\/(\d+)\/(\d+)$""".r

  def apply(year: String, month: String, day: String): Date = ???

  def apply(date: String): Date = ???
}

object Social {
  private val rSsn = """^(\d{3})\-(\d{2})\-(\d{4})$""".r

  def apply(ssn: String): Social = ???
} 
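
The Height regexes accept both a feet-and-inches form and a feet-only form; a REPL-style sketch:

Height("6 ft 2 in")        // Height(6, 2) -- matched by rHeightFtIn
Height("5'")               // Height(5, 0) -- matched by rHeightFt
Height("6 ft 2 in").inches // 74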
Example 170
Source File: ParseCSVwithHTML.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.parse

import java.io.{BufferedWriter, File, FileWriter}

import scala.collection.mutable
import scala.io.{BufferedSource, Source}
import scala.util.{Failure, Success, Try}


object ParseCSVwithHTML extends App {
  val parser = ParseCSVwithHTML(CsvParser(delimiter = "\t"))
  val title = "Report"
  if (args.length > 0) {
    val filename = args.head
    val source: BufferedSource = Source.fromFile(filename, "UTF-16")
    val w = parser.parseStreamIntoHTMLTable(source.getLines.toStream, title)
    val file = new File("output.html")
    val bw = new BufferedWriter(new FileWriter(file))
    bw.write(w)
    bw.close()
    println(s"Successfully written $file")
  }
  else
    System.err.println("syntax: ParseCSVwithHTML filename")

} 
Example 171
Source File: StockReader.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.cache

import edu.neu.coe.csye7200.MonadOps

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.io.Source
import scala.util.Try

object StockReader {

  def getPrice(symbol: String)(implicit resource: String): Future[Double] = MonadOps.flatten(Future(readPrice(symbol)))

  private def readPrice(symbol: String)(implicit resource: String): Try[Double] = {
    def readMatchingPrices = {
      val ws = Source.fromInputStream(getClass.getResourceAsStream(resource)).getLines
      val was = for (w <- ws.toSeq) yield w.split("""\s+""")
      for (wa <- was; if wa.length > 1; x = wa.head; if x == symbol) yield wa.last
    }

    val wy = MonadOps.flatten(Try(readMatchingPrices.headOption), new Exception(s"no entry matching $symbol"))
    for (w <- wy; x <- Try(w.toDouble)) yield x
  }
} 
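
A REPL-style sketch; the resource path is hypothetical, and the file is expected to hold whitespace-separated "SYMBOL PRICE" lines, per readMatchingPrices above:

import scala.concurrent.ExecutionContext.Implicits.global

implicit val resource: String = "/stockprices.txt" // hypothetical classpath resource
StockReader.getPrice("MSFT").foreach(price => println(s"MSFT: $price"))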
Example 172
Source File: RenderableSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200

import org.scalatest.{FlatSpec, Inside, Matchers}

import scala.util.Try

case class Scalar(s: String) extends Renderable {
  def render: String = s.toUpperCase()
}


class RenderableSpec extends FlatSpec with Matchers with Inside {
  behavior of "Renderable"
  it should "render simple values like toString" in {
    Scalar("x").render shouldBe "X"
  }
  it should "render list values with indentation" in {
    val list = Seq(Scalar("x"), Scalar("y"), Scalar("z"))
    list.render shouldBe "(\nX,\nY,\nZ\n)"
  }
  it should "render list values with double indentation" in {
    val list = Seq(Seq(Scalar("x0"), Scalar("x1")), Seq(Scalar("y0"), Scalar("y1")), Seq(Scalar("z0"), Scalar("z1")))
    list.render shouldBe "(\n(\nX0,\nX1\n),\n(\nY0,\nY1\n),\n(\nZ0,\nZ1\n)\n)"
  }
  it should "render option values" in {
    val xo = Option(Scalar("x"))
    xo.render shouldBe "Some(X)"
  }
  it should "render try values" in {
    val xy = Try(Scalar("x"))
    xy.render shouldBe "Success(X)"
  }
  it should "render either values" in {
    val e = Left(Scalar("x"))
    e.render shouldBe "Left(X)"
  }
} 
Example 173
Source File: CacheSpec.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.cache

import java.net.URL

import org.scalatest.concurrent.{Futures, ScalaFutures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Random, Try}

class CacheSpec extends FlatSpec with Matchers with Futures with ScalaFutures {

  behavior of "apply"

  val random = Random

  def lookupStock(k: String): Future[Double] = Future {
    random.setSeed(k.hashCode)
    random.nextInt(1000) / 100.0
  }

  it should "work" in {
    val cache = MyCache[String,Double](lookupStock)
    val xf: Future[Double] = cache("MSFT")
    whenReady(xf) { u => u should matchPattern { case x: Double =>  } }
    xf.value.get.get shouldBe 3.64
  }
} 
Example 174
Source File: NumberPredicate.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.rules

import scala.util.{Failure, Success, Try}


case class NumberPredicate(variable: String, operator: Operator[Double], value: Double) extends Predicate {

  def apply(candidate: Candidate): Either[Throwable, Boolean] = candidate(variable) match {
    case Some(x) => Try { operate(x, operator, value) } match {
      case Success(v) => Right(v)
      case Failure(f) => Left(f)
    }
    case _ => Left(new Exception(s"variable $variable not found in $candidate"))
  }

  // CONSIDER Moving this into Operator class
  def operate(x: Any, operator: Operator[Double], value: Double): Boolean = {
    x match {
      case y: Double => operator(y, value)
      case y: Int => operator(y, value)
      case y: String => operator(y.toDouble, value)
      case _ => throw new Exception(s"variable $variable cannot be for operator $operator")
    }
  }
}

object NumberPredicate {
  def apply(variable: String, operator: String, value: Double): NumberPredicate =
    new NumberPredicate(variable, Operator.createNumeric(operator), value)
  def apply(variable: String, operator: Operator[Double], value: String): NumberPredicate =
    new NumberPredicate(variable, operator, value.toDouble)
  def apply(variable: String, operator: String, value: String): NumberPredicate =
    apply(variable, Operator.createNumeric(operator), value)
  def apply(predicate: String): NumberPredicate = {
    val rPredicate = """^\s*(\w+)\s*([=<>]{1,2})\s*(-?[0-9]+\.?[0-9]*)\s*$""".r
    predicate match {
      case rPredicate(v, o, n) => apply(v, o, n)
      case _ => throw new Exception(s"predicate: $predicate is malformed")
    }
  }
} 
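
The string-based factory parses a "variable operator number" form via rPredicate; a REPL-style sketch:

NumberPredicate("price < 100") // variable "price", numeric "<" operator, value 100.0
NumberPredicate("price below") // throws: "predicate: price below is malformed"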
Example 175
Source File: StringPredicate.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.rules

import scala.util.{Failure, Success, Try}


case class StringPredicate(variable: String, operator: Operator[String], value: String) extends Predicate {

  def apply(candidate: Candidate): Either[Throwable, Boolean] = candidate(variable) match {
    case Some(x) => Try { operator(x.toString, value) } match {
      case Success(v) => Right(v)
      case Failure(f) => Left(f)
    }
    case _ => Left(new Exception(s"variable $variable not found in $candidate"))
  }
}

object StringPredicate {
  def apply(variable: String, operator: String, value: String): StringPredicate =
    new StringPredicate(variable, Operator.createText(operator), value)
} 
Example 176
Source File: ExpressionParserNumeric.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.parse

import edu.neu.coe.csye7200.numerics.Rational
import scala.util.Try
import edu.neu.coe.csye7200.numerics.Fuzzy


abstract class ExpressionParserNumeric[T : Numeric](implicit num: Numeric[T]) extends ExpressionParser[T] { self =>

  def div: (T,T)=>T = if (num.isInstanceOf[Fractional[T]]) (num.asInstanceOf[Fractional[T]]).div else throw new IllegalArgumentException("div method unavailable")
  def one: T = num.one
  def zero: T = num.zero
  def negate: (T)=>T = num.negate
  def plus: (T,T)=>T = num.plus
  def times: (T,T)=>T = num.times
}

object DoubleExpressionParser extends ExpressionParserNumeric[Double] {
  def apply(s: String): Try[Double] = Try(s.toDouble)
}
object IntExpressionParser extends ExpressionParserNumeric[Int] {
  def apply(s: String): Try[Int] = Try(s.toInt)
}
object RationalExpressionParser extends ExpressionParserNumeric[Rational] {
  def apply(s: String): Try[Rational] = Try(Rational.apply(s))
}
//object FuzzyExpressionParser extends ExpressionParserNumeric[Fuzzy] {
//  def apply(s: String): Try[Fuzzy] = Try(Fuzzy.apply(s))
//} 
Example 177
Source File: ExpressionParser.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.parse

import scala.util.parsing.combinator._
import scala.util.Try


abstract class ExpressionParser[T] extends JavaTokenParsers with (String => Try[T]) { self =>

  def apply(s: String): Try[T]
  def div: (T,T)=>T
  def negate: (T)=>T
  def plus: (T,T)=>T
  def times: (T,T)=>T
  def one: T
  def zero: T
  
  trait Expression {
    def value: Try[T]
  }

  def lift(t: Try[T])(f: (T) => T): Try[T] = t map f
  def map2(t1: Try[T], t2: Try[T])(f: (T,T) => T): Try[T] = for { tt1 <- t1 ; tt2 <- t2 } yield f(tt1,tt2)
  
  abstract class Factor extends Expression
  case class Expr(t: Term, ts: List[String~Term]) extends Expression {
    def termVal(t: String~Term): Try[T] = t match {case "+"~x => x.value; case "-"~x => lift(x.value)(negate); case z~_ => scala.util.Failure(ParseException(s"Expr: operator $z is not supported")) }
    def value: Try[T] = ts.foldLeft(t.value)((a, x) => map2(a,termVal(x))(plus))
  }
  case class Term(f: Factor, fs: List[String~Factor]) extends Expression {
    def factorVal(t: String~Factor): Try[T] = t match {case "*"~x => x.value; case "/"~x => map2(Try(one),x.value)(div); case z~_ => scala.util.Failure(ParseException(s"Term: operator $z is not supported")) }
    def value = fs.foldLeft(f.value)((a,x)=>map2(a,factorVal(x))(times))
  }
  case class FloatingPoint(x: String) extends Factor {
    def value = self.apply(x)
  }
  case class Parentheses(e: Expr) extends Factor {
    def value: Try[T] = e.value
  }
  
  def expr: Parser[Expr] = term~rep("+"~term | "-"~term | failure("expr")) ^^ { case t~r => r match {case x: List[String~Term] => Expr(t,x)}}
  def term: Parser[Term] = factor~rep("*"~factor | "/"~factor | failure("term")) ^^ { case f~r => r match {case x: List[String~Factor] => Term(f,x)}} 
  def factor: Parser[Factor] = (floatingPointNumber | "("~expr~")" | failure("factor")) ^^ { case "("~e~")" => e match {case x: Expr => Parentheses(x)}; case s: String => FloatingPoint(s) }
}

case class ParseException(s: String) extends Exception(s"Parse exception: $s") 
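
Putting the two previous examples together, a REPL-style sketch of parsing and evaluating an expression with the Double parser (note that value itself returns a Try):

val p = DoubleExpressionParser
p.parseAll(p.expr, "(1 + 2) * 3") match {
  case p.Success(e, _) => println(e.value) // Success(9.0)
  case failure         => println(s"parse failure: $failure")
}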
Example 179
Source File: ServingUDFs.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package org.apache.spark.sql.execution.streaming

import com.microsoft.ml.spark.io.http.HTTPResponseData
import com.microsoft.ml.spark.io.http.HTTPSchema.{binary_to_response, empty_response, string_to_response}
import org.apache.spark.sql.execution.streaming.continuous.HTTPSourceStateHolder
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.{lit, struct, to_json, udf}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Column, Row}

import scala.util.Try

object ServingUDFs {

  private def jsonReply(c: Column) = string_to_response(to_json(c))

  def makeReplyUDF(data: Column, dt: DataType, code: Column = lit(200), reason: Column = lit("Success")): Column = {
    dt match {
      case NullType => empty_response(code, reason)
      case StringType => string_to_response(data, code, reason)
      case BinaryType => binary_to_response(data)
      case _: StructType => jsonReply(data)
      case _: MapType => jsonReply(data)
      case at: ArrayType => at.elementType match {
        case _: StructType => jsonReply(data)
        case _: MapType => jsonReply(data)
        case _ => jsonReply(struct(data))
      }
      case _ => jsonReply(struct(data))
    }
  }

  private def sendReplyHelper(mapper: Row => HTTPResponseData)(serviceName: String, reply: Row, id: Row): Boolean = {
    if (Option(reply).isEmpty || Option(id).isEmpty) {
      null.asInstanceOf[Boolean] //scalastyle:ignore null
    } else {
      Try(HTTPSourceStateHolder.getServer(serviceName).replyTo(id.getString(0), id.getString(1), mapper(reply)))
        .toOption.isDefined
    }
  }

  def sendReplyUDF: UserDefinedFunction = {
    val toData = HTTPResponseData.makeFromRowConverter
    udf(sendReplyHelper(toData) _, BooleanType)
  }

} 
Example 180
Source File: RESTHelpers.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.cognitive

import org.apache.commons.io.IOUtils
import org.apache.http.client.config.RequestConfig
import org.apache.http.client.methods._
import org.apache.http.impl.client.{CloseableHttpClient, HttpClientBuilder}
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager

import scala.concurrent.blocking
import scala.util.Try

object RESTHelpers {
  lazy val RequestTimeout = 60000

  lazy val RequestConfigVal: RequestConfig = RequestConfig.custom()
    .setConnectTimeout(RequestTimeout)
    .setConnectionRequestTimeout(RequestTimeout)
    .setSocketTimeout(RequestTimeout)
    .build()

  lazy val ConnectionManager = {
    val cm = new PoolingHttpClientConnectionManager()
    cm.setDefaultMaxPerRoute(Int.MaxValue)
    cm.setMaxTotal(Int.MaxValue)
    cm
  }

  lazy val Client: CloseableHttpClient = HttpClientBuilder
    .create().setConnectionManager(ConnectionManager)
    .setDefaultRequestConfig(RequestConfigVal).build()

  def retry[T](backoffs: List[Int], f: () => T): T = {
    try {
      f()
    } catch {
      case t: Throwable =>
        val waitTime = backoffs.headOption.getOrElse(throw t)
        println(s"Caught error: $t with message ${t.getMessage}, waiting for $waitTime")
        blocking {Thread.sleep(waitTime.toLong)}
        retry(backoffs.tail, f)
    }
  }

  //TODO use this elsewhere
  def safeSend(request: HttpRequestBase,
               backoffs: List[Int] = List(100, 500, 1000),
               expectedCodes: Set[Int] = Set(),
               close: Boolean = true): CloseableHttpResponse = {

    retry(backoffs, { () =>
      val response = Client.execute(request)
      try {
        if (response.getStatusLine.getStatusCode.toString.startsWith("2") ||
          expectedCodes(response.getStatusLine.getStatusCode)
        ) {
          response
        } else {
          val requestBodyOpt = Try(request match {
            case er: HttpEntityEnclosingRequestBase => IOUtils.toString(er.getEntity.getContent)
            case _ => ""
          }).getOrElse("")

          val responseBodyOpt = Try(IOUtils.toString(response.getEntity.getContent)).getOrElse("")

          throw new RuntimeException(
            s"Failed: " +
              s"\n\t response: $response " +
              s"\n\t requestUrl: ${request.getURI}" +
              s"\n\t requestBody: $requestBodyOpt" +
              s"\n\t responseBody: $responseBodyOpt")
        }
      } catch {
        case e: Exception =>
          response.close()
          throw e
      } finally {
        if (close) {
          response.close()
        }
      }
    })
  }

} 
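
retry is independently useful on its own; a REPL-style sketch with a deliberately flaky thunk:

import com.microsoft.ml.spark.cognitive.RESTHelpers.retry

var attempts = 0
val value = retry(List(100, 500), { () =>
  attempts += 1
  if (attempts < 3) throw new RuntimeException(s"transient failure #$attempts")
  42
})
// Sleeps 100 ms, then 500 ms, then succeeds with 42; one more failure would rethrow.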
Example 181
Source File: AsyncUtils.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.core.utils

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

object AsyncUtils {
  def bufferedAwait[T](it: Iterator[Future[T]],
                                 concurrency: Int,
                                 timeout: Duration)
                                (implicit ec: ExecutionContext): Iterator[T] = {
    bufferedAwaitSafe(it, concurrency, timeout).map{
      case Success(data) => data
      case f: Failure[T] => throw f.exception
    }
  }

  private def safeAwait[T](f: Future[T], timeout: Duration): Try[T] = {
    Try(Await.result(f, timeout))
  }

  def bufferedAwaitSafe[T](it: Iterator[Future[T]],
                       concurrency: Int,
                       timeout: Duration)
                      (implicit ec: ExecutionContext): Iterator[Try[T]] = {
    if (concurrency > 1) {
      val slidingIterator = it.sliding(concurrency - 1).withPartial(true)
      // `hasNext` will auto start the nth future in the batch
      val (initIterator, tailIterator) = slidingIterator.span(_ => slidingIterator.hasNext)
      initIterator.map(futureBatch => safeAwait(futureBatch.head, timeout)) ++
        tailIterator.flatMap(lastBatch => lastBatch.map(safeAwait(_, timeout)))
    } else if (concurrency == 1) {
      it.map(f => safeAwait(f, timeout))
    } else {
      throw new IllegalArgumentException(
        s"Concurrency needs to be at least 1, got: $concurrency")
    }
  }

  private def safeAwaitWithContext[T, C](f: (Future[T], C), timeout: Duration): (Try[T], C) = {
    (Try(Await.result(f._1, timeout)), f._2)
  }

  def bufferedAwaitSafeWithContext[T,C](it: Iterator[(Future[T],C)],
                           concurrency: Int,
                           timeout: Duration)
                          (implicit ec: ExecutionContext): Iterator[(Try[T],C)] = {
    if (concurrency > 1) {
      val slidingIterator = it.sliding(concurrency - 1).withPartial(true)
      // `hasNext` will auto start the nth future in the batch
      val (initIterator, tailIterator) = slidingIterator.span(_ => slidingIterator.hasNext)
      initIterator.map(futureBatch => safeAwaitWithContext(futureBatch.head, timeout)) ++
        tailIterator.flatMap(lastBatch => lastBatch.map(safeAwaitWithContext(_, timeout)))
    } else if (concurrency == 1) {
      it.map(f => safeAwaitWithContext(f, timeout))
    } else {
      throw new IllegalArgumentException(
        s"Concurrency needs to be at least 1, got: $concurrency")
    }
  }
} 
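
A minimal usage sketch for bufferedAwait (input values are illustrative): it consumes an iterator of futures while keeping at most `concurrency` of them in flight, throwing on the first failure.

import com.microsoft.ml.spark.core.utils.AsyncUtils

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration._

val futures = (1 to 10).iterator.map(i => Future(i * i))
// At most 3 futures run ahead of the one currently being awaited.
val results = AsyncUtils.bufferedAwait(futures, concurrency = 3, timeout = 10.seconds)
println(results.toList) // List(1, 4, 9, 16, 25, 36, 49, 64, 81, 100)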
Example 182
Source File: StreamUtilities.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.core.env

import java.io.{ByteArrayOutputStream, InputStream}
import java.util.zip.ZipInputStream

import org.apache.commons.io.IOUtils

import scala.io.Source
import scala.util.Random

object StreamUtilities {

  import scala.util.{Failure, Success, Try}
  def usingMany[T <: AutoCloseable, U](disposable: Seq[T])(task: Seq[T] => U): Try[U] = {
    try {
      Success(task(disposable))
    } catch {
      case e: Exception => Failure(e)
    } finally {
      disposable.foreach(d => d.close())
    }
  }

  def using[T <: AutoCloseable, U](disposable: T)(task: T => U): Try[U] = {
    try {
      Success(task(disposable))
    } catch {
      case e: Exception => Failure(e)
    } finally {
      disposable.close()
    }
  }

  def usingSource[T <: Source, U](disposable: T)(task: T => U): Try[U] = {
    try {
      Success(task(disposable))
    } catch {
      case e: Exception => Failure(e)
    } finally {
      disposable.close()
    }
  }

  
  // Iterates the entries of a zip stream as (fileName, bytes) pairs, optionally
  // sampling entries with probability `sampleRatio`.
  class ZipIterator(stream: InputStream, zipfile: String, random: Random, sampleRatio: Double = 1)
    extends Iterator[(String, Array[Byte])] {

    private val zipStream = new ZipInputStream(stream)

    private def getNext: Option[(String, Array[Byte])] = {
      var entry = zipStream.getNextEntry
      while (entry != null) {
        if (!entry.isDirectory && random.nextDouble < sampleRatio) {

          val filename = zipfile + java.io.File.separator + entry.getName

          //extracting all bytes of a given entry
          val byteStream = new ByteArrayOutputStream
          IOUtils.copy(zipStream, byteStream)
          val bytes = byteStream.toByteArray

          assert(bytes.length == entry.getSize,
            "incorrect number of bytes is read from zipstream: " + bytes.length + " instead of " + entry.getSize)

          return Some((filename, bytes))
        }
        entry = zipStream.getNextEntry
      }

      stream.close()
      None
    }

    private var nextValue = getNext

    def hasNext: Boolean = nextValue.isDefined

    def next: (String, Array[Byte]) = {
      val result = nextValue.get
      nextValue = getNext
      result
    }
  }

} 
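
A short usage sketch for usingSource (the file path is illustrative): the task's outcome is captured as a Try, and the source is closed in the finally block either way.

import com.microsoft.ml.spark.core.env.StreamUtilities

import scala.io.Source

val firstLine = StreamUtilities.usingSource(Source.fromFile("/etc/hosts")) { src =>
  src.getLines().next()
}
println(firstLine) // Success(<first line>) or Failure(<exception>)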
Example 183
Source File: Service.scala    From reactive-microservices   with MIT License 5 votes vote down vote up
import akka.actor.ActorSystem
import com.restfb.types.User
import redis.RedisClient
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

class Service(gateway: Gateway)(implicit actorSystem: ActorSystem, ec: ExecutionContext) extends Config {
  def register(authResponse: AuthResponse, tokenValueOption: Option[String]): Try[Future[Either[String, Identity]]] = {
    gateway.getFbUserDetails(authResponse.accessToken).map { user =>
      redis.exists(userToRedisKey(user)).flatMap {
        case true => Future.successful(Left(s"User with id ${user.getId} is already registered"))
        case false => acquireIdentity(tokenValueOption).flatMap {
          case Right(identity) => saveUserIdentityMapping(user, identity)
          case l => Future.successful(l)
        }
      }
    }
  }

  def login(authResponse: AuthResponse, tokenValueOption: Option[String]): Try[Future[Either[String, Token]]] = {
    gateway.getFbUserDetails(authResponse.accessToken).map { user =>
      getIdentityIdForUser(user).flatMap {
        case Some(identityId) => doLogin(identityId, tokenValueOption)
        case None => Future.successful(Left(s"User with id ${user.getId} is not registered"))
      }
    }
  }

  private def doLogin(identityId: Long, tokenValueOption: Option[String]): Future[Either[String, Token]] = {
    tokenValueOption match {
      case Some(tokenValue) => gateway.requestRelogin(tokenValue).map {
        case Some(token) => Right(token)
        case None => Left("Token expired or not found")
      }
      case None => gateway.requestLogin(identityId).map(Right(_))
    }
  }

  private def acquireIdentity(tokenValueOption: Option[String]): Future[Either[String, Identity]] = {
    tokenValueOption match {
      case Some(tokenValue) => gateway.requestToken(tokenValue).map(_.right.map(token => Identity(token.identityId)))
      case None => gateway.requestNewIdentity().map(Right(_))
    }
  }

  private def saveUserIdentityMapping(user: User, identity: Identity): Future[Either[String, Identity]] = {
    redis.setnx(userToRedisKey(user), identity.id).map {
      case true => Right(identity)
      case false => Left(s"User with id ${user.getId} is already registered")
    }
  }

  private def getIdentityIdForUser(user: User): Future[Option[Long]] = redis.get(userToRedisKey(user)).map(_.map(_.utf8String.toLong))

  private def userToRedisKey(user: User): String = s"auth-fb:id:${user.getId}"

  private val redis = RedisClient(host = redisHost, port = redisPort, password = Option(redisPassword), db = Option(redisDb))
} 
Example 184
Source File: Gateway.scala    From reactive-microservices   with MIT License 5 votes vote down vote up
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.FlowMaterializer
import akka.stream.scaladsl.{Sink, Source}
import com.restfb.DefaultFacebookClient
import com.restfb.types.User
import java.io.IOException
import scala.concurrent.{blocking, ExecutionContext, Future}
import scala.util.Try

case class InternalLoginRequest(identityId: Long, authMethod: String = "fb")
case class InternalReloginRequest(tokenValue: String, authMethod: String = "fb")

class Gateway(implicit actorSystem: ActorSystem, materializer: FlowMaterializer, ec: ExecutionContext)
  extends JsonProtocols with Config {

  private val identityManagerConnectionFlow = Http().outgoingConnection(identityManagerHost, identityManagerPort)
  private val tokenManagerConnectionFlow = Http().outgoingConnection(tokenManagerHost, tokenManagerPort)

  private def requestIdentityManager(request: HttpRequest): Future[HttpResponse] = {
    Source.single(request).via(identityManagerConnectionFlow).runWith(Sink.head)
  }

  private def requestTokenManager(request: HttpRequest): Future[HttpResponse] = {
    Source.single(request).via(tokenManagerConnectionFlow).runWith(Sink.head)
  }

  def requestToken(tokenValue: String): Future[Either[String, Token]] = {
    requestTokenManager(RequestBuilding.Get(s"/tokens/$tokenValue")).flatMap { response =>
      response.status match {
        case Success(_) => Unmarshal(response.entity).to[Token].map(Right(_))
        case NotFound => Future.successful(Left("Token expired or not found"))
        case _ => Future.failed(new IOException(s"Token request failed with status ${response.status} and error ${response.entity}"))
      }
    }
  }

  def requestNewIdentity(): Future[Identity] = {
    requestIdentityManager(RequestBuilding.Post("/identities")).flatMap { response =>
      response.status match {
        case Success(_) => Unmarshal(response.entity).to[Identity]
        case _ => Future.failed(new IOException(s"Identity request failed with status ${response.status} and error ${response.entity}"))
      }
    }
  }

  def requestLogin(identityId: Long): Future[Token] = {
    val loginRequest = InternalLoginRequest(identityId)
    requestTokenManager(RequestBuilding.Post("/tokens", loginRequest)).flatMap { response =>
      response.status match {
        case Success(_) => Unmarshal(response.entity).to[Token]
        case _ => Future.failed(new IOException(s"Login request failed with status ${response.status} and error ${response.entity}"))
      }
    }
  }

  def requestRelogin(tokenValue: String): Future[Option[Token]] = {
    requestTokenManager(RequestBuilding.Patch("/tokens", InternalReloginRequest(tokenValue))).flatMap { response =>
      response.status match {
        case Success(_) => Unmarshal(response.entity).to[Token].map(Option(_))
        case NotFound => Future.successful(None)
        case _ => Future.failed(new IOException(s"Relogin request failed with status ${response.status} and error ${response.entity}"))
      }
    }
  }

  def getFbUserDetails(accessToken: String): Try[User] = {
    Try {
      blocking {
        val client = new DefaultFacebookClient(accessToken)
        client.fetchObject("me", classOf[User])
      }
    }
  }
} 
Example 185
Source File: TypeMappers.scala    From ticket-booking-aecor   with Apache License 2.0 5 votes vote down vote up
package ru.pavkin.booking.common.protobuf

import java.time.{ Duration, Instant }

import ru.pavkin.booking.common.models._
import scalapb.TypeMapper
import shapeless._

import scala.util.Try

trait AnyValTypeMapper {

  implicit def anyValTypeMapper[V, U](implicit ev: V <:< AnyVal,
                                      V: Unwrapped.Aux[V, U]): TypeMapper[U, V] = {
    val _ = ev
    TypeMapper[U, V](V.wrap)(V.unwrap)
  }

}

trait CaseClassTypeMapper {

  implicit def caseClassTypeMapper[A, B, Repr <: HList](
    implicit aGen: Generic.Aux[A, Repr],
    bGen: Generic.Aux[B, Repr]
  ): TypeMapper[A, B] =
    TypeMapper { x: A =>
      bGen.from(aGen.to(x))
    } { x =>
      aGen.from(bGen.to(x))
    }

}

trait BaseTypeMapper {

  implicit val bigDecimal: TypeMapper[String, BigDecimal] =
    TypeMapper[String, BigDecimal] { x =>
      val value = if (x.isEmpty) "0" else x
      BigDecimal(value)
    }(_.toString())

  implicit val instant: TypeMapper[Long, Instant] =
    TypeMapper[Long, Instant](Instant.ofEpochMilli)(_.toEpochMilli)

  implicit val instantOpt: TypeMapper[Long, Option[Instant]] =
    instant.map2(i => if (i.toEpochMilli == 0) None else Some(i))(
      _.getOrElse(Instant.ofEpochMilli(0))
    )

  implicit val duration: TypeMapper[String, java.time.Duration] =
    TypeMapper[String, Duration] { s =>
      Try(Duration.parse(s)).getOrElse(Duration.ZERO)
    } {
      _.toString
    }

}

trait TypeMapperInstances extends BaseTypeMapper with AnyValTypeMapper with CaseClassTypeMapper {

  implicit class TypeMapperOps[A <: Any](a: A) {
    def toCustom[B](implicit tm: TypeMapper[A, B]): B = tm.toCustom(a)
    def toBase[B](implicit tm: TypeMapper[B, A]): B = tm.toBase(a)
  }

}

object TypeMappers extends TypeMapperInstances {

  implicit val money: TypeMapper[String, Money] =
    bigDecimal.map2(Money(_))(_.amount)
} 
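
A round-trip sketch for the Try-backed duration mapper (values illustrative): toCustom parses ISO-8601 durations with a Duration.ZERO fallback, and toBase serializes back to the wire string.

import ru.pavkin.booking.common.protobuf.TypeMappers._

import java.time.Duration

val d: Duration = "PT15M".toCustom[Duration]              // 15 minutes
val s: String = d.toBase[String]                          // "PT15M"
val bad: Duration = "not-a-duration".toCustom[Duration]   // Duration.ZERO via the Try fallback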
Example 186
Source File: Parser.scala    From uap-scala   with Do What The F*ck You Want To Public License 5 votes vote down vote up
package org.uaparser.scala

import java.io.InputStream
import java.util.{ List => JList, Map => JMap }
import org.uaparser.scala.Device.DeviceParser
import org.uaparser.scala.OS.OSParser
import org.uaparser.scala.UserAgent.UserAgentParser
import org.yaml.snakeyaml.Yaml
import org.yaml.snakeyaml.constructor.SafeConstructor
import scala.collection.JavaConverters._
import scala.util.Try

case class Parser(userAgentParser: UserAgentParser, osParser: OSParser, deviceParser: DeviceParser)
    extends UserAgentStringParser {
  def parse(agent: String): Client =
    Client(userAgentParser.parse(agent), osParser.parse(agent), deviceParser.parse(agent))
}

object Parser {
  def fromInputStream(source: InputStream): Try[Parser] = Try {
    val yaml = new Yaml(new SafeConstructor)
    val javaConfig = yaml.load(source).asInstanceOf[JMap[String, JList[JMap[String, String]]]]
    val config = javaConfig.asScala.toMap.mapValues(_.asScala.toList.map(_.asScala.toMap.filterNot {
      case (_ , value) => value eq null
    }))
    val userAgentParser = UserAgentParser.fromList(config.getOrElse("user_agent_parsers", Nil))
    val osParser = OSParser.fromList(config.getOrElse("os_parsers", Nil))
    val deviceParser = DeviceParser.fromList(config.getOrElse("device_parsers", Nil))
    Parser(userAgentParser, osParser, deviceParser)
  }
  def default: Parser = fromInputStream(this.getClass.getResourceAsStream("/regexes.yaml")).get
} 
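
A usage sketch: Parser.default loads the bundled regexes.yaml, and because fromInputStream wraps everything in a Try that default ends with .get, a malformed resource fails fast at construction rather than at first parse.

import org.uaparser.scala.Parser

val parser = Parser.default
val client = parser.parse("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 Chrome/96.0 Safari/537.36")
println(client) // Client(UserAgent(...), OS(...), Device(...))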
Example 187
Source File: CachingParser.scala    From uap-scala   with Do What The F*ck You Want To Public License 5 votes vote down vote up
package org.uaparser.scala

import java.io.InputStream
import java.util.{ Collections, LinkedHashMap, Map => JMap }
import scala.util.Try

case class CachingParser(parser: Parser, maxEntries: Int) extends UserAgentStringParser {
  lazy val clients: JMap[String, Client] = Collections.synchronizedMap(
    new LinkedHashMap[String, Client](maxEntries + 1, 1.0f, true) {
      override protected def removeEldestEntry(eldest: JMap.Entry[String, Client]): Boolean =
        super.size > maxEntries
    }
  )
  def parse(agent: String): Client = Option(clients.get(agent)).getOrElse {
    val client = parser.parse(agent)
    clients.put(agent, client)
    client
  }
}

object CachingParser {
  val defaultCacheSize: Int = 1000
  def fromInputStream(source: InputStream, size: Int = defaultCacheSize): Try[CachingParser] =
    Parser.fromInputStream(source).map(CachingParser(_, size))
  def default(size: Int = defaultCacheSize): CachingParser = CachingParser(Parser.default, size)
} 
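
A usage sketch: the access-ordered LinkedHashMap acts as an LRU cache, so a repeated user-agent string skips the regex matching entirely.

import org.uaparser.scala.CachingParser

val parser = CachingParser.default(size = 5000)
val first = parser.parse("curl/7.64.1")  // parsed by the underlying Parser
val second = parser.parse("curl/7.64.1") // served from the synchronized LRU cache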
Example 188
Source File: Runner.scala    From doobie-codegen   with MIT License 5 votes vote down vote up
package mdmoss.doobiegen

import java.io.{File, PrintWriter}
import java.nio.file.Paths

import mdmoss.doobiegen.GenOptions.{GenOption, Ignore}
import mdmoss.doobiegen.StatementTypes.Statement
import mdmoss.doobiegen.output.SourceWriter
import org.parboiled2.ParseError

import scala.collection.mutable.ListBuffer
import scala.util.{Failure, Success, Try}

object Runner {

  sealed trait TestDBSource
  case class TestDatabase(driver: String, url: String, username: String, password: String) extends TestDBSource
  case class InsertString(source: String) extends TestDBSource

  sealed trait TargetVersion

  object TargetVersion {
    case object DoobieV023 extends TargetVersion
    case object DoobieV024 extends TargetVersion
    case object DoobieV030 extends TargetVersion
    case object DoobieV04 extends TargetVersion
  }

  case class Target(
    schemaDir: String,
    testDb: TestDBSource,
    src: String,
    `package`: String,
    statements: Option[Map[String, List[Statement]]],
    columnOptions: Map[String, Map[String, List[GenOption]]],
    quiet: Boolean = false,
    targetVersion: TargetVersion = TargetVersion.DoobieV023,
    // This is mainly an override for testing
    tableSpecificStatements: Map[String, List[Statement]]
  ) {

    def enclosingPackage = `package`.split('.').reverse.headOption
  }

  object Target {
    def apply(
      schemaDir: String,
      testDb: TestDatabase,
      src: String,
      `package`: String
    ): Target = Target(
      schemaDir = schemaDir,
      testDb = testDb,
      src = src,
      `package` = `package`,
      statements = None,
      columnOptions = Map(),
      quiet = false,
      targetVersion = TargetVersion.DoobieV023,
      tableSpecificStatements = Map.empty[String, List[Statement]]
    )
  }

  val Default = Target(
    schemaDir = "sql/",
    TestDatabase(
      "org.postgresql.Driver",
      "jdbc:postgresql:gen",
      "test",
      "test"
    ),
    src = "out/src",
    `package` = "mdmoss.doobiegen.db"
  )

  def run(target: Target) = {
    // NOTE: the body of `run` is truncated in this listing; only the cleanup
    // fragment survives. It removes previously generated "gen" directories
    // under the destination source tree. `destDir` is reconstructed here from
    // `target.src` so the fragment compiles.
    val destDir = Paths.get(target.src)
    destDir.toFile.getParentFile.listFiles()
      .filter(_.isDirectory)
      .filter(_.toPath.endsWith("gen"))
      .foreach(delete)
  }

  def delete(file: File): Unit = {
    if (file.isDirectory) {
      file.listFiles().foreach(delete)
      file.delete()
    } else {
      file.delete()
    }
  }

  val Seperator = "*" * 80
} 
Example 189
Source File: TokenSpec.scala    From lucene4s   with MIT License 5 votes vote down vote up
package tests

import java.util.UUID

import org.scalatest.concurrent.Eventually
import org.scalatest.{Matchers, WordSpec}

import scala.util.Try
import com.outr.lucene4s._
import com.outr.lucene4s.field.{Field, FieldType}

class TokenSpec extends WordSpec with Matchers with Eventually {
  val lucene: Lucene = new DirectLucene(uniqueFields = List.empty, defaultFullTextSearchable = true, autoCommit = true)

  val sessionTokenField: Field[String] = lucene.create.field[String](name = "sessionTokenField", FieldType.Untokenized)
  val sessionEmailField: Field[String] = lucene.create.field[String](name = "sessionEmailField")
  val sessionExpireField: Field[Long] = lucene.create.field[Long](name = "sessionExpireField")

  "generate session tokens for emails, expire tokens and assert that expire time is set" in {
    val userEmails = (1 to 10) map (i => s"[email protected]")

    // Generate UUID -> e-mail tuples and index them
    val tokensAndEmails: Seq[(String, String)] = userEmails map { email =>
      val token = UUID.randomUUID().toString.replaceAllLiterally("-", "")
      lucene.doc().fields(sessionTokenField(token), sessionEmailField(email)).index()
      (token, email)
    }

    // Update all the tokens to be expired
    tokensAndEmails foreach {
      case (token, email) => lucene
        .update(sessionTokenField(token))
        .fields(
          sessionTokenField(token),
          sessionEmailField(email),
          sessionExpireField(System.currentTimeMillis())
        ).index()
    }
    lucene.commit()

    // Query each token and verify values are correct
    tokensAndEmails foreach {
      case (token, email) => {
//        val searchTerm = sessionTokenField(token.replaceAllLiterally("-", ""))
        val searchTerm = parse(sessionTokenField, token.replaceAllLiterally("-", ""))
        val results = lucene.query().filter(searchTerm).search().results
        results should have size 1
        val headResult = results.head

        headResult(sessionTokenField) shouldBe token
        headResult(sessionEmailField) shouldBe email
        Try(headResult(sessionExpireField)).toOption shouldBe defined
      }
    }
  }
} 
Example 190
Source File: LAPACK.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.linalg

import java.lang.reflect.Method

import com.sun.jna.{FunctionMapper, Library, Native, NativeLibrary, Pointer}
import com.sun.jna.ptr.{DoubleByReference, IntByReference}

import scala.util.{Failure, Success, Try}
import is.hail.utils._

class UnderscoreFunctionMapper extends FunctionMapper {
  override def getFunctionName(library: NativeLibrary, method: Method): String = {
    method.getName() + "_"
  }
}

object LAPACK {
  lazy val libraryInstance = {
    val standard = Native.loadLibrary("lapack", classOf[LAPACKLibrary]).asInstanceOf[LAPACKLibrary]

    versionTest(standard) match {
      case Success(version) =>
        log.info(s"Imported LAPACK version ${version} with standard names")
        standard
      case Failure(exception) =>
        val underscoreAfterMap = new java.util.HashMap[String, FunctionMapper]()
        underscoreAfterMap.put(Library.OPTION_FUNCTION_MAPPER, new UnderscoreFunctionMapper)
        val underscoreAfter = Native.loadLibrary("lapack", classOf[LAPACKLibrary], underscoreAfterMap).asInstanceOf[LAPACKLibrary]
        versionTest(underscoreAfter) match {
          case Success(version) =>
            log.info(s"Imported LAPACK version ${version} with underscore names")
            underscoreAfter
          case Failure(exception) =>
            throw exception
        }
    }
  }

  def dgeqrf(M: Int, N: Int, A: Long, LDA: Int, TAU: Long, WORK: Long, LWORK: Int): Int = {
    val mInt = new IntByReference(M)
    val nInt = new IntByReference(N)
    val LDAInt = new IntByReference(LDA)
    val LWORKInt = new IntByReference(LWORK)
    val infoInt = new IntByReference(1)
    libraryInstance.dgeqrf(mInt, nInt, A, LDAInt, TAU, WORK, LWORKInt, infoInt)
    infoInt.getValue()
  }

  def dorgqr(M: Int, N: Int, K: Int, A: Long, LDA: Int, TAU: Long, WORK: Long, LWORK: Int): Int = {
    val mInt = new IntByReference(M)
    val nInt = new IntByReference(N)
    val kInt = new IntByReference(K)
    val LDAInt = new IntByReference(LDA)
    val LWORKInt = new IntByReference(LWORK)
    val infoInt = new IntByReference(1)
    libraryInstance.dorgqr(mInt, nInt, kInt, A, LDAInt, TAU, WORK, LWORKInt, infoInt)
    infoInt.getValue()
  }

  private def versionTest(libInstance: LAPACKLibrary): Try[String] = {
    val major = new IntByReference()
    val minor = new IntByReference()
    val patch = new IntByReference()

    TryAll {
      libInstance.ilaver(major, minor, patch)
      s"${major.getValue}.${minor.getValue}.${patch.getValue}"
    }
  }
}

trait LAPACKLibrary extends Library {
  def dgeqrf(M: IntByReference, N: IntByReference, A: Long, LDA: IntByReference, TAU: Long, WORK: Long, LWORK: IntByReference, INFO: IntByReference)
  def dorgqr(M: IntByReference, N: IntByReference, K: IntByReference, A: Long, LDA: IntByReference, TAU: Long, WORK: Long, LWORK: IntByReference, Info: IntByReference)
  def ilaver(MAJOR: IntByReference, MINOR: IntByReference, PATCH: IntByReference)
} 
Example 191
Source File: BLAS.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.linalg

import com.sun.jna.{FunctionMapper, Library, Native}
import com.sun.jna.ptr.{DoubleByReference, FloatByReference, IntByReference}
import is.hail.utils._

import scala.util.{Failure, Success, Try}


object BLAS {
  lazy val libraryInstance = {
    val standard = Native.loadLibrary("blas", classOf[BLASLibrary]).asInstanceOf[BLASLibrary]

    verificationTest(standard) match {
      case Success(_) =>
        log.info("Imported BLAS with standard names")
        standard
      case Failure(exc) =>
        val underscoreAfterMap = new java.util.HashMap[String, FunctionMapper]()
        underscoreAfterMap.put(Library.OPTION_FUNCTION_MAPPER, new UnderscoreFunctionMapper)
        val underscoreAfter = Native.loadLibrary("blas", classOf[BLASLibrary], underscoreAfterMap).asInstanceOf[BLASLibrary]
        verificationTest(underscoreAfter) match {
          case Success(_) =>
            log.info("Imported BLAS with underscore names")
            underscoreAfter
          case Failure(exception) =>
            throw exception
        }
    }
  }

  private def verificationTest(libInstance: BLASLibrary): Try[Unit] = {
    val n = new IntByReference(2)
    val incx = new IntByReference(1)
    val x = Array(3.0, 4.0)
    TryAll {
      val norm = libInstance.dnrm2(n, x, incx)
      assert(Math.abs(norm - 5.0) < .1)
    }
  }

  def sgemm(TRANSA: String, TRANSB: String, M: Int, N: Int, K: Int, ALPHA: Float, A: Long, LDA: Int, B: Long, LDB: Int, BETA: Float, C: Long, LDC: Int) = {
    val mInt = new IntByReference(M)
    val nInt = new IntByReference(N)
    val kInt = new IntByReference(K)
    val alphaDouble = new FloatByReference(ALPHA)
    val LDAInt = new IntByReference(LDA)
    val LDBInt = new IntByReference(LDB)
    val betaDouble = new FloatByReference(BETA)
    val LDCInt = new IntByReference(LDC)

    libraryInstance.sgemm(TRANSA, TRANSB, mInt, nInt, kInt, alphaDouble, A, LDAInt, B, LDBInt, betaDouble, C, LDCInt)
  }

  def dgemm(TRANSA: String, TRANSB: String, M: Int, N: Int, K: Int, ALPHA: Double, A: Long, LDA: Int, B: Long, LDB: Int, BETA: Double, C: Long, LDC: Int) = {
    val mInt = new IntByReference(M)
    val nInt = new IntByReference(N)
    val kInt = new IntByReference(K)
    val alphaDouble = new DoubleByReference(ALPHA)
    val LDAInt = new IntByReference(LDA)
    val LDBInt = new IntByReference(LDB)
    val betaDouble = new DoubleByReference(BETA)
    val LDCInt = new IntByReference(LDC)

    libraryInstance.dgemm(TRANSA, TRANSB, mInt, nInt, kInt, alphaDouble, A, LDAInt, B, LDBInt, betaDouble, C, LDCInt)
  }
}

trait BLASLibrary extends Library {
  def dgemm(TRANSA: String, TRANSB: String, M: IntByReference, N: IntByReference, K: IntByReference,
    ALPHA: DoubleByReference, A: Long, LDA: IntByReference, B: Long, LDB: IntByReference,
    BETA: DoubleByReference, C: Long, LDC: IntByReference)
  def sgemm(TRANSA: String, TRANSB: String, M: IntByReference, N: IntByReference, K: IntByReference,
    ALPHA: FloatByReference, A: Long, LDA: IntByReference, B: Long, LDB: IntByReference,
    BETA: FloatByReference, C: Long, LDC: IntByReference)
  def dnrm2(N: IntByReference, X: Array[Double], INCX: IntByReference): Double
} 
Example 192
Source File: package.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util

import scala.language.higherKinds
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable
import scala.util.{Success, Try}


package object exceptions {

  def trySequence[A, M[X] <: TraversableOnce[X]](
    in: M[Try[A]]
  )(implicit cbf: CanBuildFrom[M[Try[A]], A, M[A]]): Try[M[A]] = {
    in.foldLeft(Success(cbf(in)): Try[mutable.Builder[A, M[A]]]) { (tr, ta) =>
        {
          for {
            r <- tr
            a <- ta
          } yield r += a
        }
      }
      .map(_.result())
  }

  def stackTraceToString(t: Throwable): String = {
    val w = new java.io.StringWriter()
    val pw = new java.io.PrintWriter(w)
    t.printStackTrace(pw)
    val resp = w.toString
    w.close()
    pw.close()
    resp
  }
} 
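
A usage sketch for trySequence: it folds a collection of Try values into a Try of a collection, yielding the first Failure when any element failed.

import cmwell.util.exceptions._

import scala.util.Try

val ok = trySequence(List(Try(1), Try(2), Try(3)))               // Success(List(1, 2, 3))
val bad = trySequence(List(Try(1), Try[Int](sys.error("boom")))) // Failure(RuntimeException: boom)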
Example 193
Source File: WIPRegulator.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util.concurrent

import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
import com.typesafe.scalalogging.Logger
import scala.concurrent.ExecutionContext.Implicits.global
import cmwell.util.jmx._


case class WIPRegulator(private var numOfWorkers: Int, noWorkerAlertInterval: Long = 30000) extends WIPRegulatorMBean {

  jmxRegister(this, "cmwell.indexer:type=WIPRegulator")

  private val wipQueue = new ArrayBlockingQueue[String](50)

  // Set initial number of concurrent requests
  for (i <- 1 to numOfWorkers)(wipQueue.add("WIP Worker " + i))

  def doWithWorkerAsync[T](f: => Future[T])(implicit logger: Logger): Future[T] = {
    var notFinished = true
    var reply: Future[T] = Future.failed(FailedToExecuteException())

    while (notFinished) {
      Try {
        wipQueue.poll(noWorkerAlertInterval, TimeUnit.MILLISECONDS)
      } match {
        case Success(null) =>
          logger.error(s"waited for $noWorkerAlertInterval miliseconds and did not get worker, something is wrong")
        case Success(worker) => reply = f; reply.onComplete(_ => wipQueue.add(worker)); notFinished = false
        case Failure(exception) =>
          logger.error("InterruptedException while trying to poll a worker from the queue"); reply = Future.failed(exception)
          notFinished = false
      }
    }

    reply
  }

  def getNumOfWorkers(): Int = wipQueue.size()

  def addWorker(): Unit = this.synchronized { numOfWorkers += 1; wipQueue.add(s"WIP Worker $numOfWorkers") }

  def removeWorker(): Unit = this.synchronized { wipQueue.remove(s"WIP Worker $numOfWorkers"); numOfWorkers -= 1 }

}

trait WIPRegulatorMBean {
  def getNumOfWorkers(): Int
  def addWorker()
  def removeWorker()
}

case class FailedToExecuteException(msg: String = "Undefined reason") extends Exception(msg) 
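
A usage sketch (the logger name is illustrative): the regulator caps concurrency by handing out workers from the blocking queue, so at most numOfWorkers of the submitted futures are in flight at once.

import cmwell.util.concurrent.WIPRegulator
import com.typesafe.scalalogging.Logger

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

implicit val logger: Logger = Logger("wip-example")
val regulator = WIPRegulator(numOfWorkers = 4)

// At most 4 of these run concurrently; the rest block until a worker is returned.
val replies = (1 to 16).map(i => regulator.doWithWorkerAsync(Future(i * 2)))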
Example 194
Source File: SimpleScheduler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util.concurrent

import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, ScheduledThreadPoolExecutor}

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.util.Try


object SimpleScheduler extends LazyLogging {
  private[this] lazy val timer = {
    val executor = new ScheduledThreadPoolExecutor(1)
    executor.setRemoveOnCancelPolicy(true)
    executor.asInstanceOf[ScheduledExecutorService]
  }

  //method is private, since work executed on our single timer thread must be kept to a minimum.
  //this method can be used if and only if we know `body` is a safe and small job.
  private[util] def scheduleInstant[T](duration: FiniteDuration)(body: => T) = {
    val p = Promise[T]()
    val cancellable = timer.schedule(
      new Runnable {
        override def run(): Unit = {
          // body must not be expensive to compute, since it runs on our only timer thread.
          p.complete(Try(body))
        }
      },
      duration.toMillis,
      java.util.concurrent.TimeUnit.MILLISECONDS
    )
    p.future -> Cancellable(cancellable)
  }

  def scheduleAtFixedRate(initialDelay: FiniteDuration, period: FiniteDuration, mayInterruptIfRunning: Boolean = false)(
    task: => Any
  )(implicit executionContext: ExecutionContext): Cancellable = {
    // memoize runnable task
    val runnable: Runnable = new Runnable {
      override def run(): Unit = Try(task).failed.foreach { err =>
        logger.error("schedueled task failed", err)
      }
    }

    val cancellable = timer.scheduleAtFixedRate(new Runnable {
      override def run(): Unit = executionContext.execute(runnable)
    }, initialDelay.toMillis, period.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS)

    Cancellable(cancellable, mayInterruptIfRunning)
  }

  def schedule[T](duration: FiniteDuration)(body: => T)(implicit executionContext: ExecutionContext): Future[T] = {
    val p = Promise[T]()
    timer.schedule(
      new Runnable {
        override def run(): Unit = {
          // body may be expensive to compute, so it must not run on our only timer thread;
          // we compute it inside a `Future` running on the given executionContext.
          p.completeWith(Future(body)(executionContext))
        }
      },
      duration.toMillis,
      java.util.concurrent.TimeUnit.MILLISECONDS
    )
    p.future
  }

  def scheduleFuture[T](duration: Duration)(body: => Future[T]): Future[T] = {
    val p = Promise[T]()
    timer.schedule(new Runnable {
      override def run(): Unit = p.completeWith(body)
    }, duration.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS)
    p.future
  }
}

object Cancellable {
  def apply(scheduledFuture: ScheduledFuture[_], mayInterruptIfRunning: Boolean = false) =
    new Cancellable { override def cancel(): Boolean = scheduledFuture.cancel(mayInterruptIfRunning) }
}

trait Cancellable {
  def cancel(): Boolean
} 
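
A usage sketch: schedule runs a one-shot body on the supplied ExecutionContext rather than on the timer thread, while scheduleAtFixedRate returns a Cancellable for stopping the recurring task.

import cmwell.util.concurrent.SimpleScheduler

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

val delayed = SimpleScheduler.schedule(5.seconds)("done") // Future completed after ~5s

val ticker = SimpleScheduler.scheduleAtFixedRate(0.seconds, 1.second) {
  println("tick")
}
// ...later
ticker.cancel()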
Example 195
Source File: ChildFirstURLClassLoader.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util.loading

import java.io.{File, InputStream}
import java.net.{URL, URLClassLoader}

// scalastyle:off
import sun.misc.CompoundEnumeration
// scalastyle:on
import scala.util.{Failure, Success, Try}


class ChildFirstURLClassLoader(urls: Array[URL], parent: ClassLoader, except: Seq[String] = Seq())
    extends URLClassLoader(urls, parent) {

  protected override def loadClass(name: String, resolve: Boolean): Class[_] = {
    def tryFind(findAction: => Class[_]): Option[Class[_]] = Try(findAction) match {
      case Failure(e: ClassNotFoundException) => None
      case Failure(e)                         => throw e
      case Success(c)                         => Some(c)
    }

    def loadLocally = if (except.exists(name.startsWith)) None else tryFind(findClass(name))
    def loadFromParent = if (getParent == null) None else tryFind(getParent.loadClass(name))

    val alreadyLoaded = findLoadedClass(name)
    if (alreadyLoaded != null) {
      alreadyLoaded
    } else {

      val `class` = loadLocally.getOrElse(loadFromParent.orNull)

      if (resolve)
        resolveClass(`class`)
      `class`
    }
  }

  override def getResource(name: String): URL = findResource(name) match {
    case null => super.getResource(name)
    case u    => u
  }

  override def getResources(name: String): java.util.Enumeration[URL] = {
    val parent = getParent
    val localUrls = findResources(name)
    val parentUrls: java.util.Enumeration[URL] =
      if (parent != null) parent.getResources(name) else java.util.Collections.emptyEnumeration()
    new CompoundEnumeration(Array(localUrls, parentUrls))
  }

  override def getResourceAsStream(name: String): InputStream = {
    getResource(name) match {
      case null => null
      case url =>
        Try(url.openStream) match {
          case Success(x) => x
          case Failure(_) => null
        }
    }
  }
}

object ChildFirstURLClassLoader {
  def loadClassFromJar[T](className: String, jarPath: String, commonPackageNames:String, excludes: Seq[String] = Seq()): T =
    Loader(jarPath, excludes :+ commonPackageNames).load(className)

  case class Loader(jarPath: String, excludes: Seq[String] = Seq()) {
    val urls = if(new java.io.File(jarPath).isFile) Array(new File(jarPath).toURI.toURL) else Array[URL](new URL(jarPath))
    private val cl =
      new ChildFirstURLClassLoader(urls, this.getClass.getClassLoader, excludes)
    def load[T](className: String) = cl.loadClass(className).newInstance.asInstanceOf[T]
  }

} 
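
A usage sketch (jar path, class name and shared package are hypothetical): classes resolve child-first from the jar, except the common package, which is delegated to the parent loader so caller and plugin share those types.

import cmwell.util.loading.ChildFirstURLClassLoader

val plugin = ChildFirstURLClassLoader.loadClassFromJar[Runnable](
  className = "com.example.MyPlugin",          // hypothetical plugin class with a no-arg constructor
  jarPath = "/opt/plugins/my-plugin.jar",      // hypothetical jar location
  commonPackageNames = "com.example.shared")   // loaded from the parent, not the jar
plugin.run()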
Example 196
Source File: MarkdownFormatter.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package markdown

import cmwell.domain.FileInfoton

import scala.util.Try


object MarkdownFormatter {
  def asHtmlString(i: FileInfoton): String = {
    val title = Try(i.systemFields.path.drop(i.systemFields.path.lastIndexOf('/') + 1).replace(
      ".md", "")).toOption.getOrElse(i.systemFields.path)
    val content = xml.Utility.escape(i.content.get.asString)
    asHtmlString(title, content)
  }

  def asHtmlString(title: String, content: String): String = {
    s"""|<span style="display:none">[comment]: <> (
        |<html>
        |  <head>
        |    <meta charset="utf-8">
        |    <script src="/meta/app/sys/js/jquery-1.6.2.min.js" type="text/javascript"></script>
        |    <script type="text/javascript" src="/meta/app/sys/js/Markdown.Converter.js"></script>
        |    <script type="text/javascript" src="/meta/app/sys/js/Markdown.Sanitizer.js"></script>
        |    <script type="text/javascript" src="/meta/app/sys/js/Markdown.Extra.js"></script>
        |    <script src="/meta/app/sys/js/highlight/highlight.pack.js" type="text/javascript"></script>
        |    <script src="/meta/app/sys/js/cmwmdviewer.js" type="text/javascript"></script>
        |    <title>$title</title>
        |    <link rel="stylesheet" href="/meta/app/sys/js/highlight/styles/aiaas.css"/>
        |    <link rel="stylesheet" href="/meta/app/sys/css/cmwmdviewer.css"/>
        |  </head>
        |)</span>""".stripMargin.replace("\n", "") + s"\n\n$content"
  }
} 
Example 197
Source File: DigestHeader.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package security.httpauth

import scala.util.parsing.combinator.RegexParsers
import scala.util.{Success, Try}


trait DigestHeader {
  val realm: String
  val nonce: String
  val opaque: String
}

case class DigestServerHeader(realm: String, nonce: String, opaque: String) extends DigestHeader {
  override def toString = {
    Seq("realm" -> realm, "nonce" -> nonce, "opaque" -> opaque)
      .map { case (key, value) => s"""$key="$value"""" }
      .mkString("Digest ", ",", "")
  }
}

case class DigestClientHeader(realm: String, nonce: String, opaque: String, username: String, response: String)
    extends DigestHeader

object DigestClientHeader {
  private val mandatoryKeys = Set("realm", "nonce", "opaque", "username", "response")

  def fromMap(map: Map[String, String]) = {
    require(mandatoryKeys.forall(map.keySet), "Missing one or more mandatory keys")
    DigestClientHeader(map("realm"), map("nonce"), map("opaque"), map("username"), map("response"))
  }
}

object DigestHeaderUtils {
  def fromClientHeaderString(s: String) = {
    Try(DigestClientHeader.fromMap(DigestHeaderParser.parseHeader(s))) match {
      case Success(dch) => dch
      case _ =>
        throw new IllegalArgumentException(s"$s is not a valid Digest Client Header")
    }
  }
}

object DigestHeaderParser extends RegexParsers {
  private def keyParser: Parser[String] = "[a-zA-Z0-9\"?&/_-]+".r ^^ { _.toString.replace("\"", "") }
  private def valueParser: Parser[String] = "[a-zA-Z0-9\"?=&/_-]+".r ^^ { _.toString.replace("\"", "") }
  private def keyValueParser: Parser[(String, String)] = keyParser ~ "=" ~ valueParser ^^ { case k ~ _ ~ v => k -> v }
  private def digestHeaderParser: Parser[Map[String, String]] = "Digest " ~> repsep(keyValueParser, ",\\s?".r) ^^ {
    _.toMap
  }

  def parseHeader(headerValue: String): Map[String, String] = parse(digestHeaderParser, headerValue) match {
    case Success(map, _) => map
    case _               => Map.empty[String, String]
  }
} 
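
A usage sketch for the combinator parser: a well-formed header becomes a key/value map, and anything unparsable yields Map.empty rather than throwing.

import security.httpauth.DigestHeaderParser

val parsed = DigestHeaderParser.parseHeader(
  """Digest realm="cmwell",nonce="abc123",opaque="xyz789"""")
// Map(realm -> cmwell, nonce -> abc123, opaque -> xyz789)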
Example 198
Source File: Token.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package security


import cmwell.ws.Settings
import com.typesafe.config.ConfigFactory
import org.joda.time.DateTime
import pdi.jwt.{Jwt, JwtAlgorithm, JwtClaim, JwtOptions}
import security.Token.{secret, secret2}

import scala.util.Try

class Token(jwt: String, authCache: EagerAuthCache) {
  private val requiredClaims = Set("sub", "exp")

  private val claimsSet = {
    val decodedJ = Jwt.decodeRaw(jwt, JwtOptions(signature = false))

    if (decodedJ.isFailure)
      throw new IllegalArgumentException("Given string was not in JWT format")

    val decoded = ujson.read(decodedJ.get)

    if (!(requiredClaims.map(c => Try(decoded(c))).forall(_.isSuccess)))
      throw new IllegalArgumentException("Mandatory claims are missing from token")

    decoded
  }

  val username = claimsSet("sub").str
  val expiry = new DateTime(claimsSet("exp").num.toLong)

  def isValid = {
    (Jwt.isValid(jwt, secret, Seq(JwtAlgorithm.HS256)) || Jwt.isValid(jwt, secret2, Seq(JwtAlgorithm.HS256))) &&
    expiry.isAfterNow &&
    (Try(claimsSet("rev").num.toInt).recover{case _ => 0}.get >=
      Token.getUserRevNum(username, authCache) || username == "root") // root has immunity to revision revoking
  }

  override def toString = s"Token(${claimsSet})"
}

object Token {
  def apply(jwt: String, authCache: EagerAuthCache) = Try(new Token(jwt, authCache)).toOption

  // not using ws.Settings, so it'd be available from `sbt ws/console`
  private lazy val secret = ConfigFactory.load().getString("play.http.secret.key")
  // not using ws.Settings, so it'd be available from `sbt ws/console`
  private lazy val secret2 = ConfigFactory.load().getString("cmwell.ws.additionalSecret.key")
  private val jwtHeader = JwtAlgorithm.HS256

  private def getUserRevNum(username: String, authCache: EagerAuthCache) =
    authCache.getUserInfoton(username).flatMap(u => (u \ "rev").asOpt[Int]).getOrElse(0)

  def generate(authCache: EagerAuthCache,
               username: String,
               expiry: Option[DateTime] = None,
               rev: Option[Int] = None,
               isAdmin: Boolean = false): String = {
    val maxDays = Settings.maxDaysToAllowGenerateTokenFor
    if (!isAdmin && expiry.isDefined && expiry.get.isAfter(DateTime.now.plusDays(maxDays))) {
      throw new IllegalArgumentException(s"Token expiry must be less than $maxDays days")
    }
    if (!isAdmin && rev.isDefined) {
      throw new IllegalArgumentException("rev should only be supplied in Admin mode (i.e. manually via console)")
    }

    val claimsJson =
        s"""{"sub":"$username", "exp":${expiry.getOrElse(DateTime.now.plusDays(1)).getMillis},""" +
           s""""rev": ${rev.getOrElse(getUserRevNum(username, authCache))}}"""

    Jwt.encode(JwtClaim(claimsJson), secret, jwtHeader)
  }
} 
Example 199
Source File: ServicesRoutesCache.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package logic.services

import akka.actor.ActorSystem
import cmwell.domain.{DeletedInfoton, Infoton, ObjectInfoton}
import cmwell.fts.PathFilter
import cmwell.ws.Settings
import com.typesafe.scalalogging.LazyLogging
import javax.inject.{Inject, Singleton}
import logic.CRUDServiceFS

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

@Singleton
class ServicesRoutesCache @Inject()(crudService: CRUDServiceFS)(implicit ec: ExecutionContext, sys: ActorSystem) extends LazyLogging {
  private var services: Map[String, ServiceDefinition] = Map.empty
  private val (initialDelay, interval) = Settings.servicesRoutesCacheInitialDelay -> Settings.servicesRoutesCacheRefreshInterval

  sys.scheduler.schedule(initialDelay, interval)(populate())

  def find(path: String): Option[ServiceDefinition] =
    services.values.find(sd => path.startsWith(sd.route))

  def list: Set[String] = services.values.map(_.route).toSet

  def populate(): Future[Unit] = {
    //TODO use consume API, don't get everything each time
    crudService.search(Some(PathFilter("/meta/services", descendants = false)), withData = true, withDeleted = true).andThen {
      case Success(sr) =>
        val toAddOrUpdate = sr.infotons.collect { case oi: ObjectInfoton => oi }
        val toRemove = sr.infotons.collect { case di: DeletedInfoton => di }

        toAddOrUpdate.map(deserialize).
          collect { case Success(se) => se }.
          foreach { se => services += se.infotonPath -> se.serviceDefinition }

        toRemove.foreach(services -= _.systemFields.path)

      case Failure(t) => logger.error("Could not load Services from /meta/services", t)
    }.map(_ => ())
  }

  private def deserialize(infoton: Infoton): Try[ServiceEntry] = Try {
    val fields = infoton.fields.getOrElse(throw new RuntimeException(s"Infoton with no fields was not expected (path=${infoton.systemFields.path})"))

    def field(name: String): String = fields(name).head.value.toString

    val route = field("route")
    field("type.lzN1FA") match {
      case "cmwell://meta/sys#Redirection" =>
        val sourcePattern = field("sourcePattern")
        val replacement = field("replacement")
        val replaceFunc = (input: String) => sourcePattern.r.replaceAllIn(input, replacement)
        ServiceEntry(infoton.systemFields.path, RedirectionService(route, sourcePattern, replaceFunc))
      case "cmwell://meta/sys#Source" => ??? //TODO implement the unimplemented
      case "cmwell://meta/sys#Binary" => ??? //TODO implement the unimplemented
      case other => throw new RuntimeException(s"Infoton with type $other was not expected (path=${infoton.systemFields.path})")
    }
  }

  case class ServiceEntry(infotonPath: String, serviceDefinition: ServiceDefinition)
} 
Example 200
Source File: LogLevelHandler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package controllers

import cmwell.ctrl.config.Jvms
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid
import k.grid.monitoring.{MonitorActor, SetNodeLogLevel}
import play.api.mvc._
import security.AuthUtils
import javax.inject._

import filters.Attrs

import scala.util.Try

@Singleton
class LogLevelHandler @Inject()(authUtils: AuthUtils) extends InjectedController with LazyLogging {
  def handleSetLogLevel = Action { implicit req =>
    val tokenOpt = authUtils.extractTokenFrom(req)
    if (authUtils.isOperationAllowedForUser(security.Admin, tokenOpt))
      setLogLevel(req)
    else
      Forbidden("not authorized")
  }

  private def setLogLevel(req: Request[AnyContent]) = {
    val validLogLevels = SetNodeLogLevel.lvlMappings.keySet
    val roleMapping = Map("WEB" -> Jvms.WS, "BG" -> Jvms.BG, "CTRL" -> Jvms.CTRL, "CW" -> Jvms.CW, "DC" -> Jvms.DC)
    val validComponents = roleMapping.keySet
    val validHosts = Grid.availableMachines

    val lvlStr = req.getQueryString("lvl")
    val component = req.getQueryString("comp").map(_.toUpperCase())
    val host = req.getQueryString("host")
    val duration = if (req.getQueryString("duration").isEmpty) Some("10") else req.getQueryString("duration")

    (lvlStr, component, host, duration) match {
      case (Some(l), _, _, _) if !validLogLevels.contains(l.toUpperCase) =>
        BadRequest(s"Bad log level provided, the valid log levels are ${validLogLevels.mkString(", ")}.")
      case (_, Some(c), _, _) if !validComponents.contains(c.toUpperCase) =>
        BadRequest(s"Bad component provided, the valid components are ${validComponents.mkString(", ")}.")
      case (_, _, Some(h), _) if !validHosts.contains(h.toUpperCase) =>
        BadRequest(s"Bad host provided, the valid hosts are ${validHosts.mkString(", ")}.")
      case (_, _, _, Some(d)) if Try(d.toInt).isFailure =>
        BadRequest(
          s"Bad duration provided, please provide a positive int, or 0 if you wish to keep this log level indefinitely."
        )
      case (None, _, _, _) =>
        BadRequest(s"No log level provided, the valid log levels are ${validLogLevels.mkString(", ")}")
      case _ =>
        val lvl = lvlStr.flatMap(SetNodeLogLevel.levelTranslator)

        lvl.foreach { level =>
          val members = {
            val f1 = host
              .map { h =>
                Grid.jvmsAll.filter(_.host == h)
              }
              .getOrElse(Grid.jvmsAll)

            val f2 = component
              .map(c => roleMapping(c))
              .map { c =>
                f1.filter(h => h.identity.isDefined && h.identity.get == c)
              }
              .getOrElse(f1)
            f2
          }

          logger.info(s"Changing the log level of [${members.mkString(", ")}] to $level")

          members.foreach { member =>
            Grid.selectActor(MonitorActor.name, member) ! SetNodeLogLevel(level, duration.map(_.toInt))
          }
        }
        Ok("Done!")
    }
  }
}