java.nio.file.Paths Scala Examples

The following examples show how to use java.nio.file.Paths. Each example is taken from an open-source project; the source file, project, and license are listed above the code.
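Almost every snippet below leans on the same handful of Paths and Path calls. Here is a minimal, self-contained sketch of those calls (the file and directory names are placeholders chosen for illustration):

import java.net.URI
import java.nio.file.{Path, Paths}

object PathsBasics {
  def main(args: Array[String]): Unit = {
    // Join several segments into one Path (the varargs overload used throughout the examples).
    val config: Path = Paths.get("/etc", "iotchain", "config.yaml")

    // Resolve children against a base directory.
    val logs: Path = Paths.get(System.getProperty("user.home")).resolve(".myapp").resolve("logs")

    // Convert between the Path, File and URI views of the same location.
    val asFile: java.io.File = config.toFile
    val asUri: URI = logs.toAbsolutePath.toUri

    println(s"$config | $asFile | $asUri")
  }
}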
Example 1
Source File: SnowflakeConnectorUtils.scala    From spark-snowflake   with Apache License 2.0
package net.snowflake.spark.snowflake

import java.nio.file.Paths
import java.security.InvalidKeyException

import net.snowflake.spark.snowflake.pushdowns.SnowflakeStrategy
import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}


object SnowflakeConnectorUtils {

  // The original file also defines a logger and an enablePushdownSession
  // counterpart that the code below references; minimal stand-ins are restored
  // here so the excerpt is self-contained.
  private val log: Logger = LoggerFactory.getLogger(getClass.getName)

  def enablePushdownSession(session: SparkSession): Unit =
    if (!session.experimental.extraStrategies.exists(_.isInstanceOf[SnowflakeStrategy])) {
      session.experimental.extraStrategies ++= Seq(new SnowflakeStrategy)
    }

  def disablePushdownSession(session: SparkSession): Unit = {
    session.experimental.extraStrategies = session.experimental.extraStrategies
      .filterNot(strategy => strategy.isInstanceOf[SnowflakeStrategy])
  }

  def setPushdownSession(session: SparkSession, enabled: Boolean): Unit = {
    if (enabled) {
      enablePushdownSession(session)
    } else {
      disablePushdownSession(session)
    }
  }

  // TODO: Improve error handling with retries, etc.

  @throws[SnowflakeConnectorException]
  def handleS3Exception(ex: Exception): Unit = {
    if (ex.getCause.isInstanceOf[InvalidKeyException]) {
      // Most likely cause: Unlimited strength policy files not installed
      var msg: String = "Strong encryption with Java JRE requires JCE " +
        "Unlimited Strength Jurisdiction Policy " +
        "files. " +
        "Follow JDBC client installation instructions " +
        "provided by Snowflake or contact Snowflake " +
        "Support. This needs to be installed in the Java runtime for all Spark executor nodes."

      log.error(
        "JCE Unlimited Strength policy files missing: {}. {}.",
        ex.getMessage: Any,
        ex.getCause.getMessage: Any
      )

      val bootLib: String =
        java.lang.System.getProperty("sun.boot.library.path")

      if (bootLib != null) {
        msg += " The target directory on your system is: " + Paths
          .get(bootLib, "security")
          .toString
        log.error(msg)
      }

      throw new SnowflakeConnectorException(msg)
    } else {
      throw ex
    }
  }
}

class SnowflakeConnectorException(message: String) extends Exception(message)
class SnowflakePushdownException(message: String)
  extends SnowflakeConnectorException(message)
class SnowflakeConnectorFeatureNotSupportException(message: String)
  extends Exception(message)

class SnowflakePushdownUnsupportedException(message: String,
                                            val unsupportedOperation: String,
                                            val details: String,
                                            val isKnownUnsupportedOperation: Boolean)
  extends Exception(message) 
Example 2
Source File: AvroParquetSourceTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.parquet

import java.nio.file.Paths

import io.eels.component.parquet.avro.AvroParquetSource
import io.eels.component.parquet.util.ParquetLogMute
import io.eels.schema._
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.AvroParquetWriter
import org.scalatest.{Matchers, WordSpec}

class AvroParquetSourceTest extends WordSpec with Matchers {
  ParquetLogMute()

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(conf)

  private val personFile = Paths.get(getClass.getResource("/io/eels/component/parquet/person.avro.pq").toURI)
  private val resourcesDir = personFile.getParent

  "AvroParquetSource" should {
    "read schema" in {
      val people = AvroParquetSource(personFile)
      people.schema shouldBe StructType(
        Field("name", StringType, nullable = false),
        Field("job", StringType, nullable = false),
        Field("location", StringType, nullable = false)
      )
    }
    "read parquet files" in {
      val people = AvroParquetSource(personFile.toAbsolutePath()).toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    "read multiple parquet files using file expansion" in {
      import io.eels.FilePattern._
      val people = AvroParquetSource(s"${resourcesDir.toUri.toString}/*.pq").toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner"),
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    // todo add merge to parquet source
    "merge schemas" ignore {

      try {
        fs.delete(new Path("merge1.pq"), false)
      } catch {
        case t: Throwable =>
      }
      try {
        fs.delete(new Path("merge2.pq"), false)
      } catch {
        case t: Throwable =>
      }

      val schema1 = SchemaBuilder.builder().record("schema1").fields().requiredString("a").requiredDouble("b").endRecord()
      val schema2 = SchemaBuilder.builder().record("schema2").fields().requiredInt("a").requiredBoolean("c").endRecord()

      val writer1 = AvroParquetWriter.builder[GenericRecord](new Path("merge1.pq")).withSchema(schema1).build()
      val record1 = new GenericData.Record(schema1)
      record1.put("a", "aaaaa")
      record1.put("b", 124.3)
      writer1.write(record1)
      writer1.close()

      val writer2 = AvroParquetWriter.builder[GenericRecord](new Path("merge2.pq")).withSchema(schema2).build()
      val record2 = new GenericData.Record(schema2)
      record2.put("a", 111)
      record2.put("c", true)
      writer2.write(record2)
      writer2.close()

      ParquetSource(new Path("merge*")).schema shouldBe
        StructType(
          Field("a", StringType, nullable = false),
          Field("b", DoubleType, nullable = false),
          Field("c", BooleanType, nullable = false)
        )

      fs.delete(new Path(".merge1.pq.crc"), false)
      fs.delete(new Path(".merge2.pq.crc"), false)
      fs.delete(new Path("merge1.pq"), false)
      fs.delete(new Path("merge2.pq"), false)
    }
  }
} 
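Example 2 obtains its test data by turning a classpath resource into a Path with Paths.get(url.toURI). A condensed sketch of that pattern, using a hypothetical resource name:

import java.nio.file.{Path, Paths}

object ResourceToPath {
  def main(args: Array[String]): Unit = {
    // getResource returns a URL (or null); toURI lets Paths.get produce a filesystem Path.
    // "/data/person.avro.pq" is a placeholder resource name.
    val url = getClass.getResource("/data/person.avro.pq")
    if (url == null) println("resource not found on the classpath")
    else {
      val path: Path = Paths.get(url.toURI)
      println(s"resource at ${path.toAbsolutePath}, parent directory ${path.getParent}")
    }
  }
}

Note that this conversion only works when the resource is an actual file on disk (as in a test run from the build tree), not when it is packed inside a jar.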
Example 3
Source File: BazelRunfiles.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.bazeltools

import java.nio.file.{Path, Paths}

import com.google.devtools.build.runfiles.Runfiles

trait BazelRunfiles {
  private val MainWorkspace = "com_github_digital_asset_daml"

  private val MainWorkspacePath = Paths.get(MainWorkspace)

  private val inBazelEnvironment =
    Set("RUNFILES_DIR", "JAVA_RUNFILES", "RUNFILES_MANIFEST_FILE", "RUNFILES_MANIFEST_ONLY").exists(
      sys.env.contains)

  def rlocation(path: String): String =
    if (inBazelEnvironment)
      Runfiles.create.rlocation(MainWorkspace + "/" + path)
    else
      path

  def rlocation(path: Path): Path =
    if (inBazelEnvironment) {
      val workspacePathString = MainWorkspacePath
        .resolve(path)
        .toString
        .replaceAllLiterally("\\", "/")
      val runfilePath = Option(Runfiles.create.rlocation(workspacePathString))
      Paths.get(runfilePath.getOrElse(throw new IllegalArgumentException(path.toString)))
    } else
      path

  def requiredResource(name: String): java.io.File = {
    val file = new java.io.File(rlocation(name))
    if (file.exists()) file
    else throw new IllegalStateException(s"File does not exist: ${file.getAbsolutePath}")
  }
}

object BazelRunfiles extends BazelRunfiles 
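The rlocation helpers above prefix a path with the main Bazel workspace and normalize Windows separators before asking the runfiles library for the real location. A trimmed sketch of just the Paths part, with a made-up workspace name:

import java.nio.file.{Path, Paths}

object RunfilesStylePaths {
  // "my_workspace" is a placeholder; real Bazel workspaces have project-specific names.
  private val workspace: Path = Paths.get("my_workspace")

  // Prefix a repository-relative path with the workspace, mirroring the
  // backslash-to-slash normalization done before the runfiles lookup above.
  def workspaceRelative(path: Path): String =
    workspace.resolve(path).toString.replace('\\', '/')

  def main(args: Array[String]): Unit =
    println(workspaceRelative(Paths.get("ledger", "test-common", "model.dar")))
}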
Example 4
Source File: MetricsReporter.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.configuration

import java.net.{InetAddress, InetSocketAddress}
import java.nio.file.{Path, Paths}

import com.codahale.metrics
import com.codahale.metrics.{MetricRegistry, ScheduledReporter}
import com.daml.platform.sandbox.config.InvalidConfigException
import com.google.common.net.HostAndPort
import scopt.Read

import scala.util.Try

sealed trait MetricsReporter {
  def register(registry: MetricRegistry): ScheduledReporter
}

object MetricsReporter {

  case object Console extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.ConsoleReporter
        .forRegistry(registry)
        .build()
  }

  final case class Csv(directory: Path) extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.CsvReporter
        .forRegistry(registry)
        .build(directory.toFile)
  }

  final case class Graphite(address: InetSocketAddress) extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.graphite.GraphiteReporter
        .forRegistry(registry)
        .build(new metrics.graphite.Graphite(address))
  }

  object Graphite {
    val defaultHost: InetAddress = InetAddress.getLoopbackAddress
    val defaultPort: Int = 2003

    def apply(): Graphite =
      Graphite(new InetSocketAddress(defaultHost, defaultPort))

    def apply(port: Int): Graphite =
      Graphite(new InetSocketAddress(defaultHost, port))
  }

  implicit val metricsReporterRead: Read[MetricsReporter] = Read.reads {
    _.split(":", 2).toSeq match {
      case Seq("console") => Console
      case Seq("csv", directory) => Csv(Paths.get(directory))
      case Seq("graphite") =>
        Graphite()
      case Seq("graphite", address) =>
        Try(address.toInt)
          .map(port => Graphite(port))
          .recover {
            case _: NumberFormatException =>
              //noinspection UnstableApiUsage
              val hostAndPort = HostAndPort
                .fromString(address)
                .withDefaultPort(Graphite.defaultPort)
              Graphite(new InetSocketAddress(hostAndPort.getHost, hostAndPort.getPort))
          }
          .get
      case _ =>
        throw new InvalidConfigException(
          """Must be one of "console", "csv:PATH", or "graphite[:HOST][:PORT]".""")
    }
  }

} 
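The metricsReporterRead parser above splits its argument once on ':' and hands the csv case straight to Paths.get. The same split-then-Paths.get idea, without the scopt and Graphite machinery:

import java.nio.file.{Path, Paths}

object ReporterArg {
  // Mirrors only the "csv:PATH" branch of the reader above; error handling simplified.
  def parseCsvDirectory(arg: String): Option[Path] =
    arg.split(":", 2) match {
      case Array("csv", directory) => Some(Paths.get(directory))
      case _                       => None
    }

  def main(args: Array[String]): Unit = {
    println(parseCsvDirectory("csv:/var/metrics")) // Some(/var/metrics)
    println(parseCsvDirectory("console"))          // None
  }
}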
Example 5
Source File: Util.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.helpers

import java.net.URI
import java.nio.file.{Files, Path, Paths}

object Util {

  @annotation.varargs
  def guessRelativeFileLocation(filenames: String*): URI = {
    val uri = guessPath(filenames)
    Paths.get(".").toAbsolutePath.relativize(uri).toUri
  }

  @annotation.varargs
  def guessFileLocation(filenames: String*): URI = guessPath(filenames).toUri

  private def cwd = Paths.get(".").toAbsolutePath

  def guessPath(filenames: Seq[String]): Path = {
    def folders(from: Path): Stream[Path] =
      if (from == null) Stream.empty else from #:: folders(from.getParent)

    def guess(from: Path): Stream[Path] =
      folders(from).flatMap { d =>
        filenames.toStream.map(d.resolve)
      }

    val guesses = guess(cwd)

    guesses
      .find(Files.exists(_))
      .getOrElse(throw new IllegalStateException(s"""Could not find ${filenames
                                                      .mkString(", ")}, having searched:
                                         |${guesses.mkString("\n")}""".stripMargin))
  }

} 
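guessRelativeFileLocation relativizes whatever guessPath finds against the current working directory before turning it into a URI. The two Path operations involved, in isolation:

import java.nio.file.{Path, Paths}

object RelativizeSketch {
  def main(args: Array[String]): Unit = {
    val cwd: Path = Paths.get(".").toAbsolutePath
    // A hypothetical file under the working directory.
    val target: Path = cwd.resolve("docs").resolve("quickstart.rst")

    // relativize strips the common prefix, leaving a relative Path.
    println(cwd.relativize(target))       // docs/quickstart.rst

    // toUri on a relative Path first makes it absolute again, so the URI is absolute.
    println(cwd.relativize(target).toUri) // file:///.../docs/quickstart.rst
  }
}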
Example 6
Source File: TlsConfigurationCli.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.tls

import java.nio.file.Paths

import scala.util.Try

object TlsConfigurationCli {
  def parse[C](parser: scopt.OptionParser[C], colSpacer: String)(
      setter: (TlsConfiguration => TlsConfiguration, C) => C): Unit = {
    def enableSet(tlsUp: TlsConfiguration => TlsConfiguration, c: C) =
      setter(tlsc => tlsUp(tlsc copy (enabled = true)), c)

    import parser.opt

    opt[String]("pem")
      .optional()
      .text("TLS: The pem file to be used as the private key.")
      .validate(validatePath(_, "The file specified via --pem does not exist"))
      .action { (path, c) =>
        enableSet(_ copy (keyFile = Some(Paths.get(path).toFile)), c)
      }

    opt[String]("crt")
      .optional()
      .text(
        s"TLS: The crt file to be used as the cert chain.\n${colSpacer}" +
          s"Required for client authentication."
      )
      .validate(validatePath(_, "The file specified via --crt does not exist"))
      .action { (path, c) =>
        enableSet(_ copy (keyCertChainFile = Some(Paths.get(path).toFile)), c)
      }

    opt[String]("cacrt")
      .optional()
      .text("TLS: The crt file to be used as the the trusted root CA.")
      .validate(validatePath(_, "The file specified via --cacrt does not exist"))
      .action { (path, c) =>
        enableSet(_ copy (trustCertCollectionFile = Some(Paths.get(path).toFile)), c)
      }

    // allows you to enable tls without any special certs,
    // i.e., tls without client auth with the default root certs.
    // If any certificates are set, TLS is enabled implicitly and
    // this is redundant.
    opt[Unit]("tls")
      .optional()
      .text("TLS: Enable tls. This is redundant if --pem, --crt or --cacrt are set")
      .action { (_, c) =>
        enableSet(identity, c)
      }

    ()
  }

  private def validatePath(path: String, message: String): Either[String, Unit] = {
    val valid = Try(Paths.get(path).toFile.canRead).getOrElse(false)
    if (valid) Right(()) else Left(message)
  }
} 
Example 7
Source File: Config.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml

import java.nio.file.{Path, Paths}

import scopt.{OptionParser, Read}

object Config {

  private implicit val pathRead: Read[Path] = Read.reads(Paths.get(_))

  private implicit val readTest: Read[MigrationStep.Test] =
    Read.stringRead.map(s =>
      s.split(",", -1) match {
        case Array(Divulgence.ApplicationId, owner, divulgee, suffix) =>
          new Divulgence(owner, divulgee, suffix)
        case Array(KeyTransfer.ApplicationId, owner, receiver, suffix) =>
          new KeyTransfer(owner, receiver, suffix)
        case Array(ProposeAccept.ApplicationId, proposer, accepter, note) =>
          new ProposeAccept(proposer, accepter, note)
        case _ =>
          throw new IllegalArgumentException(s"Illegal test name or parameters '$s'")
    })

  val parser: OptionParser[Config] = new scopt.OptionParser[Config]("migration-step") {
    opt[Path]("dar")
      .action((dar, c) => c.copy(dar = dar))
      .required()
    opt[String]("host")
      .action((host, c) => c.copy(host = host))
      .required()
    opt[Int]("port")
      .action((port, c) => c.copy(port = port))
      .required()
    opt[Path]("output")
      .action((path, c) => c.copy(outputFile = path))
      .required()
    opt[MigrationStep.Test]("test")
      .action((test, c) => c.copy(test = test))
      .required()
  }

  // Null-safety is provided by the CLI parser making all fields required
  val default: Config = Config(null, 0, null, null, null)

  sealed trait Test {
    def host: String
    def port: Int
    def outputFile: Path
  }

}

final case class Config(
    host: String,
    port: Int,
    outputFile: Path,
    dar: Path,
    test: MigrationStep.Test,
) extends Config.Test 
Example 8
Source File: Conf.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.codegen.conf

import java.nio.file.{Path, Paths}

import ch.qos.logback.classic.Level
import com.daml.buildinfo.BuildInfo
import scopt.{OptionParser, Read}


final case class Conf(
    darFiles: Map[Path, Option[String]] = Map(),
    outputDirectory: Path,
    decoderPkgAndClass: Option[(String, String)] = None,
    verbosity: Level = Level.ERROR,
    roots: List[String] = Nil
)

object Conf {

  private[conf] final val PackageAndClassRegex =
    """(?:(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+(?:\.\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)*)\.)(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)""".r

  def parse(args: Array[String]): Option[Conf] =
    parser.parse(args, Conf(Map.empty, Paths.get(".")))

  def parser: OptionParser[Conf] = new scopt.OptionParser[Conf]("codegen") {
    head("codegen", BuildInfo.Version)
    note("Code generator for the DAML ledger bindings.\n")

    arg[(Path, Option[String])]("<DAR-file[=package-prefix]>...")(
      optTupleRead(readPath, Read.stringRead))
      .unbounded()
      .action((p, c) => c.copy(darFiles = c.darFiles + p))
      .required()
      .text("DAR file to use as input of the codegen with an optional, but recommend, package prefix for the generated sources.")

    opt[Path]('o', "output-directory")(readPath)
      .action((p, c) => c.copy(outputDirectory = p))
      .required()
      .text("Output directory for the generated sources")

    opt[(String, String)]('d', "decoderClass")(readClassName)
      .action((className, c) => c.copy(decoderPkgAndClass = Some(className)))
      .text("Fully Qualified Class Name of the optional Decoder utility")

    opt[Level]('V', "verbosity")(readVerbosity)
      .action((l, c) => c.copy(verbosity = l))
      .text("Verbosity between 0 (only show errors) and 4 (show all messages) -- defaults to 0")

    opt[String]('r', "root")(Read.stringRead)
      .unbounded()
      .action((rexp, c) => c.copy(roots = rexp :: c.roots))
      .text(
        "Regular expression for fully-qualified names of templates to generate -- defaults to .*")

    help("help").text("This help text")

  }

  private[conf] val readPath: scopt.Read[Path] = scopt.Read.stringRead.map(s => Paths.get(s))

  val readClassName: scopt.Read[(String, String)] = scopt.Read.stringRead.map {
    case PackageAndClassRegex(p, c) => (p, c)
    case _ =>
      throw new IllegalArgumentException("Expected a Full Qualified Class Name")
  }

  val readVerbosity: scopt.Read[Level] = scopt.Read.stringRead.map {
    case "0" => Level.ERROR
    case "1" => Level.WARN
    case "2" => Level.INFO
    case "3" => Level.DEBUG
    case "4" => Level.TRACE
    case _ =>
      throw new IllegalArgumentException(
        "Expected a verbosity value between 0 (least verbose) and 4 (most verbose)")
  }

  private[conf] def optTupleRead[A: Read, B: Read]: Read[(A, Option[B])] =
    new Read[(A, Option[B])] {
      override def arity: Int = 2

      override def reads: String => (A, Option[B]) = { s: String =>
        s.split('=').toList match {
          case Nil =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
          case key :: Nil => (implicitly[Read[A]].reads(key), None)
          case key :: value :: Nil =>
            (implicitly[Read[A]].reads(key), Some(implicitly[Read[B]].reads(value)))
          case _ =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
        }
      }
    }

} 
Example 9
Source File: ConfSpec.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.codegen.conf

import java.nio.file.Paths

import org.scalatest.{FlatSpec, Matchers, OptionValues}

class ConfSpec extends FlatSpec with Matchers with OptionValues {

  behavior of "Conf.parse"

  it should "return error when no arguments are passed" in {
    Conf.parse(Array.empty) shouldBe empty
  }

  it should "return error when only inputs are passed" in {
    Conf.parse(Array("foo")) shouldBe empty
  }

  it should "return error when only output is passed" in {
    Conf.parse(Array("-o", "bar")) shouldBe empty
  }

  it should "return error when only inputs and decoder class are passed" in {
    Conf.parse(Array("-d", "package.ClassName", "input")) shouldBe empty
  }

  it should "return a Conf when input, output and a known backend are passed" in {
    Conf.parse(Array("-o", "output", "input")) shouldNot be(empty)
  }

  it should "return a Conf when input, output, a known backend and deocder FQCN are passed" in {
    Conf.parse(Array("-o", "output", "-d", "package.ClassName", "input")) shouldNot be(empty)
  }

  // XXX SC remove in Scala 2.13. aggregatingNatureOfGenTraversable is
  // mis-signed because it forces Map[K, V] to destructure as
  // TRAV[e] = Map[K, e], which is of course not <: GenTraversable[e]. And it's
  // needless, as proven below, just like the similar problem with
  // Future.traverse's sig
  import scala.collection.GenTraversable, org.scalatest.enablers.Aggregating
  private[this] implicit def `fixed sig aggregatingNatureOfGenTraversable`[
      E: org.scalactic.Equality,
      TRAV]: Aggregating[TRAV with GenTraversable[E]] =
    Aggregating.aggregatingNatureOfGenTraversable[E, GenTraversable]

  it should "return a Conf with expected single unmapped input and output" in {
    val conf = Conf.parse(Array("-o", "output", "input")).value
    conf.darFiles should contain theSameElementsAs Map(Paths.get("input") -> None)
  }

  it should "return error when illegal Decoder class is passed" in {
    Conf.parse(Array("-o", "output", "-d", "$illegal")) shouldBe empty
  }

  it should "return a Conf with expected single mapped input, output and backend" in {
    val conf = Conf.parse(Array("-o", "output", "input=input.prefix")).value
    conf.darFiles should contain theSameElementsAs Map(Paths.get("input") -> Some("input.prefix"))
  }

  it should "return a Conf with expected multiple mapped inputs, output and backend" in {
    val conf = Conf
      .parse(Array("-o", "output", "input1=input1.prefix", "input2=input2.prefix"))
      .value
    conf.darFiles should contain theSameElementsAs Map(
      Paths.get("input1") -> Some("input1.prefix"),
      Paths.get("input2") -> Some("input2.prefix")
    )
  }
  it should "return a Conf with expected multiple mixed inputs, output and backend" in {
    val conf =
      Conf.parse(Array("-o", "output", "input1=input1.prefix", "input2")).value
    conf.darFiles should contain theSameElementsAs Map(
      Paths.get("input1") -> Some("input1.prefix"),
      Paths.get("input2") -> None
    )
  }
} 
Example 10
Source File: NavigatorBackend.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator

import java.nio.file.{Files, Paths}
import java.util.UUID

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.daml.buildinfo.BuildInfo

object NavigatorBackend extends UIBackend {

  private val configFile = "frontend-config.js"
  override def customEndpoints: Set[CustomEndpoint[_]] = Set()
  override def customRoutes: List[Route] = List(frontendConfigRoute)
  override def applicationInfo: ApplicationInfo = ApplicationInfo(
    id = s"Navigator-${UUID.randomUUID().toString}",
    name = "Navigator",
    version = BuildInfo.Version,
  )
  override def banner: Option[String] =
    Some(
      raw"""   _  __          _           __
        |  / |/ /__ __  __(_)__ ____ _/ /____  ____
        | /    / _ `/ |/ / / _ `/ _ `/ __/ _ \/ __/
        |/_/|_/\_,_/|___/_/\_, /\_,_/\__/\___/_/
        |                 /___/
        |Version """.stripMargin + applicationInfo.version
    )

  
  private val frontendConfigRoute: Route = {
    path("api" / "config") {
      if (Files.exists(Paths.get(configFile)))
        getFromFile(configFile)
      else
        complete(StatusCodes.NotFound)
    }
  }
} 
Example 11
Source File: PortFilesSpec.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ports

import java.nio.file.{Path, Paths}
import java.util.UUID

import com.daml.ports.PortFiles.FileAlreadyExists
import org.scalatest.{FreeSpec, Inside, Matchers}
import scalaz.{-\/, \/-}

class PortFilesSpec extends FreeSpec with Matchers with Inside {

  "Can create a port file with a unique file name" in {
    val path = uniquePath()
    inside(PortFiles.write(path, Port(1024))) {
      case \/-(()) =>
    }
    path.toFile.exists() shouldBe true
  }

  "Cannot create a port file with a nonunique file name" in {
    val path = uniquePath()
    inside(PortFiles.write(path, Port(1024))) {
      case \/-(()) =>
    }
    inside(PortFiles.write(path, Port(1024))) {
      case -\/(FileAlreadyExists(p)) =>
        p shouldBe path
    }
  }

  private def uniquePath(): Path = {
    val fileName = s"${this.getClass.getSimpleName}-${UUID.randomUUID().toString}.dummy"
    Paths.get(fileName)
  }
} 
Example 12
Source File: PortLock.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.testing.postgresql

import java.io.RandomAccessFile
import java.nio.channels.{
  ClosedChannelException,
  FileChannel,
  FileLock,
  OverlappingFileLockException
}
import java.nio.file.{Files, Path, Paths}

import com.daml.ports.Port

private[postgresql] object PortLock {

  // We can't use `sys.props("java.io.tmpdir")` because Bazel changes this for each test run.
  // For this to be useful, it needs to be shared across concurrent runs.
  private val portLockDirectory: Path = {
    val tempDirectory =
      if (sys.props("os.name").startsWith("Windows")) {
        Paths.get(sys.props("user.home"), "AppData", "Local", "Temp")
      } else {
        Paths.get("/tmp")
      }
    tempDirectory.resolve(Paths.get("daml", "build", "postgresql-testing", "ports"))
  }

  def lock(port: Port): Either[FailedToLock, Locked] = {
    Files.createDirectories(portLockDirectory)
    val portLockFile = portLockDirectory.resolve(port.toString)
    val file = new RandomAccessFile(portLockFile.toFile, "rw")
    val channel = file.getChannel
    try {
      val lock = channel.tryLock()
      val locked = new Locked(port, lock, channel, file)
      if (lock != null) {
        Right(locked)
      } else {
        locked.unlock()
        Left(FailedToLock(port))
      }
    } catch {
      case _: OverlappingFileLockException =>
        channel.close()
        file.close()
        Left(FailedToLock(port))
    }
  }

  final class Locked(val port: Port, lock: FileLock, channel: FileChannel, file: RandomAccessFile) {
    def unlock(): Unit = {
      try {
        lock.release()
      } catch {
        // ignore
        case _: ClosedChannelException =>
      }
      channel.close()
      file.close()
    }
  }

  case class FailedToLock(port: Port) extends RuntimeException(s"Failed to lock port $port.")

} 
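The port-lock directory above is assembled from home-directory segments on Windows and a fixed /tmp elsewhere, then extended with resolve so it can be shared across test runs. A condensed sketch, with a placeholder suffix:

import java.nio.file.{Files, Path, Paths}

object SharedTempDir {
  // Mirrors the OS check above; "myapp/ports" is a placeholder suffix.
  def portDirectory(): Path = {
    val base: Path =
      if (sys.props("os.name").startsWith("Windows"))
        Paths.get(sys.props("user.home"), "AppData", "Local", "Temp")
      else
        Paths.get("/tmp")
    base.resolve(Paths.get("myapp", "ports"))
  }

  def main(args: Array[String]): Unit = {
    val dir = portDirectory()
    Files.createDirectories(dir)
    println(s"port lock directory: $dir")
  }
}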
Example 13
Source File: NetworkBuilderMain.scala    From iotchain   with MIT License
package jbok.app

import java.nio.file.Paths

import cats.effect.{ExitCode, IO, IOApp}
import cats.implicits._
import jbok.common.math.N
import jbok.core.CoreModule
import jbok.core.config.{GenesisBuilder, NetworkBuilder}
import jbok.core.models.{Address, ChainId}
import jbok.crypto.signature.{ECDSA, KeyPair, Signature}

object NetworkBuilderMain extends IOApp {
  override def run(args: List[String]): IO[ExitCode] = {
    def randomKP: KeyPair =
      Signature[ECDSA].generateKeyPair[IO]().unsafeRunSync()

    val miner0 = randomKP
    val miner1 = randomKP
    val miner2 = randomKP

    val coinbase0 = Address(randomKP)
    val coinbase1 = Address(randomKP)
    val coinbase2 = Address(randomKP)

    val alloc = KeyPair(
      KeyPair.Public("a4991b82cb3f6b2818ce8fedc00ef919ba505bf9e67d96439b63937d24e4d19d509dd07ac95949e815b307769f4e4d6c3ed5d6bd4883af23cb679b251468a8bc"),
      KeyPair.Secret("1a3c21bb6e303a384154a56a882f5b760a2d166161f6ccff15fc70e147161788")
    )

    val genesis = GenesisBuilder()
      .withChainId(ChainId(10))
      .addAlloc(Address(alloc), N("1" + "0" * 27))
      .addAlloc(Address(miner0), N("1" + "0" * 27))
      .addMiner(Address(miner0))
//      .addMiner(Address(miner1))
//      .addMiner(Address(miner2))
      .build

    val config = CoreModule.testConfig.copy(genesis = genesis)

    val home = System.getProperty("user.home")
    val root = Paths.get(home).resolve(".iotchain")

    val builder = NetworkBuilder(config)
      .withBlockPeriod(10000)
      .addNode(miner0, coinbase0, root.resolve("node-0"), "127.0.0.2")
      .addNode(miner1, coinbase1, root.resolve("node-1"), "127.0.0.3")
      .addNode(miner2, coinbase2, root.resolve("node-2"), "127.0.0.4")

    builder.dump.as(ExitCode.Success)
  }
} 
Example 14
Source File: FullNode.scala    From iotchain   with MIT License
package jbok.app

import java.nio.channels.FileLock
import java.nio.file.Paths

import cats.effect._
import fs2._
import jbok.app.service.{HttpService, StoreUpdateService}
import jbok.common.FileUtil
import jbok.common.log.Logger
import jbok.core.CoreNode

final class FullNode[F[_]](
    core: CoreNode[F],
    httpService: HttpService[F],
    storeUpdateService: StoreUpdateService[F]
)(implicit F: ConcurrentEffect[F]) {
  private[this] val log = Logger[F]

  def lock: Resource[F, FileLock] =
    FileUtil[F].lock(Paths.get(s"${core.config.rootPath}").resolve("LOCK"))

  def stream: Stream[F, Unit] =
    Stream.resource(lock).flatMap { _ =>
      Stream.eval_(log.i(s"starting FullNode...")) ++
        Stream(
          core.stream,
          httpService.stream,
          storeUpdateService.stream
        ).parJoinUnbounded
          .handleErrorWith(e => Stream.eval(log.e("FullNode has an unhandled failure", e)))
          .onFinalize(log.i(s"FullNode ready to exit, bye bye..."))
    }
} 
Example 15
Source File: AppMain.scala    From iotchain   with MIT License
package jbok.app

import java.nio.file.Paths

import cats.effect.{ExitCode, IO, IOApp}
import cats.implicits._
import jbok.common.log.Logger

object AppMain extends IOApp {
  private[this] val log = Logger[IO]

  private val buildVersion: String = getClass.getPackage.getImplementationVersion

  private val banner: String = """
                         | _____   _______    _____ _           _
                         ||_   _| |__   __|  / ____| |         (_)
                         |  | |  ___ | |    | |    | |__   __ _ _ _ __
                         |  | | / _ \| |    | |    | '_ \ / _` | | '_ \
                         | _| || (_) | |    | |____| | | | (_| | | | | |
                         ||_____\___/|_|     \_____|_| |_|\__,_|_|_| |_|
                         |""".stripMargin

  private val version = s"v${buildVersion} © 2018 - 2019 The IoTChain Authors"

  override def run(args: List[String]): IO[ExitCode] =
    log.i(banner) >>
      log.i(version) >>
      AppModule
        .resource[IO](Paths.get(args.headOption.getOrElse("/etc/iotchain/config.yaml")))
        .use(_.get[FullNode[IO]].stream.compile.drain)
        .as(ExitCode.Success)
} 
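AppMain falls back to a default configuration location when no argument is supplied. The Paths part of that, pulled out on its own (the default shown is the one from the example):

import java.nio.file.{Path, Paths}

object ConfigArg {
  def configPath(args: List[String]): Path =
    Paths.get(args.headOption.getOrElse("/etc/iotchain/config.yaml"))

  def main(args: Array[String]): Unit =
    println(configPath(args.toList))
}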
Example 16
Source File: CliMain.scala    From iotchain   with MIT License
package jbok.app

import java.nio.file.Paths

import cats.effect.{ExitCode, IO, IOApp}
import javax.net.ssl.SSLContext
import jbok.app.cli.Cli
import jbok.common.config.Config
import jbok.common.log.{Level, Logger}
import jbok.core.api.JbokClientPlatform
import jbok.core.config.FullConfig
import monocle.macros.syntax.lens._

import scala.concurrent.duration._

object CliMain extends IOApp {
  override def run(args: List[String]): IO[ExitCode] =
    Config[IO].read[FullConfig](Paths.get(args.head)).flatMap { config =>
      AppModule.resource[IO](config.lens(_.persist.driver).set("memory")).use { objects =>
        val config = objects.get[FullConfig]
        val ssl    = objects.get[Option[SSLContext]]
        JbokClientPlatform.resource[IO](config.service.uri, ssl).use { client =>
          val cli = new Cli[IO](client)
          for {
            _ <- Logger.setRootLevel[IO](Level.Error)
            _ <- cli.loop(5.seconds).compile.drain
          } yield ExitCode.Success
        }
      }
    }
} 
Example 17
Source File: TxGeneratorMain.scala    From iotchain   with MIT License
package jbok.app

import java.nio.file.Paths

import cats.effect.{ExitCode, IO, IOApp}
import cats.implicits._
import jbok.app.txgen.TxGenerator
import jbok.common.config.Config
import jbok.core.api.JbokClientPlatform
import jbok.core.config.FullConfig
import jbok.core.mining.TxGen
import jbok.crypto.signature.{ECDSA, KeyPair, Signature}
import monocle.macros.syntax.lens._

object TxGeneratorMain extends IOApp {
  override def run(args: List[String]): IO[ExitCode] =
    Config[IO].read[FullConfig](Paths.get(args.head)).flatMap { config =>
      AppModule.resource[IO](config.lens(_.persist.driver).set("memory")).use { objects =>
        val config           = objects.get[FullConfig]
        implicit val chainId = config.genesis.chainId
        val keyPairs: List[KeyPair] = List(
          KeyPair(
            KeyPair.Public("a4991b82cb3f6b2818ce8fedc00ef919ba505bf9e67d96439b63937d24e4d19d509dd07ac95949e815b307769f4e4d6c3ed5d6bd4883af23cb679b251468a8bc"),
            KeyPair.Secret("1a3c21bb6e303a384154a56a882f5b760a2d166161f6ccff15fc70e147161788")
          )
        ) ++ Signature[ECDSA].generateKeyPair[IO]().replicateA(10).unsafeRunSync()

        JbokClientPlatform.resource[IO](config.service.uri).use { client =>
          for {
            txGen <- TxGen[IO](keyPairs, client)
            generator = new TxGenerator[IO](config, txGen, client)
            _ <- generator.stream.compile.drain
          } yield ExitCode.Success
        }
      }
    }
} 
Example 18
Source File: SSLContextHelper.scala    From iotchain   with MIT License
package jbok.crypto.ssl

import java.nio.file.Paths
import java.security.KeyStore

import cats.effect.Sync
import cats.implicits._
import javax.net.ssl.{KeyManagerFactory, SSLContext, SSLEngine, TrustManagerFactory}
import jbok.common.FileUtil
import jbok.common.log.Logger

final class ClientSSLEngine(val engine: SSLEngine) extends AnyVal

final class ServerSSLEngine(val engine: SSLEngine) extends AnyVal

object SSLContextHelper {
  def apply[F[_]](config: SSLConfig)(implicit F: Sync[F]): F[Option[SSLContext]] =
    if (!config.enabled) {
      F.pure(None)
    } else {
      Logger[F].i(s"init SSLContext from keyStore=${config.keyStorePath} trustStore=${config.trustStorePath}") >>
        FileUtil[F]
          .inputStream(Paths.get(config.keyStorePath))
          .use { keyStoreIS =>
            FileUtil[F].inputStream(Paths.get(config.trustStorePath)).use { trustStoreIS =>
              F.delay {
                val keyStore = KeyStore.getInstance("JKS")
                keyStore.load(keyStoreIS, "changeit".toCharArray)
                val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
                keyManagerFactory.init(keyStore, "changeit".toCharArray)

                val trustStore = KeyStore.getInstance("JKS")
                trustStore.load(trustStoreIS, "changeit".toCharArray)
                val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
                trustManagerFactory.init(trustStore)

                val ctx = SSLContext.getInstance(config.protocol)
                ctx.init(keyManagerFactory.getKeyManagers, trustManagerFactory.getTrustManagers, null)
                ctx
              }
            }
          }
          .map(_.some)
    }

  def clientEngine(ctx: SSLContext): ClientSSLEngine = {
    val engine = ctx.createSSLEngine()
    engine.setUseClientMode(true)
    engine.setNeedClientAuth(true)
    new ClientSSLEngine(engine)
  }

  def serverEngine(ctx: SSLContext): ServerSSLEngine = {
    val engine = ctx.createSSLEngine()
    engine.setUseClientMode(false)
    engine.setNeedClientAuth(true)
    new ServerSSLEngine(engine)
  }
} 
Example 19
Source File: LoggerPlatform.scala    From iotchain   with MIT License
package jbok.common.log

import java.nio.file.{Path, Paths}

import cats.effect.Sync
import cats.implicits._
import jbok.common.FileUtil
import scribe.handler.LogHandler
import scribe.writer.FileWriter
import scribe.writer.file.LogPath

import scala.concurrent.duration._

object LoggerPlatform {
  def initConfig[F[_]: Sync](config: LogConfig): F[Unit] = {
    val level = Level.fromName(config.level)
    Logger.setRootLevel(level) >>
      (config.logDir match {
        case "/dev/null" =>
          Logger.setRootHandlers(Logger.consoleHandler(level.some))
        case dir =>
          FileUtil[F].open(Paths.get(config.logDir), create = true, asDirectory = true) >>
            Logger.setRootHandlers(
              Logger.consoleHandler(level.some),
              fileHandler(Paths.get(dir), level.some, config.maxLogs)
            )
      })
  }

  def fileHandler(directory: Path, minimumLevel: Option[Level] = None, maxLogs: Int = 15): LogHandler = LogHandler(
    Logger.fileFormatter,
    FileWriter().nio
      .path(LogPath.simple("iotchain.log", directory = directory))
      .rolling(LogPath.daily(prefix = "iotchain", directory = directory))
      .maxLogs(maxLogs, checkRate = 1.seconds),
    minimumLevel.map(Logger.fromJbokLevel)
  )
} 
Example 20
Source File: NetworkBuilder.scala    From iotchain   with MIT License
package jbok.core.config

import java.net.InetSocketAddress
import java.nio.file.{Path, Paths}

import better.files.File
import cats.effect.IO
import cats.implicits._
import io.circe.syntax._
import jbok.common.config.Config
import jbok.core.keystore.KeyStorePlatform
import jbok.core.models.Address
import jbok.core.peer.PeerUri
import jbok.crypto.signature.KeyPair
import monocle.macros.syntax.lens._

import sys.process.{ProcessLogger, stringSeqToProcess}
import scala.concurrent.duration._

final case class NetworkBuilder(
    base: FullConfig,
    configs: List[FullConfig] = Nil,
) {
  val home = System.getProperty("user.home")
  val root = Paths.get(home).resolve(".jbok")

  def withBlockPeriod(n: Int): NetworkBuilder =
    copy(base = base.lens(_.mining.period).set(n.millis))

  def createCert(ip: String, cn: String, caDir: Path, certDir: Path): IO[String] = IO {
    val path = File(".")
    val projectDir = path.path.toAbsolutePath
    val processLogger = new ProcessLogger {
      override def out(s: => String): Unit = println(s)
      override def err(s: => String): Unit = println(s)
      override def buffer[T](f: => T): T = f
    }

    Seq("bash", "-c", s"${projectDir.resolve("bin/create-cert.sh")} ${ip} ${cn} ${projectDir.resolve("bin").toAbsolutePath} ${caDir.toAbsolutePath} ${certDir.toAbsolutePath}")
      .lineStream_!(processLogger)
      .mkString("\n")
  }

  def addNode(keyPair: KeyPair, coinbase: Address, rootPath: Path, host: String): NetworkBuilder = {
    val config = base
      .lens(_.rootPath).set(rootPath.toAbsolutePath.toString)
      .lens(_.peer.host).set(host)
      .lens(_.service.local).set(host)
      .lens(_.service.enableMetrics).set(true)
//      .lens(_.service.secure).set(true)
      .lens(_.mining.enabled).set(true)
      .lens(_.mining.address).set(Address(keyPair))
      .lens(_.mining.coinbase).set(coinbase)
//      .lens(_.ssl.enabled).set(true)
      .lens(_.ssl.trustStorePath).set(rootPath.resolve("cert/cacert.jks").toAbsolutePath.toString)
      .lens(_.ssl.keyStorePath).set(rootPath.resolve("cert/server.jks").toAbsolutePath.toString)
      .lens(_.persist.driver).set("rocksdb")
      .lens(_.persist.path).set(s"${rootPath.resolve("data").toAbsolutePath}")
      .lens(_.log.logDir).set(s"${rootPath.resolve("logs").toAbsolutePath}")
      .lens(_.keystore.dir).set(s"${rootPath.resolve("keystore").toAbsolutePath}")
      .lens(_.db.driver).set("org.sqlite.JDBC")
      .lens(_.db.url).set(s"jdbc:sqlite:${rootPath.resolve(s"service.db")}")

    val keystore = new KeyStorePlatform[IO](config.keystore)
    keystore.importPrivateKey(keyPair.secret.bytes, "changeit").unsafeRunSync()

    createCert(host, host, root.resolve("ca"), rootPath.resolve("cert")).unsafeRunSync()
    copy(configs = config :: configs)
  }

  def build: List[FullConfig] = {
    val reversed = configs.reverse
    val seeds = reversed.map(_.peer).map { peer =>
      PeerUri.fromTcpAddr(new InetSocketAddress(peer.host, peer.port)).uri
    }

    reversed.zipWithIndex.map { case (config, i) => config.lens(_.peer.seeds).set(seeds.take(i) ++ seeds.drop(i + 1)) }
  }

  def dump: IO[Unit] =
    build.traverse_(config => Config[IO].dump(config.asJson, Paths.get(config.rootPath).resolve(s"config.yaml")))
} 
Example 21
Source File: ImagePredictor.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.example.imageclassification

import java.nio.file.Paths

import com.intel.analytics.bigdl.dataset.image._
import com.intel.analytics.bigdl.dlframes.DLClassifierModel
import com.intel.analytics.bigdl.example.imageclassification.MlUtils._
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext


object ImagePredictor {
  LoggerFilter.redirectSparkInfoLogs()
  Logger.getLogger("com.intel.analytics.bigdl.example").setLevel(Level.INFO)

  def main(args: Array[String]): Unit = {
    predictParser.parse(args, new PredictParams()).map(param => {
      val conf = Engine.createSparkConf()
      conf.setAppName("Predict with trained model")
      val sc = new SparkContext(conf)
      Engine.init
      val sqlContext = new SQLContext(sc)

      val partitionNum = Engine.nodeNumber() * Engine.coreNumber()
      val model = loadModel(param)
      val valTrans = new DLClassifierModel(model, Array(3, imageSize, imageSize))
        .setBatchSize(param.batchSize)
        .setFeaturesCol("features")
        .setPredictionCol("predict")

      val valRDD = if (param.isHdfs) {
        // load image set from hdfs
        imagesLoadSeq(param.folder, sc, param.classNum).coalesce(partitionNum, true)
      } else {
        // load image set from local
        val paths = LocalImageFiles.readPaths(Paths.get(param.folder), hasLabel = false)
        sc.parallelize(imagesLoad(paths, 256), partitionNum)
      }

      val transf = RowToByteRecords() ->
          BytesToBGRImg() ->
          BGRImgCropper(imageSize, imageSize) ->
          BGRImgNormalizer(testMean, testStd) ->
          BGRImgToImageVector()

      val valDF = transformDF(sqlContext.createDataFrame(valRDD), transf)

      valTrans.transform(valDF)
          .select("imageName", "predict")
          .collect()
          .take(param.showNum)
          .foreach(println)
      sc.stop()
    })
  }
} 
Example 22
Source File: Utils.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.dataset.image

import java.awt.image.BufferedImage
import java.awt.{BasicStroke, Color, Font, Graphics2D}
import java.io.File
import java.nio.file.Paths
import javax.imageio.ImageIO

import com.intel.analytics.bigdl.tensor.Tensor


// The enclosing object is restored from the file name; the `vis` helper that
// draws the detections and writes the annotated image is defined elsewhere in
// the original file and omitted from this excerpt.
object Utils {

  def visDetection(imagePath: String, clsname: String,
    scores: Tensor[Float], bboxes: Tensor[Float],
    thresh: Float = 0.3f, outPath: String = "data/demo"): Unit = {
    val f = new File(outPath)
    if (!f.exists()) {
      f.mkdirs()
    }
    val path = Paths.get(outPath,
      s"${ clsname }_${ imagePath.substring(imagePath.lastIndexOf("/") + 1) }").toString
    vis(imagePath, clsname, scores, bboxes, path, thresh)
  }
} 
Example 23
Source File: COCOSeqFileGenerator.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.models.utils

import com.intel.analytics.bigdl.dataset.segmentation.{COCODataset, COCOSerializeContext}
import java.io.File
import java.nio.file.{Files, Paths}
import java.util.concurrent.atomic.AtomicInteger
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.SequenceFile.Writer
import org.apache.hadoop.io.compress.BZip2Codec
import org.apache.hadoop.io.{BytesWritable, SequenceFile}
import scala.collection.parallel.ForkJoinTaskSupport
import scopt.OptionParser

object COCOSeqFileGenerator {

  
  case class COCOSeqFileGeneratorParams(
    folder: String = ".",
    metaPath: String = "instances_val2014.json",
    output: String = ".",
    parallel: Int = 1,
    blockSize: Int = 12800
  )

  private val parser = new OptionParser[COCOSeqFileGeneratorParams]("BigDL COCO " +
    "Sequence File Generator") {
    head("BigDL COCO Sequence File Generator")
    opt[String]('f', "folder")
      .text("where you put the COCO image files")
      .action((x, c) => c.copy(folder = x))
    opt[String]('o', "output folder")
      .text("where you put the generated seq files")
      .action((x, c) => c.copy(output = x))
    opt[Int]('p', "parallel")
      .text("parallel num")
      .action((x, c) => c.copy(parallel = x))
    opt[Int]('b', "blockSize")
      .text("block size")
      .action((x, c) => c.copy(blockSize = x))
    opt[String]('m', "metaPath")
      .text("metadata json file path")
      .action((x, c) => c.copy(metaPath = x))
  }

  def main(args: Array[String]): Unit = {
    parser.parse(args, COCOSeqFileGeneratorParams()).foreach { param =>
      println("Loading COCO metadata")
      val meta = COCODataset.load(param.metaPath, param.folder)
      println("Metadata loaded")
      val conf: Configuration = new Configuration
      val doneCount = new AtomicInteger(0)
      val tasks = meta.images.filter(img => {
        val path = img.path
        val valid = Files.exists(path) && !Files.isDirectory(path)
        if (!valid) {
          System.err.print(s"[Warning] The image file ${path.getFileName} does not exist.\n")
        }
        valid
      }).grouped(param.blockSize).zipWithIndex.toArray.par
      tasks.tasksupport = new ForkJoinTaskSupport(
        new scala.concurrent.forkjoin.ForkJoinPool(param.parallel))
      tasks.foreach { case (imgs, blkId) =>
        val outFile = new Path(param.output, s"coco-seq-$blkId.seq")
        val key = new BytesWritable
        val value = new BytesWritable
        val writer = SequenceFile.createWriter(conf, Writer.file(outFile), Writer.keyClass(key
          .getClass), Writer.valueClass(value.getClass), Writer.compression(SequenceFile
          .CompressionType.BLOCK, new BZip2Codec))
        val context = new COCOSerializeContext
        imgs.foreach { img =>
          context.clear()
          context.dump(img.fileName)
          img.dumpTo(context)
          context.dump(COCODataset.MAGIC_NUM)
          val keyBytes = context.toByteArray
          key.set(keyBytes, 0, keyBytes.length)
          val bytes = img.data
          value.set(bytes, 0, bytes.length)
          writer.append(key, value)
          val cnt = doneCount.incrementAndGet()
          if (cnt % 500 == 0) {
            System.err.print(s"\r$cnt / ${meta.images.length} = ${cnt.toFloat/meta.images.length}")
          }
        }
        writer.close()
      }
      System.err.print("\n")
    }
  }
} 
Example 24
Source File: ImageNet2012.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.models.inception

import java.nio.file.Paths

import com.intel.analytics.bigdl.DataSet
import com.intel.analytics.bigdl.dataset._
import com.intel.analytics.bigdl.dataset.image.{BGRImgCropper, BGRImgNormalizer, BytesToBGRImg, CropCenter, CropRandom, MTLabeledBGRImgToBatch, HFlip => DatasetHFlip}
import com.intel.analytics.bigdl.transform.vision.image._
import com.intel.analytics.bigdl.transform.vision.image.augmentation._
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

object ImageNet2012 {
  def apply(
    path : String,
    sc: SparkContext,
    imageSize : Int,
    batchSize : Int,
    nodeNumber: Int,
    coresPerNode: Int,
    classNumber: Int
  )
  : DataSet[MiniBatch[Float]] = {
    DataSet.SeqFileFolder.filesToImageFeatureDataset(path, sc, classNumber).transform(
      MTImageFeatureToBatch(
        width = imageSize,
        height = imageSize,
        batchSize = batchSize,
        transformer = PixelBytesToMat() ->
          Resize(256, 256) ->
          RandomCropper(224, 224, true, CropRandom) ->
          ChannelNormalize(123, 117, 104) ->
          MatToTensor[Float](), toRGB = false
      )
    )
  }

  def rdd(path: String, batchSize: Int, sc: SparkContext, imageSize : Int)
  : DataSet[MiniBatch[Float]] = {
    val imageFrame = DataSet.SeqFileFolder.filesToImageFrame(path, sc, 1000)
    val transfomer = PixelBytesToMat() ->
      RandomCrop(imageSize, imageSize) ->
      RandomTransformer(HFlip(), 0.5) ->
      ChannelNormalize(0.485f, 0.456f, 0.406f, 0.229f, 0.224f, 0.225f) ->
      MatToTensor[Float]() ->
      ImageFrameToSample[Float](targetKeys = Array(ImageFeature.label)) ->
      ImageFeatureToMiniBatch[Float](batchSize)
    val data = DataSet.imageFrame(imageFrame).transform(transfomer)
    data
  }
}

object ImageNet2012Val {
   def apply(
     path : String,
     sc: SparkContext,
     imageSize : Int,
     batchSize : Int,
     nodeNumber: Int,
     coresPerNode: Int,
     classNumber: Int
   )
   : DataSet[MiniBatch[Float]] = {

     DataSet.SeqFileFolder.filesToImageFeatureDataset(path, sc, 1000).transform(
       MTImageFeatureToBatch(
         width = imageSize,
         height = imageSize,
         batchSize = batchSize,
         transformer = PixelBytesToMat() ->
           Resize(256, 256) ->
           RandomCropper(224, 224, false, CropCenter) ->
           ChannelNormalize(123, 117, 104) ->
           MatToTensor[Float](), toRGB = false
       )
     )
   }

  def rdd(path: String, batchSize: Int, sc: SparkContext, imageSize : Int)
  : DataSet[MiniBatch[Float]] = {
    val imageFrame = DataSet.SeqFileFolder.filesToImageFrame(path, sc, 1000)
    val transfomer = PixelBytesToMat() ->
      CenterCrop(imageSize, imageSize) ->
      RandomTransformer(HFlip(), 0.5) ->
      ChannelNormalize(0.485f, 0.456f, 0.406f, 0.229f, 0.224f, 0.225f) ->
      MatToTensor[Float]() ->
      ImageFrameToSample[Float](targetKeys = Array(ImageFeature.label)) ->
      ImageFeatureToMiniBatch[Float](batchSize)
    val data = DataSet.imageFrame(imageFrame).transform(transfomer)
    data
  }

 } 
Example 25
Source File: Test.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.models.lenet

import java.nio.file.Paths

import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToSample}
import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.optim.Top1Accuracy
import com.intel.analytics.bigdl.utils.Engine
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext

object Test {
  Logger.getLogger("org").setLevel(Level.ERROR)
  Logger.getLogger("akka").setLevel(Level.ERROR)
  Logger.getLogger("breeze").setLevel(Level.ERROR)


  import Utils._

  def main(args: Array[String]): Unit = {
    testParser.parse(args, new TestParams()).foreach { param =>
      val conf = Engine.createSparkConf().setAppName("Test Lenet on MNIST")
        .set("spark.akka.frameSize", 64.toString)
        .set("spark.task.maxFailures", "1")
      val sc = new SparkContext(conf)
      Engine.init

      val validationData = param.folder + "/t10k-images-idx3-ubyte"
      val validationLabel = param.folder + "/t10k-labels-idx1-ubyte"

      val partitionNum = Engine.nodeNumber() * Engine.coreNumber()
      val rddData = sc.parallelize(load(validationData, validationLabel), partitionNum)
      val transformer =
        BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToSample()
      val evaluationSet = transformer(rddData)

      val model = Module.load[Float](param.model)
      val result = model.evaluate(evaluationSet,
        Array(new Top1Accuracy[Float]), Some(param.batchSize))

      result.foreach(r => println(s"${r._2} is ${r._1}"))
      sc.stop()
    }
  }
} 
Example 26
Source File: Train.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.models.autoencoder

import java.nio.file.Paths

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.image._
import com.intel.analytics.bigdl.dataset.{DataSet, MiniBatch, Transformer}
import com.intel.analytics.bigdl.nn.{MSECriterion, Module}
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric._
import com.intel.analytics.bigdl.utils.{Engine, OptimizerV1, OptimizerV2, T, Table}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext

import scala.reflect.ClassTag

object toAutoencoderBatch {
  def apply(): toAutoencoderBatch[Float] = new toAutoencoderBatch[Float]()
}

class toAutoencoderBatch[T: ClassTag](implicit ev: TensorNumeric[T]
      )extends Transformer[MiniBatch[T], MiniBatch[T]] {
  override def apply(prev: Iterator[MiniBatch[T]]): Iterator[MiniBatch[T]] = {
    prev.map(batch => {
      MiniBatch(batch.getInput().toTensor[T], batch.getInput().toTensor[T])
    })
  }
}

object Train {
  Logger.getLogger("org").setLevel(Level.ERROR)
  Logger.getLogger("akka").setLevel(Level.ERROR)
  Logger.getLogger("breeze").setLevel(Level.ERROR)


  import Utils._

  def main(args: Array[String]): Unit = {
    trainParser.parse(args, new TrainParams()).map(param => {
      val conf = Engine.createSparkConf().setAppName("Train Autoencoder on MNIST")

      val sc = new SparkContext(conf)
      Engine.init

      val trainData = Paths.get(param.folder, "/train-images-idx3-ubyte")
      val trainLabel = Paths.get(param.folder, "/train-labels-idx1-ubyte")

      val trainDataSet = DataSet.array(load(trainData, trainLabel), sc) ->
        BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) ->
        GreyImgToBatch(param.batchSize) -> toAutoencoderBatch()

      val model = if (param.modelSnapshot.isDefined) {
        Module.load[Float](param.modelSnapshot.get)
      } else {
        if (param.graphModel) Autoencoder.graph(classNum = 32) else Autoencoder(classNum = 32)
      }

      if (param.optimizerVersion.isDefined) {
        param.optimizerVersion.get.toLowerCase match {
          case "optimizerv1" => Engine.setOptimizerVersion(OptimizerV1)
          case "optimizerv2" => Engine.setOptimizerVersion(OptimizerV2)
        }
      }

      val optimMethod = if (param.stateSnapshot.isDefined) {
        OptimMethod.load[Float](param.stateSnapshot.get)
      } else {
        new Adagrad[Float](learningRate = 0.01, learningRateDecay = 0.0, weightDecay = 0.0005)
      }

      val optimizer = Optimizer(
        model = model,
        dataset = trainDataSet,
        criterion = new MSECriterion[Float]()
      )

      if (param.checkpoint.isDefined) {
        optimizer.setCheckpoint(param.checkpoint.get, Trigger.everyEpoch)
      }
      optimizer
        .setOptimMethod(optimMethod)
        .setEndWhen(Trigger.maxEpoch(param.maxEpoch))
        .optimize()
      sc.stop()
    })
  }
} 
Example 27
Source File: SerializeModelSpec.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.nn.mkldnn

import java.io.File
import java.nio.file.{Files, Paths}

import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.nn.mkldnn.ResNet.DatasetType.ImageNet
import com.intel.analytics.bigdl.utils.T
import org.scalatest.{FlatSpec, Matchers}

class SerializeModelSpec extends FlatSpec with Matchers {

  "Save a model" should "work correctly" in {
    val identity = System.identityHashCode(this).toString
    val name = "resnet_50." + identity
    val tmpdir = System.getProperty("java.io.tmpdir")
    val path = Paths.get(tmpdir, name).toAbsolutePath

    // Do not use the vgg16 model: it sets Xavier to average mode, which
    // would influence other test cases because Xavier is a case object.
    val model = ResNet(32, 1000, T("depth" -> 50, "dataSet" -> ImageNet))
    println(s"generate the model file ${path.toString}")
    model.save(path.toString, true)
    val loaded = Module.load[Float](path.toString)

    val length = Files.size(path) / 1024.0 / 1024.0
    length should be < 300.0

    println(s"delete the model file ${path.toString}")
    Files.deleteIfExists(path)
  }

} 
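The spec builds its scratch file with Paths.get(tmpdir, name), measures it with Files.size, and cleans up with Files.deleteIfExists. A standalone sketch of that idiom follows (the file name and contents are made up for illustration):

import java.nio.file.{Files, Paths}

val tmp = Paths.get(System.getProperty("java.io.tmpdir"), s"scratch-${System.nanoTime()}.bin")
Files.write(tmp, new Array[Byte](1024))          // create a 1 KiB placeholder file
val sizeMb = Files.size(tmp) / 1024.0 / 1024.0   // same size check as the test above
println(f"$tmp is $sizeMb%.4f MB")
Files.deleteIfExists(tmp)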
Example 28
Source File: S3ParquetPageOutput.scala    From embulk-output-s3_parquet   with MIT License 5 votes vote down vote up
package org.embulk.output.s3_parquet

import java.io.File
import java.nio.file.{Files, Paths}

import com.amazonaws.services.s3.transfer.{TransferManager, Upload}
import com.amazonaws.services.s3.transfer.model.UploadResult
import org.apache.parquet.hadoop.ParquetWriter
import org.embulk.config.TaskReport
import org.embulk.output.s3_parquet.aws.Aws
import org.embulk.spi.{Exec, Page, PageReader, TransactionalPageOutput}

case class S3ParquetPageOutput(
    outputLocalFile: String,
    reader: PageReader,
    writer: ParquetWriter[PageReader],
    aws: Aws,
    destBucket: String,
    destKey: String
) extends TransactionalPageOutput {

  private var isClosed: Boolean = false

  override def add(page: Page): Unit = {
    reader.setPage(page)
    while (reader.nextRecord()) {
      ContextClassLoaderSwapper.usingPluginClass {
        writer.write(reader)
      }
    }
  }

  override def finish(): Unit = {}

  override def close(): Unit = {
    synchronized {
      if (!isClosed) {
        ContextClassLoaderSwapper.usingPluginClass {
          writer.close()
        }
        isClosed = true
      }
    }
  }

  override def abort(): Unit = {
    close()
    cleanup()
  }

  override def commit(): TaskReport = {
    close()
    val result: UploadResult = ContextClassLoaderSwapper.usingPluginClass {
      aws.withTransferManager { xfer: TransferManager =>
        val upload: Upload =
          xfer.upload(destBucket, destKey, new File(outputLocalFile))
        upload.waitForUploadResult()
      }
    }
    cleanup()
    Exec
      .newTaskReport()
      .set("bucket", result.getBucketName)
      .set("key", result.getKey)
      .set("etag", result.getETag)
      .set("version_id", result.getVersionId)
  }

  private def cleanup(): Unit = {
    Files.delete(Paths.get(outputLocalFile))
  }
} 
Example 29
Source File: InceptionFetcher.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.core.fetcher.tensorflow

import java.net.URL
import java.nio.file.Paths

import com.typesafe.config.Config
import org.apache.commons.io.IOUtils
import org.apache.s2graph.core._
import org.apache.s2graph.core.types.VertexId

import scala.concurrent.{ExecutionContext, Future}


object InceptionFetcher {
  val ModelPath = "modelPath"

  def getImageBytes(urlText: String): Array[Byte] = {
    val url = new URL(urlText)

    IOUtils.toByteArray(url)
  }

  def predict(graphDef: Array[Byte],
              labels: Seq[String])(imageBytes: Array[Byte], topK: Int = 10): Seq[(String, Float)] = {
    try {
      val image = LabelImage.constructAndExecuteGraphToNormalizeImage(imageBytes)
      try {
        val labelProbabilities = LabelImage.executeInceptionGraph(graphDef, image)
        val topKIndices = labelProbabilities.zipWithIndex.sortBy(_._1).reverse
          .take(Math.min(labelProbabilities.length, topK)).map(_._2)

        val ls = topKIndices.map { idx => (labels(idx), labelProbabilities(idx)) }

        ls
      } catch {
        case e: Throwable => Nil
      } finally if (image != null) image.close()
    }
  }
}

class InceptionFetcher(graph: S2GraphLike) extends EdgeFetcher {

  import InceptionFetcher._

  import scala.collection.JavaConverters._
  import org.apache.s2graph.core.TraversalHelper._
  val builder = graph.elementBuilder

  var graphDef: Array[Byte] = _
  var labels: Seq[String] = _

  override def init(config: Config)(implicit ec: ExecutionContext): Unit = {
    val modelPath = config.getString(ModelPath)
    graphDef = LabelImage.readAllBytesOrExit(Paths.get(modelPath, "tensorflow_inception_graph.pb"))
    labels = LabelImage.readAllLinesOrExit(Paths.get(modelPath, "imagenet_comp_graph_label_strings.txt")).asScala
  }

  override def close(): Unit = {}

  override def fetches(queryRequests: Seq[QueryRequest],
                       prevStepEdges: Map[VertexId, Seq[EdgeWithScore]])(implicit ec: ExecutionContext): Future[Seq[StepResult]] = {
    val stepResultLs = queryRequests.map { queryRequest =>
      val vertex = queryRequest.vertex
      val queryParam = queryRequest.queryParam
      val shouldBuildParents = queryRequest.query.queryOption.returnTree || queryParam.whereHasParent
      val parentEdges = if (shouldBuildParents) prevStepEdges.getOrElse(queryRequest.vertex.id, Nil) else Nil

      val urlText = vertex.innerId.toIdString()

      val edgeWithScores = predict(graphDef, labels)(getImageBytes(urlText), queryParam.limit).flatMap { case (label, score) =>
        val tgtVertexId = builder.newVertexId(queryParam.label.service,
          queryParam.label.tgtColumnWithDir(queryParam.labelWithDir.dir), label)

        val props: Map[String, Any] = if (queryParam.label.metaPropsInvMap.contains("score")) Map("score" -> score) else Map.empty
        val edge = graph.toEdge(vertex.innerId.value, tgtVertexId.innerId.value, queryParam.labelName, queryParam.direction, props = props)

        edgeToEdgeWithScore(queryRequest, edge, parentEdges)
      }

      StepResult(edgeWithScores, Nil, Nil)
    }

    Future.successful(stepResultLs)
  }

  override def fetchEdgesAll()(implicit ec: ExecutionContext): Future[Seq[S2EdgeLike]] =
    Future.successful(Nil)
} 
Example 30
Source File: RocksMapTest.scala    From utils   with Apache License 2.0 5 votes vote down vote up
package com.indix.utils.store

import java.io.Serializable
import java.nio.file.{Paths, Files}

import org.apache.commons.io.FileUtils
import org.scalatest.{Matchers, FlatSpec}


case class TestObject(a: Int, b: String, c: Array[Int], d: Array[String]) extends Serializable {

  def equals(other: TestObject): Boolean = {
    this.a.equals(other.a) && this.b.equals(other.b) && this.c.sameElements(other.c) && this.d.sameElements(other.d)
  }

}

case class ComplexTestObject(a: Int, b: TestObject) extends Serializable {
  def equals(other: ComplexTestObject): Boolean = {
    this.a.equals(other.a) && this.b.equals(other.b)
  }
}

class RocksMapTest extends FlatSpec with Matchers {

  "RocksMap" should "serialize and deserialize the keys and values" in {
    val db = new RocksMap("test")

    val a: Int = 1
    val b: String = "hello"
    val c: Array[Int] = Array(1, 2, 3)

    val d: Array[String] = Array("a", "b", "c")

    val serialized_a = db.serialize(a)
    val serialized_b = db.serialize(b)
    val serialized_c = db.serialize(c)
    val serialized_d = db.serialize(d)
    val serialized_TestObject = db.serialize(TestObject(a, b, c, d))
    val serialized_ComplexObject = db.serialize(ComplexTestObject(a, TestObject(a, b, c, d)))

    db.deserialize[Int](serialized_a) should be(a)
    db.deserialize[String](serialized_b) should be(b)
    db.deserialize[Array[Int]](serialized_c) should be(c)
    db.deserialize[Array[String]](serialized_d) should be(d)
    db.deserialize[TestObject](serialized_TestObject).equals(TestObject(a, b, c, d)) should be(true)
    db.deserialize[ComplexTestObject](serialized_ComplexObject).equals(ComplexTestObject(a, TestObject(a, b, c, d))) should be(true)
    db.drop()
    db.close()
  }

  it should "put and get values" in {
    val db = new RocksMap("test")

    db.put(1, 1.0)
    db.get[Int, Double](1).getOrElse(0) should be(1.0)
    db.clear()
    db.drop()
    db.close()
  }

  it should "remove values" in {
    val db = new RocksMap("test")

    db.put(1, 1L)
    db.get[Int, Long](1).getOrElse(0) should be(1L)
    db.remove(1)
    db.get[Int, Long](1) should be(None)
    db.drop()
    db.close()
  }

  it should "clear all the values" in {
    val db = new RocksMap(name = "test")
    db.put(1, "hello")
    db.put(2, "yello")
    db.get(1) should not be (None)
    db.get(2) should not be (None)
    db.clear()
    db.get(1) should be(None)
    db.get(2) should be(None)
    db.drop()
    db.close()
  }

  it should "clear the data files when drop is called" in {
    val db = new RocksMap(name = "test")
    Files.exists(Paths.get(db.pathString)) should be (true)
    db.drop()
    Files.exists(Paths.get(db.pathString)) should be (false)
    db.close()
  }


} 
Example 31
Source File: OntologyHubClientTest.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package clients

import java.nio.file.Paths
import org.junit.After
import org.junit.Assert
import org.junit.Assume
import org.junit.Before
import org.junit.BeforeClass
import org.junit.Test
import org.slf4j.LoggerFactory
import play.Logger
import utilities.Adapters.AwaitFuture
import clients.HTTPClient


object OntologyHubClientTest {

  val logger = LoggerFactory.getLogger(this.getClass)

  @BeforeClass
  def check_before() {
    Assume.assumeTrue(ontonethub_is_running)
    logger.info("Ontonethub is UP! [TESTING...]")
  }

  private def ontonethub_is_running = {
    val client = HTTPClient
    client.start()
    val ontonethub = new OntonetHubClient(client.ws)
    val check = ontonethub.status().await
    client.stop()
    check
  }

} 
Example 32
Source File: CatalogStandardizationService.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package it.almawave.kb.http.endpoints

import javax.inject.Singleton
import javax.ws.rs.Path
import org.slf4j.LoggerFactory
import it.almawave.kb.http.models.OntologyMetaModel
import com.typesafe.config.ConfigFactory
import java.nio.file.Paths
import it.almawave.linkeddata.kb.catalog.CatalogBox
import it.almawave.linkeddata.kb.utils.JSONHelper
import it.almawave.daf.standardization.refactoring.CatalogStandardizer

@Singleton
@Path("conf://api-catalog-config")
class CatalogStandardizationService {

  private val logger = LoggerFactory.getLogger(this.getClass)

  val conf = ConfigFactory.parseFile(Paths.get("./conf/catalog.conf").normalize().toFile())
  val catalog = new CatalogBox(conf)
  catalog.start()

  val _standardizer = CatalogStandardizer(catalog)
  _standardizer.start

  def stardardizer = _standardizer

  //  TODO: STOP?

} 
Example 33
Source File: StandardizationQueryV1.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package it.almawave.daf.standardization.v1

import com.typesafe.config.Config
import java.nio.file.Paths
import java.nio.file.Files
import it.almawave.linkeddata.kb.catalog.VocabularyBox
import java.io.FileFilter
import java.io.File
import java.nio.file.Path
import org.slf4j.LoggerFactory


  def details(voc_box: VocabularyBox, level: Int, uri: String, lang: String) = {

    val onto_id = detect_ontology(voc_box)

    val query_path: Path = detailsQueryFile(onto_id)
      .map(_.toPath())
      .getOrElse(default_query_details)

    // Disabled because it produced too many log lines: logger.debug(s"daf.standardization> try ${voc_box.id} with details query: ${query_path}")

    val query = new String(Files.readAllBytes(query_path))
    query
      .replace("${vocabularyID}", voc_box.id)
      .replace("${level}", level.toString())
      .replace("${uri}", uri)
      .replace("${lang}", lang)

  }

} 
Example 34
Source File: MainSingleStandardization.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package it.almawave.daf.standardization.refactoring

import org.slf4j.LoggerFactory
import java.nio.file.Paths
import it.almawave.linkeddata.kb.catalog.CatalogBox
import com.typesafe.config.ConfigFactory
import it.almawave.linkeddata.kb.utils.JSONHelper

import it.almawave.linkeddata.kb.catalog.VocabularyBox

object MainSingleStandardization extends App {

  private val logger = LoggerFactory.getLogger(this.getClass)

  val conf = ConfigFactory.parseFile(Paths.get("./conf/catalog.conf").normalize().toFile())

  val catalog = new CatalogBox(conf)
  catalog.start()

  //  val vocID = "legal-status"
  //  val vocID = "theme-subtheme-mapping"
  val vocID = "licences"
  val std: VocabularyStandardizer = CatalogStandardizer(catalog).getVocabularyStandardizerByID(vocID).get
  std.start

  //  println("\n\nCSV")
  //  std.toCSV()(System.out)
  //
  //  println("\n\nTREE")
  val tree = std.toJSONTree()
  val json_tree = JSONHelper.writeToString(tree)
  println(json_tree)

  println("\n\nMETA")
  val meta = std.getMetadata()
  val json_meta = JSONHelper.writeToString(meta)
  println(json_meta)

  std.stop
  catalog.stop()

  // TODO: verify the closing of all active connections

}

object MainStandardizationAll extends App {

  private val logger = LoggerFactory.getLogger(this.getClass)
  val conf = ConfigFactory.parseFile(Paths.get("./conf/catalog.conf").normalize().toFile())

  val catalog = new CatalogBox(conf)
  catalog.start()

  val std = CatalogStandardizer(catalog)
  std.start

  val list = std.getVocabularyStandardizersList()

  list.foreach { vstd =>
    //    println(s"\n\nCSV for ${vstd.vbox}")
    vstd.toCSV()(System.out)
  }

  std.stop
  catalog.stop()

  System.exit(0)
} 
Example 35
Source File: NO_MainAllStandardization.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package it.almawave.daf.standardization.refactoring

import org.slf4j.LoggerFactory
import java.nio.file.Paths
import com.typesafe.config.ConfigFactory
import it.almawave.linkeddata.kb.catalog.CatalogBox
import scala.util.Try

object NO_MainAllStandardization extends App {

  private val logger = LoggerFactory.getLogger(this.getClass)

  val conf = ConfigFactory.parseFile(Paths.get("./conf/catalog.conf").normalize().toFile())

  val catalog = new CatalogBox(conf)
  catalog.start()

  CatalogStandardizer(catalog).getVocabularyStandardizersList()
    .zipWithIndex
    .slice(1, 2)
    .toList
    .foreach {
      case (std, i) =>

        Try {
          println(s"""\n\n$i: ${std.vbox}""")
          println("\n\nCSV_______________________________________")
          std.toCSV()(System.out)
          println("\n\n__________________________________________")
        }

    }

  catalog.stop()

} 
Example 36
Source File: KBModule.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package modules

import javax.inject._

import play.api.inject.ApplicationLifecycle
import play.api.mvc._

import scala.concurrent.Future
import com.google.inject.ImplementedBy
import play.api.Play
import play.api.Application
import play.api.Environment
import play.api.Configuration
import scala.concurrent.ExecutionContext
import play.api.Logger
import it.almawave.linkeddata.kb.utils.ConfigHelper
import it.almawave.linkeddata.kb.repo._
import scala.concurrent.ExecutionContext.Implicits.global
import java.nio.file.Paths
import play.api.Mode
import java.io.File
import it.almawave.linkeddata.kb.repo.RDFRepository
import com.typesafe.config.ConfigFactory

@ImplementedBy(classOf[KBModuleBase])
trait KBModule

@Singleton
class KBModuleBase @Inject() (lifecycle: ApplicationLifecycle) extends KBModule {

  // TODO: SPI per dev / prod
  val kbrepo = RDFRepository.memory()

  val logger = Logger.underlyingLogger

  // when application starts...
  @Inject
  def onStart(
    env: Environment,
    configuration: Configuration)(implicit ec: ExecutionContext) {

    // get configs
    val app_type = configuration.underlying.getString("app.type")

    val data_dir = app_type match {
      case "dev"  => "./dist/data"
      case "prod" => "./data"
    }
    logger.debug(s"app_type: ${app_type}")
    logger.debug(s"data_dir: ${data_dir}")

    // starting VocabularyAPI service
    var conf_voc = ConfigFactory.parseFile(new File("./conf/semantic_repository.conf").getAbsoluteFile)
    conf_voc = ConfigHelper.injectParameters(conf_voc, ("data_dir", data_dir))

    kbrepo.configuration(conf_voc)

    logger.info("KBModule.START....")
    logger.debug("KBModule using configuration:\n" + ConfigHelper.pretty(conf_voc))

    println("KBModule using configuration:\n" + ConfigHelper.pretty(conf_voc))

    // this is needed for ensure proper connection(s) etc
    kbrepo.start()

    

    // CHECK the initial (total) triples count
    var triples = kbrepo.store.size()

    logger.info(s"KBModule> ${triples} triples loaded")

  }

  // when application stops...
  lifecycle.addStopHook({ () =>

    Future.successful {

      // this is useful for saving files, closing connections, release indexes, etc
      kbrepo.stop()
      logger.info("KBModule.STOP....")

    }

  })

} 
Example 37
Source File: ScalastyleInspectionsGenerator.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala.metadata.scalastyle

import java.io.InputStream
import java.nio.file.Paths

import com.mwz.sonar.scala.metadata.scalastyle._
import com.typesafe.config.{Config, ConfigFactory}
import org.scalastyle.{Level, _}
import sbt.Keys._
import sbt._

import scala.meta._
import scala.xml.{Node, NodeSeq, XML}


  def transform(source: Tree, inspections: Seq[ScalastyleInspection]): Tree = {
    val stringified: Seq[String] = inspections.collect {
      case inspection =>
        // Is there a better way of embedding multi-line text?
        val extraDescription = inspection.extraDescription.map(s => "\"\"\"" + s + "\"\"\"")
        val justification = inspection.justification.map(s => "\"\"\"" + s + "\"\"\"")
        val params = inspection.params.map { p =>
          s"""
             |ScalastyleParam(
             |  name = "${p.name}",
             |  typ = ${p.typ},
             |  label = "${p.label}",
             |  description = \"\"\"${p.description}\"\"\",
             |  default = \"\"\"${p.default}\"\"\"
             |)
           """.stripMargin
        }

        // It doesn't seem to be straightforward to automatically convert a collection
        // into a tree using scalameta, so I'm turning it into a String so it can be parsed,
        // which is easier than constructing the tree manually.
        // Totally doable with shapeless though, but it would be a bit of an overkill in this case.
        s"""
           |ScalastyleInspection(
           |  clazz = "${inspection.clazz}",
           |  id = "${inspection.id}",
           |  label = "${inspection.label}",
           |  description = "${inspection.description}",
           |  extraDescription = $extraDescription,
           |  justification = $justification,
           |  defaultLevel = ${inspection.defaultLevel},
           |  params = ${params.toString.parse[Term].get.syntax}
           |)
         """.stripMargin
    }

    // Transform the template file.
    val term: Term = stringified.toString.parse[Term].get
    source.transform {
      case q"val AllInspections: $tpe = $expr" =>
        q"val AllInspections: $tpe = $term"
    }
  }
} 
Example 38
Source File: Metadata.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala

import java.nio.file.Paths
import java.nio.file.StandardOpenOption

import cats.data.NonEmptyChain
import cats.effect.Blocker
import cats.effect.ExitCode
import cats.effect.IO
import cats.effect.IOApp
import cats.instances.string._
import com.mwz.sonar.scala.metadata._
import com.mwz.sonar.scala.metadata.scalastyle.ScalastyleRules
import com.mwz.sonar.scala.metadata.scalastyle.ScalastyleRulesRepository
import com.mwz.sonar.scala.metadata.scapegoat.ScapegoatRules
import com.mwz.sonar.scala.metadata.scapegoat.ScapegoatRulesRepository
import fs2.Stream
import fs2.io.file._
import fs2.text
import io.circe.Printer
import io.circe.generic.JsonCodec
import io.circe.syntax._

@JsonCodec
final case class SonarScalaMetadata(
  rules: Rules,
  repositories: Map[String, RulesRepository]
)

@JsonCodec
final case class Rules(
  scalastyle: NonEmptyChain[Rule],
  scapegoat: NonEmptyChain[Rule]
)

object Metadata extends IOApp {
  private val metadata: SonarScalaMetadata =
    SonarScalaMetadata(
      rules = Rules(sort(ScalastyleRules.rules), sort(ScapegoatRules.rules)),
      repositories = Map(
        ScalastyleRulesRepository.RepositoryKey ->
        ScalastyleRulesRepository.rulesRepository
          .copy(rules = sort(ScalastyleRulesRepository.rulesRepository.rules)),
        ScapegoatRulesRepository.RepositoryKey ->
        ScapegoatRulesRepository.rulesRepository
          .copy(rules = sort(ScapegoatRulesRepository.rulesRepository.rules))
      )
    )
  private val printer: Printer =
    Printer.spaces2SortKeys.copy(
      colonLeft = "",
      lbraceLeft = "",
      rbraceRight = "",
      lbracketLeft = "",
      lrbracketsEmpty = "",
      rbracketRight = "",
      arrayCommaLeft = "",
      objectCommaLeft = ""
    )

  // Chain is missing sortBy, which should be added in 2.2.0.
  private def sort(rules: NonEmptyChain[Rule]): NonEmptyChain[Rule] =
    NonEmptyChain.fromNonEmptyList(rules.toNonEmptyList.sortBy(_.name))

  def run(args: List[String]): IO[ExitCode] = {
    val write: Stream[IO, Unit] = Stream.resource(Blocker[IO]).flatMap { blocker =>
      Stream[IO, String](metadata.asJson.printWith(printer))
        .through(text.utf8Encode)
        .through(
          writeAll(
            Paths.get("sonar-scala-metadata.json"),
            blocker,
            List(StandardOpenOption.TRUNCATE_EXISTING)
          )
        )
    }
    write.compile.drain.as(ExitCode.Success)
  }
} 
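The run method streams the rendered JSON through fs2's writeAll into a Paths.get target, truncating any previous file. Where streaming is not needed, a plain java.nio sketch of the same sink could look like this (the payload below is a placeholder, not the real metadata):

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths, StandardOpenOption}

val out = Paths.get("sonar-scala-metadata.json")
Files.write(
  out,
  """{"rules": {}, "repositories": {}}""".getBytes(StandardCharsets.UTF_8),
  StandardOpenOption.CREATE,
  StandardOpenOption.TRUNCATE_EXISTING
)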
Example 39
Source File: JUnitSensor.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package junit

import java.io.File
import java.nio.file.{Path, Paths}

import scala.jdk.CollectionConverters._

import cats.instances.list._
import com.mwz.sonar.scala.util.Log
import com.mwz.sonar.scala.util.syntax.SonarConfig._
import com.mwz.sonar.scala.util.syntax.SonarFileSystem._
import com.mwz.sonar.scala.util.syntax.SonarSensorContext._
import org.sonar.api.batch.fs.{FileSystem, InputFile}
import org.sonar.api.batch.sensor.{Sensor, SensorContext, SensorDescriptor}
import org.sonar.api.config.Configuration
import org.sonar.api.measures.CoreMetrics


  private[junit] def save(
    context: SensorContext,
    reports: Map[InputFile, JUnitReport]
  ): Unit = {
    if (reports.nonEmpty)
      log.debug(s"Parsed reports:\n${reports.mkString(", ")}")
    else
      log.info("No test metrics were saved by this sensor.")

    reports.foreach {
      case (file, report) =>
        log.info(s"Saving junit test metrics for $file.")
        context.saveMeasure[Integer](file, CoreMetrics.SKIPPED_TESTS, report.skipped)
        context.saveMeasure[Integer](file, CoreMetrics.TESTS, report.tests - report.skipped)
        context.saveMeasure[Integer](file, CoreMetrics.TEST_ERRORS, report.errors)
        context.saveMeasure[Integer](file, CoreMetrics.TEST_FAILURES, report.failures)
        context.saveMeasure[java.lang.Long](
          file,
          CoreMetrics.TEST_EXECUTION_TIME,
          (report.time * 1000).longValue
        )
    }
  }
}

object JUnitSensor {
  val SensorName = "Scala JUnit Sensor"
  val TestsPropertyKey = "sonar.tests"
  val DefaultTests = List(Paths.get("src/test/scala"))
  val ReportsPropertyKey = "sonar.junit.reportPaths"
  val DefaultReportPaths = List(Paths.get("target/test-reports"))

  private[junit] def testPaths(conf: Configuration): List[Path] =
    conf.getPaths(TestsPropertyKey, DefaultTests)

  private[junit] def reportPaths(conf: Configuration): List[Path] =
    conf.getPaths(ReportsPropertyKey, DefaultReportPaths)
} 
Example 40
Source File: ScalaPlugin.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala

import java.nio.file.{Path, Paths}

import com.mwz.sonar.scala.util.Log
import com.mwz.sonar.scala.util.syntax.Optionals._
import org.sonar.api.Plugin
import org.sonar.api.config.Configuration
import org.sonar.api.resources.AbstractLanguage
import scalariform.ScalaVersion
import scalariform.lexer.{ScalaLexer, Token}
import scalariform.utils.Utils._


final class ScalaPlugin extends Plugin {
  override def define(context: Plugin.Context): Unit = {
    context.addExtensions(
      // Global configuration.
      classOf[GlobalConfig],
      // Scala.
      classOf[Scala],
      classOf[sensor.ScalaSensor],
      // PR decoration.
      classOf[pr.GlobalIssues],
      classOf[pr.GithubPrReviewJob],
      // Scalastyle.
      classOf[scalastyle.ScalastyleRulesRepository],
      classOf[scalastyle.ScalastyleQualityProfile],
      classOf[scalastyle.ScalastyleChecker],
      classOf[scalastyle.ScalastyleSensor],
      // Scapegoat.
      classOf[scapegoat.ScapegoatRulesRepository],
      classOf[scapegoat.ScapegoatQualityProfile],
      classOf[scapegoat.ScapegoatReportParser],
      classOf[scapegoat.ScapegoatSensor],
      // Built-in quality profiles.
      classOf[qualityprofiles.ScalastyleScapegoatQualityProfile],
      classOf[qualityprofiles.RecommendedQualityProfile],
      // Scoverage.
      classOf[scoverage.ScoverageMeasures],
      classOf[scoverage.ScoverageMetrics],
      classOf[scoverage.ScoverageReportParser],
      classOf[scoverage.ScoverageSensor],
      // JUnit.
      classOf[junit.JUnitReportParser],
      classOf[junit.JUnitSensor]
    )
  }
} 
Example 41
Source File: ScoverageReportParser.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package scoverage

import java.nio.file.{Path, Paths}

import scala.xml.{Node, XML}

import cats.syntax.semigroup.catsSyntaxSemigroup
import com.mwz.sonar.scala.util.PathUtils
import org.sonar.api.scanner.ScannerSide


  private[scoverage] def extractScoverageFromNode(node: Node): Scoverage = {
    val branches = (node \\ "statement")
      .filter(node => !(node \@ "ignored").toBoolean && (node \@ "branch").toBoolean)
    val coveredBranches = branches.filter(statement => (statement \@ "invocation-count").toInt > 0)
    Scoverage(
      statements = (node \@ "statement-count").toInt,
      coveredStatements = (node \@ "statements-invoked").toInt,
      statementCoverage = (node \@ "statement-rate").toDouble,
      branches = branches.size,
      coveredBranches = coveredBranches.size,
      branchCoverage = (node \@ "branch-rate").toDouble
    )
  }
} 
Example 42
Source File: SonarConfig.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.{Path, Paths}

import cats.instances.string._
import cats.syntax.eq._
import com.mwz.sonar.scala.util.syntax.Optionals._
import org.sonar.api.config.Configuration

object SonarConfig {
  implicit final class ConfigOps(private val configuration: Configuration) extends AnyVal {

    
    @SuppressWarnings(Array("UnusedMethodParameter"))
    def getAs[T <: String](key: String)(implicit ev: T =:= String): Option[String] = {
      configuration
        .get(key)
        .toOption
        .filterNot(_.trim.isEmpty)
    }
  }
} 
Example 43
Source File: ScapegoatReportParser.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package scapegoat

import java.nio.file.{Path, Paths}

import scala.xml.XML

import org.sonar.api.scanner.ScannerSide

trait ScapegoatReportParserAPI {
  def parse(scapegoatReportPath: Path): Map[String, Seq[ScapegoatIssue]]
}


  override def parse(scapegoatReportPath: Path): Map[String, Seq[ScapegoatIssue]] = {
    val scapegoatXMLReport = XML.loadFile(scapegoatReportPath.toFile)

    val scapegoatIssues = for {
      issue <- scapegoatXMLReport \\ "warning"
      line = (issue \@ "line").toInt
      text = issue \@ "text"
      file = replaceAllDotsButLastWithSlashes(issue \@ "file")
      inspectionId = issue \@ "inspection"
    } yield ScapegoatIssue(line, text, file, inspectionId)

    scapegoatIssues.groupBy(issue => issue.file)
  }
} 
Example 44
Source File: JUnitReportParserSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar
package scala
package junit

import java.io.File
import java.nio.file.Paths

import org.scalatest.LoneElement
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.sonar.api.batch.fs.InputFile
import org.sonar.api.batch.fs.internal.{DefaultFileSystem, TestInputFileBuilder}

class JUnitReportParserSpec extends AnyFlatSpec with Matchers with WithFiles with LoneElement {
  it should "get report files" in {
    withFiles("file.xml", "file2.xml", "other.txt") { files =>
      val directories = files.map(_.getParentFile).distinct.toList
      val baseDir = directories.loneElement
      val fs = new DefaultFileSystem(baseDir)
      val parser = new JUnitReportParser(fs)

      parser.reportFiles(directories) should contain theSameElementsAs List(
        baseDir.getAbsoluteFile.toPath.resolve("file.xml").toFile,
        baseDir.getAbsoluteFile.toPath.resolve("file2.xml").toFile
      )
    }
  }

  it should "parse report files" in {
    val fs = new DefaultFileSystem(Paths.get("./"))
    val parser = new JUnitReportParser(fs)

    val expected = JUnitReport("TestFile", tests = 8, errors = 3, failures = 2, skipped = 1, time = 0.049f)

    parser
      .parseReportFiles(List(new File("./src/test/resources/junit/report.xml")))
      .loneElement shouldBe expected
  }

  it should "resolve files" in {
    val fs = new DefaultFileSystem(Paths.get("./"))
    val parser = new JUnitReportParser(fs)

    val testFile = TestInputFileBuilder
      .create("", "path/to/tests/TestFile.scala")
      .build()

    val tests = List(Paths.get("path/to/tests"))
    val report = JUnitReport("TestFile", tests = 8, errors = 3, failures = 2, skipped = 1, time = 0.049f)
    val expected: Map[InputFile, JUnitReport] = Map(testFile -> report)

    fs.add(testFile)
    parser.resolveFiles(tests, List(report)) shouldBe expected
  }

  it should "parse" in {
    val fs = new DefaultFileSystem(Paths.get("./"))
    val parser = new JUnitReportParser(fs)

    val tests = List(Paths.get("path/to/tests"))
    val directories = List(new File("src/test/resources/junit"))
    val testFile = TestInputFileBuilder
      .create("", "path/to/tests/TestFile.scala")
      .build()
    val report = JUnitReport("TestFile", tests = 8, errors = 3, failures = 2, skipped = 1, time = 0.049f)
    val expected: Map[InputFile, JUnitReport] = Map(testFile -> report)

    fs.add(testFile)
    parser.parse(tests, directories) shouldBe expected
  }
} 
Example 45
Source File: ScalaSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala

import java.nio.file.Paths

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.sonar.api.config.internal.MapSettings
import scalariform.ScalaVersion

class ScalaSpec extends AnyFlatSpec with Matchers {
  val defaultScala = ScalaVersion(2, 13)

  "getFileSuffixes" should "return Scala file suffixes" in {
    val conf = new MapSettings().asConfig()
    new Scala(conf).getFileSuffixes shouldBe Array(".scala")

    val conf2 =
      new MapSettings()
        .setProperty("sonar.scala.file.suffixes", ".scala")
        .asConfig()
    new Scala(conf2).getFileSuffixes shouldBe Array(".scala")
  }

  "getScalaVersion" should "return the available version, if properly set" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2.11.11")
      .asConfig()

    Scala.getScalaVersion(conf) shouldBe ScalaVersion(2, 11)
  }

  it should "be able to parse a milestone version" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2.13.0-M3")
      .asConfig()

    Scala.getScalaVersion(conf) shouldBe defaultScala
  }

  it should "be able to parse a version without patch" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2.12")
      .asConfig()

    Scala.getScalaVersion(conf) shouldBe ScalaVersion(2, 12)
  }

  it should "not return the default version if the property is set to '2.11.0'" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2.11.0")
      .asConfig()

    val parsedVersion = Scala.getScalaVersion(conf)
    parsedVersion should not be ScalaVersion(2, 12)
    parsedVersion shouldBe ScalaVersion(2, 11)
  }

  it should "return the default version, if the property is not set" in {
    val conf = new MapSettings().asConfig()
    Scala.getScalaVersion(conf) shouldBe defaultScala
  }

  it should "return the default version, if the version property has an empty patch" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2.12.")
      .asConfig()

    Scala.getScalaVersion(conf) shouldBe defaultScala
  }

  it should "return the default version, if the version property only contains the major version" in {
    val conf = new MapSettings()
      .setProperty("sonar.scala.version", "2")
      .asConfig()

    Scala.getScalaVersion(conf) shouldBe defaultScala
  }

  "getSourcesPaths" should "return the available sources" in {
    val conf1 = new MapSettings()
      .setProperty("sonar.sources", "sources/directory")
      .asConfig()

    Scala.getSourcesPaths(conf1) shouldBe List(Paths.get("sources/directory"))

    val conf2 = new MapSettings()
      .setProperty("sonar.sources", " sources/directory,  src/2 ")
      .asConfig()

    Scala.getSourcesPaths(conf2) shouldBe List(
      Paths.get("sources/directory"),
      Paths.get("src/2")
    )
  }

  it should "return the default value if not set" in {
    val conf = new MapSettings().asConfig()
    Scala.getSourcesPaths(conf) shouldBe List(Paths.get("src/main/scala"))
  }
} 
Example 46
Source File: ScalaSensorSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package sensor

import java.nio.file.Paths

import org.scalatest.LoneElement
import org.scalatest.flatspec.AnyFlatSpec
import org.sonar.api.batch.fs.internal.TestInputFileBuilder
import org.sonar.api.batch.sensor.internal.{DefaultSensorDescriptor, SensorContextTester}
import org.sonar.api.config.internal.MapSettings
import org.sonar.api.measures.{CoreMetrics => CM}


class ScalaSensorSpec extends AnyFlatSpec with SensorContextMatchers with LoneElement {
  val globalConfig = new GlobalConfig(new MapSettings().asConfig)
  val sensor = new ScalaSensor(globalConfig)
  behavior of "A ScalaSensor"

  it should "correctly set descriptor" in {
    val descriptor = new DefaultSensorDescriptor
    sensor.describe(descriptor)

    descriptor.name() shouldBe "Scala Sensor"
    descriptor.languages().loneElement shouldBe "scala"
  }

  it should "correctly measure ScalaFile1" in {
    val context = SensorContextTester.create(Paths.get("./src/test/resources"))
    val inputFile =
      TestInputFileBuilder.create("", "src/test/resources/ScalaFile1.scala").setLanguage("scala").build()
    context.fileSystem().add(inputFile)
    sensor.execute(context)

    val componentKey = inputFile.key()

    context should have(metric[java.lang.Integer](componentKey, CM.COMMENT_LINES_KEY, 0))
    context should have(metric[java.lang.Integer](componentKey, CM.CLASSES_KEY, 1))
    context should have(metric[java.lang.Integer](componentKey, CM.FUNCTIONS_KEY, 1))
    context should have(metric[java.lang.Integer](componentKey, CM.NCLOC_KEY, 6))
  }

  it should "correctly measure ScalaFile2" in {
    val context = SensorContextTester.create(Paths.get("./src/test/resources"))
    val inputFile =
      TestInputFileBuilder.create("", "src/test/resources/ScalaFile2.scala").setLanguage("scala").build()
    context.fileSystem().add(inputFile)
    sensor.execute(context)

    val componentKey = inputFile.key()

    context should have(metric[java.lang.Integer](componentKey, CM.COMMENT_LINES_KEY, 1))
    context should have(metric[java.lang.Integer](componentKey, CM.CLASSES_KEY, 2))
    context should have(metric[java.lang.Integer](componentKey, CM.FUNCTIONS_KEY, 2))
  }
} 
Example 47
Source File: PathUtilsSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package util

import java.nio.file.Paths

import com.mwz.sonar.scala.util.PathUtils._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.sonar.api.batch.fs.internal.DefaultFileSystem

class PathUtilsSpec extends AnyFlatSpec with Matchers {
  "relativize" should "successfully resolve a relative suffix path against a 'next' path" in {
    PathUtils.relativize(
      base = Paths.get("."),
      next = Paths.get(""),
      fullOrSuffix = Paths.get("suffix")
    ) shouldBe Paths.get("suffix")

    PathUtils.relativize(
      base = Paths.get("."),
      next = Paths.get("next"),
      fullOrSuffix = Paths.get("suffix")
    ) shouldBe Paths.get(s"next/suffix")

    PathUtils.relativize(
      base = cwd,
      next = Paths.get("next"),
      fullOrSuffix = Paths.get("suffix/test")
    ) shouldBe Paths.get(s"next/suffix/test")
  }

  it should "construct a relative path between the 'base' path and an absolute suffix" in {
    PathUtils.relativize(
      base = cwd,
      next = Paths.get(""),
      fullOrSuffix = cwd.resolve("suffix/test")
    ) shouldBe Paths.get("suffix/test")
  }

  "stripOutPrefix" should "successfully strip out the prefix" in {
    PathUtils.stripOutPrefix(
      prefix = Paths.get("a/b"),
      path = Paths.get("a/b/c")
    ) shouldBe Paths.get("c")

    PathUtils.stripOutPrefix(
      prefix = Paths.get("x/y"),
      path = Paths.get("a/b/c")
    ) shouldBe Paths.get("a/b/c")
  }

  "getModuleBaseDirectory" should "get module base directory" in {
    getProjectBaseDirectory(new DefaultFileSystem(cwd)) shouldBe Paths.get("")
    getProjectBaseDirectory(
      new DefaultFileSystem(cwd.resolve("module"))
    ) shouldBe Paths.get("module")
  }
} 
Example 48
Source File: SonarSensorContextSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.Paths

import com.mwz.sonar.scala.util.syntax.SonarSensorContext._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.sonar.api.batch.fs.internal.TestInputFileBuilder
import org.sonar.api.batch.sensor.internal.SensorContextTester
import org.sonar.api.measures.CoreMetrics

class SonarSensorContextSpec extends AnyFlatSpec with Matchers with SensorContextMatchers {
  it should "save a measure for a given input file" in {
    val ctx = SensorContextTester.create(Paths.get("./"))
    val testFile = TestInputFileBuilder
      .create("", "TestFile.scala")
      .build()

    ctx.saveMeasure[Integer](testFile, CoreMetrics.TESTS, 5)
    ctx.saveMeasure[java.lang.Long](testFile, CoreMetrics.TEST_EXECUTION_TIME, 124L)

    ctx should have(metric[Integer](testFile.key, "tests", 5))
    ctx should have(metric[java.lang.Long](testFile.key, "test_execution_time", 124L))
  }
} 
Example 49
Source File: SonarConfigSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.Paths

import com.mwz.sonar.scala.util.syntax.SonarConfig._
import org.scalatest.OptionValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.sonar.api.config.internal.MapSettings

class SonarConfigSpec extends AnyFlatSpec with Matchers with OptionValues {
  "config" should "get paths" in {
    val conf = new MapSettings()
      .setProperty("path", "this/is/a/path, another/path")
      .asConfig()
    val defaultPaths = List(Paths.get("default/path"), Paths.get("default/path2"))

    conf.getPaths("path", defaultPaths) shouldBe List(
      Paths.get("this/is/a/path"),
      Paths.get("another/path")
    )
    conf.getPaths("not.a.path", defaultPaths) shouldBe defaultPaths
  }

  it should "get a boolean" in {
    val conf = new MapSettings()
      .setProperty("bool.true", "true")
      .setProperty("bool.true2", "TRUE")
      .setProperty("bool.false", "false")
      .asConfig()

    conf.getAs[Boolean]("bool.true") shouldBe true
    conf.getAs[Boolean]("bool.true2") shouldBe true
    conf.getAs[Boolean]("bool.false") shouldBe false
    conf.getAs[Boolean]("not.a.bool") shouldBe false
  }

  it should "get a string" in {
    val conf = new MapSettings()
      .setProperty("text", "hello")
      .setProperty("number", "55")
      .setProperty("bool", "true")
      .asConfig()

    conf.getAs[String]("text").value shouldBe "hello"
    conf.getAs[String]("number").value shouldBe "55"
    conf.getAs[String]("bool").value shouldBe "true"
    conf.getAs[String]("empty") shouldBe empty
  }
} 
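The getPaths syntax exercised above splits a comma-separated property value into java.nio Paths and falls back to the defaults when the key is absent. A dependency-free sketch of the parsing step (the property string is made up):

import java.nio.file.{Path, Paths}

val raw = "this/is/a/path, another/path"   // hypothetical property value
val parsed: List[Path] =
  raw.split(',').toList.map(_.trim).filter(_.nonEmpty).map(Paths.get(_))
// parsed == List(Paths.get("this/is/a/path"), Paths.get("another/path"))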
Example 50
Source File: SonarFileSystemSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.{Path, Paths}

import cats.instances.list._
import cats.instances.option._
import com.mwz.sonar.scala.util.syntax.SonarFileSystem._
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.scalatest.OptionValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import org.sonar.api.batch.fs.FileSystem
import org.sonar.api.batch.fs.internal.DefaultFileSystem

class SonarFileSystemSpec extends AnyFlatSpec with Matchers with OptionValues with MockitoSugar {
  it should "attempt to resolve paths" in {
    val fs = new DefaultFileSystem(Paths.get("./"))

    val paths = List(Paths.get("path/1"), Paths.get("path/2"))
    fs.resolve(paths) shouldBe List(
      Paths.get("./").resolve("path/1").toAbsolutePath.normalize.toFile,
      Paths.get("./").resolve("path/2").toAbsolutePath.normalize.toFile
    )

    val path: Option[Path] = Some(Paths.get("another/path"))
    fs.resolve(path).value shouldBe
    Paths.get("./").resolve("another/path").toAbsolutePath.normalize.toFile
  }

  it should "handle exceptions gracefully" in {
    val fs = mock[FileSystem]
    val path = List(Paths.get("path"))

    when(fs.resolvePath(any())).thenThrow(new RuntimeException())

    fs.resolve(path) shouldBe empty
  }
} 
Example 51
Source File: HiveUtils.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.engine.hive.common

import java.io.File
import java.nio.file.Paths

import com.webank.wedatasphere.linkis.common.conf.{Configuration => CommonConfiguration}
import com.webank.wedatasphere.linkis.engine.hive.exception.HadoopConfSetFailedException
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.conf
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.Driver


object HiveUtils {

  def jarOfClass(cls: Class[_]):Option[String] = {
    val uri = cls.getResource("/" + cls.getName.replace('.', '/') + ".class")
    if (uri != null) {
      val uriStr = uri.toString
      if (uriStr.startsWith("jar:file:")) {
        Some(uriStr.substring("jar:file:".length, uriStr.indexOf("!")))
      } else {
        None
      }
    } else {
      None
    }
  }

  def getHiveConf:HiveConf = {
    val confDir:File = new File(CommonConfiguration.hadoopConfDir)
    if (!confDir.exists() || confDir.isFile){
      throw HadoopConfSetFailedException(41001, "hadoop conf set failed, reason: conf dir does not exist")
    }
    val hadoopConf:Configuration = new Configuration()
    hadoopConf.addResource(new Path(Paths.get(CommonConfiguration.hadoopConfDir, "core-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    hadoopConf.addResource(new Path(Paths.get(CommonConfiguration.hadoopConfDir, "hdfs-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    hadoopConf.addResource(new Path(Paths.get(CommonConfiguration.hadoopConfDir, "yarn-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    new conf.HiveConf(hadoopConf, classOf[Driver])
  }


  def msDurationToString(ms: Long): String = {
    val second = 1000
    val minute = 60 * second
    val hour = 60 * minute
    ms match {
      case t if t < second =>
        "%d ms".format(t)
      case t if t < minute =>
        "%.1f s".format(t.toFloat / second)
      case t if t < hour =>
        "%.1f m".format(t.toFloat / minute)
      case t =>
        "%.2f h".format(t.toFloat / hour)
    }
  }

  def main(args: Array[String]): Unit = {
    jarOfClass(classOf[Driver]).foreach(println)
  }
} 
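getHiveConf resolves each *-site.xml via Paths.get(confDir, name).toAbsolutePath.toFile.getAbsolutePath before wrapping it in a Hadoop Path. A Hadoop-free sketch of that resolution chain, assuming a hypothetical ./conf directory:

import java.nio.file.Paths

val confDir = "./conf"                               // hypothetical directory
val coreSite = Paths.get(confDir, "core-site.xml")   // relative java.nio Path
  .toAbsolutePath                                    // anchor it at the working directory
  .toString                                          // same string toFile.getAbsolutePath would yield
println(coreSite)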
Example 52
Source File: HiveQLProcessBuilder.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.enginemanager.hive.process

import java.nio.file.Paths

import com.webank.wedatasphere.linkis.common.conf.Configuration
import com.webank.wedatasphere.linkis.enginemanager.conf.EnvConfiguration.{DEFAULT_JAVA_OPTS, JAVA_HOME, engineGCLogPath}
import com.webank.wedatasphere.linkis.enginemanager.hive.conf.HiveEngineConfiguration
import com.webank.wedatasphere.linkis.enginemanager.impl.UserEngineResource
import com.webank.wedatasphere.linkis.enginemanager.process.JavaProcessEngineBuilder
import com.webank.wedatasphere.linkis.enginemanager.{AbstractEngineCreator, EngineResource}
import com.webank.wedatasphere.linkis.protocol.engine.RequestEngine
import org.apache.commons.lang.StringUtils
import org.slf4j.LoggerFactory

import scala.collection.mutable.ArrayBuffer


  override protected def classpathCheck(jarOrFiles: Array[String]): Unit = {
    for(jarOrFile <- jarOrFiles){
      checkJarOrFile(jarOrFile)
    }
  }
  // TODO: check the jars on the classpath
  private def checkJarOrFile(jarOrFile:String):Unit = {

  }


  override def build(engineRequest: EngineResource, request: RequestEngine): Unit = {
    this.request = request
    userEngineResource = engineRequest.asInstanceOf[UserEngineResource]
    val javaHome = JAVA_HOME.getValue(request.properties)
    if(StringUtils.isEmpty(javaHome)) {
      warn("We cannot find the java home, use java to run storage repl web server.")
      commandLine += "java"
    } else {
      commandLine += Paths.get(javaHome, "bin/java").toAbsolutePath.toFile.getAbsolutePath
    }
    if (request.properties.containsKey(HiveEngineConfiguration.HIVE_CLIENT_MEMORY.key)){
      val settingClientMemory = request.properties.get(HiveEngineConfiguration.HIVE_CLIENT_MEMORY.key)
      if (!settingClientMemory.toLowerCase().endsWith("g")){
        request.properties.put(HiveEngineConfiguration.HIVE_CLIENT_MEMORY.key, settingClientMemory + "g")
      }
      //request.properties.put(HiveEngineConfiguration.HIVE_CLIENT_MEMORY.key, request.properties.get(HiveEngineConfiguration.HIVE_CLIENT_MEMORY.key)+"g")
    }
    val clientMemory = HiveEngineConfiguration.HIVE_CLIENT_MEMORY.getValue(request.properties).toString
    if (clientMemory.toLowerCase().endsWith("g")){
      commandLine += ("-Xmx" + clientMemory.toLowerCase())
      commandLine += ("-Xms" + clientMemory.toLowerCase())
    }else{
      commandLine += ("-Xmx" + clientMemory + "g")
      commandLine += ("-Xms" + clientMemory + "g")
    }
    val javaOPTS = getExtractJavaOpts
    val alias = getAlias(request)
    if(StringUtils.isNotEmpty(DEFAULT_JAVA_OPTS.getValue))
      DEFAULT_JAVA_OPTS.getValue.format(engineGCLogPath(port, userEngineResource.getUser, alias)).split("\\s+").foreach(commandLine += _)
    if(StringUtils.isNotEmpty(javaOPTS)) javaOPTS.split("\\s+").foreach(commandLine += _)
    //engineLogJavaOpts(port, alias).trim.split(" ").foreach(commandLine += _)
    if(Configuration.IS_TEST_MODE.getValue) {
      val port = AbstractEngineCreator.getNewPort
      info(s"$toString open debug mode with port $port.")
      commandLine += s"-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$port"
    }
    var classpath = getClasspath(request.properties, getExtractClasspath)
    classpath = classpath ++ request.properties.get("jars").split(",")
    classpathCheck(classpath)
    commandLine += "-Djava.library.path=/appcom/Install/hadoop/lib/native"
    commandLine += "-cp"
    commandLine += classpath.mkString(":")
    commandLine += "com.webank.wedatasphere.linkis.engine.DataWorkCloudEngineApplication"
  }


//  override def build(engineRequest: EngineResource, request: RequestEngine): Unit = {
//    import scala.collection.JavaConversions._
//    request.properties foreach {case (k, v) => LOG.info(s"request key is $k, value is $v")}
//    this.request = request
//    super.build(engineRequest, request)
//
//  }

  override protected val addApacheConfigPath: Boolean = true
} 
Example 53
Source File: HDFSUtils.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.hadoop.common.utils

import java.io.File
import java.nio.file.Paths
import java.security.PrivilegedExceptionAction

import com.webank.wedatasphere.linkis.common.conf.Configuration.hadoopConfDir
import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.security.UserGroupInformation

object HDFSUtils {



  def getConfiguration(user: String): Configuration = getConfiguration(user, hadoopConfDir)

  def getConfiguration(user: String, hadoopConfDir: String): Configuration = {
    val confPath = new File(hadoopConfDir)
    if(!confPath.exists() || confPath.isFile) {
      throw new RuntimeException(s"Create hadoop configuration failed, path $hadoopConfDir not exists.")
    }
    val conf = new Configuration()
    conf.addResource(new Path(Paths.get(hadoopConfDir, "core-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    conf.addResource(new Path(Paths.get(hadoopConfDir, "hdfs-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    conf.addResource(new Path(Paths.get(hadoopConfDir, "yarn-site.xml").toAbsolutePath.toFile.getAbsolutePath))
    conf
  }

  def getHDFSRootUserFileSystem: FileSystem = getHDFSRootUserFileSystem(getConfiguration(HADOOP_ROOT_USER.getValue))

  def getHDFSRootUserFileSystem(conf: org.apache.hadoop.conf.Configuration): FileSystem =
    getHDFSUserFileSystem(HADOOP_ROOT_USER.getValue, conf)

  def getHDFSUserFileSystem(userName: String): FileSystem = getHDFSUserFileSystem(userName, getConfiguration(userName))

  def getHDFSUserFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem =
    getUserGroupInformation(userName)
      .doAs(new PrivilegedExceptionAction[FileSystem]{
        def run = FileSystem.get(conf)
      })
  def getUserGroupInformation(userName: String): UserGroupInformation ={
    if(KERBEROS_ENABLE.getValue) {
      val path = new File(KEYTAB_FILE.getValue , userName + ".keytab").getPath
      val user = getKerberosUser(userName)
      UserGroupInformation.setConfiguration(getConfiguration(userName))
      UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path)
    } else {
      UserGroupInformation.createRemoteUser(userName)
    }
  }

  def getKerberosUser(userName: String): String = {
    var user = userName
    if(KEYTAB_HOST_ENABLED.getValue){
      user = user+ "/" + KEYTAB_HOST.getValue
    }
    user
  }

} 
Example 54
Source File: TestUtil.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.solr

import java.io.File
import java.nio.file.Paths
import java.util.UUID

import org.apache.commons.io.FileUtils
import org.apache.solr.client.solrj.embedded.JettyConfig
import org.apache.solr.cloud.MiniSolrCloudCluster

object TestUtil {

  def miniSolrCloudCluster(): MiniSolrCloudCluster = {
    // clean up the solr files so we don't try to read collections from old runs
    FileUtils.deleteDirectory(new File("target/solr7"))

    // Set up a MiniSolrCloudCluster
    val clusterHome =
      s"${System.getProperty("user.dir")}/target/solr7/solrHome/${UUID.randomUUID()}"
    val jettyConfig =
      JettyConfig.builder().setContext("/solr").setPort(8983).stopAtShutdown(true).build()

    new MiniSolrCloudCluster(1,
                             null,
                             Paths.get(clusterHome),
                             MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML,
                             null,
                             null)
  }

  def randomIdentifier() = UUID.randomUUID().toString.substring(0, 5)
} 
Example 55
Source File: TestUtil.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.solr

import java.io.File
import java.nio.file.Paths
import java.util.UUID

import org.apache.commons.io.FileUtils
import org.apache.solr.client.solrj.embedded.JettyConfig
import org.apache.solr.cloud.MiniSolrCloudCluster

object TestUtil {

  def miniSolrCloudCluster(): MiniSolrCloudCluster = {

    val DEFAULT_SOLR_CLOUD_XML =
      """<solr>
        |
        |  <str name="shareSchema">${shareSchema:false}</str>
        |  <str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
        |  <str name="coreRootDirectory">${coreRootDirectory:target/solr4/cores}</str>
        |
        |  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
        |    <str name="urlScheme">${urlScheme:}</str>
        |    <int name="socketTimeout">${socketTimeout:90000}</int>
        |    <int name="connTimeout">${connTimeout:15000}</int>
        |  </shardHandlerFactory>
        |
        |  <solrcloud>
        |    <str name="host">127.0.0.1</str>
        |    <int name="hostPort">${hostPort:8983}</int>
        |    <str name="hostContext">${hostContext:solr}</str>
        |    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
        |    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
        |    <int name="leaderVoteWait">10000</int>
        |    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
        |    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
        |  </solrcloud>
        |
        |</solr>""".stripMargin

    System.setProperty("solr.directoryFactory", "solr.RAMDirectoryFactory")
    // clean up the solr files so we don't try to read collections from old runs
    FileUtils.deleteDirectory(new File("target/solr4"))

    // Set up a MiniSolrCloudCluster
    val clusterHome =
      s"${System.getProperty("user.dir")}/target/solr4/solrHome/${UUID.randomUUID()}"
    val jettyConfig =
      JettyConfig.builder().setContext("/solr").setPort(8983).stopAtShutdown(true).build()

    new MiniSolrCloudCluster(1, Paths.get(clusterHome), DEFAULT_SOLR_CLOUD_XML, jettyConfig)
  }

  def randomIdentifier() = UUID.randomUUID().toString.substring(0, 5)

} 
Example 56
Source File: IDEPathHelper.scala    From keycloak-benchmark   with Apache License 2.0 5 votes vote down vote up
import java.net.URI
import java.nio.file.attribute.{FileAttribute, BasicFileAttributes}
import java.nio.file.{StandardCopyOption, Paths, Files, Path}

import io.gatling.core.util.PathHelper._

class Directories(
						 val data: Path,
						 val bodies: Path,
						 val binaries: Path,
						 val results: Path
)

object IDEPathHelper {
	private val uri: URI = getClass.getClassLoader.getResource("gatling.conf").toURI

	val directories: Directories = if (uri.getScheme.startsWith("jar")) {
		val testDir = System.getProperty("test.dir");
		val mainDir: Path = if (testDir != null) {
			val dir = Paths.get(testDir);
			if (dir.exists) {
				if (!dir.isDirectory) {
					throw new IllegalArgumentException(testDir + " is not a directory")
				}
				dir
			} else {
				Files.createDirectory(dir)
			}
		} else {
			Files.createTempDirectory("gatling-")
		}
		System.out.println("Using " + mainDir + " as gatling directory")
		// unpack gatling.conf
		Files.copy(getClass.getResourceAsStream("gatling.conf"), mainDir.resolve("gatling.conf"), StandardCopyOption.REPLACE_EXISTING)
		// using createDirectories to ignore existing
		val directories = new Directories(
			Files.createDirectories(mainDir.resolve("data")),
			Files.createDirectories(mainDir.resolve("bodies")),
			Files.createDirectories(mainDir.resolve("binaries")),
			Files.createDirectories(mainDir.resolve("results")))
		val simulationFile: String = Engine.simulationClass.replace('.', '/') + ".class"
		// unpack simulation
		val targetFile: Path = mainDir.resolve("binaries").resolve(simulationFile)
		Files.createDirectories(targetFile.getParent)
		Files.copy(getClass.getResourceAsStream(simulationFile), targetFile, StandardCopyOption.REPLACE_EXISTING)
		directories
	} else {
		val projectRootDir = RichPath(uri).ancestor(3)
		val mavenResourcesDirectory = projectRootDir / "src" / "test" / "resources"
		val mavenTargetDirectory = projectRootDir / "target"

		new Directories(
			mavenResourcesDirectory / "data",
			mavenResourcesDirectory / "bodies",
			mavenTargetDirectory / "test-classes",
			mavenTargetDirectory / "results")
	}
} 
Example 57
Source File: FahrenheitToCelsius.scala    From benchmarks   with Apache License 2.0 5 votes vote down vote up
// https://github.com/functional-streams-for-scala/fs2/blob/3522315043ce31beb121c44cec4a7b1ce4e509e3/docs/src/ReadmeExample.md
package com.rossabaker
package benchmarks

import org.openjdk.jmh.annotations._

@State(Scope.Thread)
@Fork(2)
@Measurement(iterations = 10)
@Warmup(iterations = 10)
@Threads(1)
class FahrenheitToCelsius extends BenchmarkUtils {
  def fahrenheitToCelsius(f: Double): Double =
    (f - 32.0) * (5.0/9.0)

  @Benchmark
  def fs2(): Unit = {
    import _root_.fs2._
    import java.nio.file.Paths
    io.file.readAll[Task](Paths.get("testdata/fahrenheit.txt"), 4096)
      .through(text.utf8Decode)
      .through(text.lines)
      .filter(s => !s.trim.isEmpty && !s.startsWith("//"))
      .map(line => fahrenheitToCelsius(line.toDouble).toString)
      .intersperse("\n")
      .through(text.utf8Encode)
      .through(io.file.writeAll(Paths.get("testdata/celsius.txt")))
      .run
      .unsafeRun
  }

  @Benchmark
  def scalazStream(): Unit = {
    import scalaz.stream._
    import java.nio.file.Paths
    io.linesR("testdata/fahrenheit.txt")
      .filter(s => !s.trim.isEmpty && !s.startsWith("//"))
      .map(line => fahrenheitToCelsius(line.toDouble).toString)
      .intersperse("\n")
      .pipe(text.utf8Encode)
      .to(io.fileChunkW("testdata/celsius.txt"))
      .run
      .unsafePerformSync
  }
} 
Example 58
Source File: Cp.scala    From benchmarks   with Apache License 2.0 5 votes vote down vote up
package com.rossabaker
package benchmarks

import org.openjdk.jmh.annotations._

@State(Scope.Thread)
@Fork(2)
@Measurement(iterations = 10)
@Warmup(iterations = 10)
@Threads(1)
class Cp extends BenchmarkUtils {
  @Benchmark
  def fs2Sync(): Unit = {
    import _root_.fs2._, Stream._
    import java.nio.file.Paths
    io.file.readAll[Task](Paths.get("testdata/lorem-ipsum.txt"), 4096)
      .to(io.file.writeAll[Task](Paths.get("out/lorem-ipsum.txt")))
      .run
      .unsafeRun
  }

  @Benchmark
  def fs2Async(): Unit = {
    import _root_.fs2._, Stream._
    import java.nio.file.Paths
    io.file.readAllAsync[Task](Paths.get("testdata/lorem-ipsum.txt"), 4096)
      .to(io.file.writeAllAsync[Task](Paths.get("out/lorem-ipsum.txt")))
      .run
      .unsafeRun
  }

  @Benchmark
  def scalazStreamIo(): Unit = {
    import _root_.scalaz.stream._, Process._
    constant(4096)
      .through(io.fileChunkR("testdata/lorem-ipsum.txt"))
      .to(io.fileChunkW("out/lorem-ipsum.txt"))
      .run
      .unsafePerformSync
  }

  @Benchmark
  def scalazStreamNio(): Unit = {
    import _root_.scalaz.stream._, Process._
    constant(4096)
      .through(nio.file.chunkR("testdata/lorem-ipsum.txt"))
      .to(nio.file.chunkW("out/lorem-ipsum.txt"))
      .run
      .unsafePerformSync
  }

  // NOTE: the remainder of this listing is the tail of a Monix-based copyFile
  // benchmark (an Observer's onError/onComplete callbacks plus the final Await on
  // the copy Task); its beginning is truncated in the source listing.
   }
            callback.onError(ex)
          }

          def onComplete(): Unit = {
            try {
              out.close()
              callback.onSuccess(())
            } catch {
              case NonFatal(ex) =>
                callback.onError(ex)
            }
          }
        }
      }

    Await.result(
      copyFile(new File("testdata/lorem-ipsum.txt"), new File("out/lorem-ipsum.txt"), 4096)
        .runAsync(monixScheduler),
      Duration.Inf
    )
  }
} 
Example 59
Source File: WaitForThreeFlowsToComplete.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package sample.stream

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl._
import akka.util.ByteString
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent._
import scala.concurrent.duration._


object WaitForThreeFlowsToComplete extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("WaitForThreeFlowsToComplete")
  implicit val ec = system.dispatcher

  def lineSink(filename: String): Sink[String, Future[IOResult]] =
    Flow[String]
      .map(s => ByteString(s + "\n"))
      .wireTap(_ => logger.info(s"Add line to file: $filename"))
      .toMat(FileIO.toPath(Paths.get(filename)))(Keep.right) // retain the Future[IOResult] materialized value
      .withAttributes(ActorAttributes.dispatcher("custom-dispatcher-for-blocking"))

  val origSource = Source(1 to 10)
  //scan (= transform) the source
  val factorialsSource = origSource.scan(BigInt(1))((acc, next) => acc * next)

  val fastFlow = origSource.runForeach(i => logger.info(s"Reached sink: $i"))

  val slowFlow1 = factorialsSource
    .map(_.toString)
    .runWith(lineSink("factorial1.txt"))

  val slowFlow2 = factorialsSource
    .zipWith(Source(0 to 10))((num, idx) => s"$idx! = $num")
    .throttle(1, 1.second, 1, ThrottleMode.shaping)
    .runWith(lineSink("factorial2.txt"))

  val allDone = for {
    fastFlowDone <- fastFlow
    slowFlow1Done <- slowFlow1
    slowFlow2Done <- slowFlow2
  } yield (fastFlowDone, slowFlow1Done, slowFlow2Done)

  allDone.onComplete { results =>
    logger.info(s"Resulting futures from flows: $results - about to terminate")
    system.terminate()
  }
} 
Example 60
Source File: SplitWhen.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package sample.stream_shared_state

import java.nio.file.Paths

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.{FileIO, Flow, Framing, Keep, Sink, Source}
import akka.util.ByteString
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.Future
import scala.util.{Failure, Success}


object SplitWhen extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SplitWhen")
  implicit val executionContext = system.dispatcher

  val nonLinearCapacityFactor = 100 //raise to see how it scales
  val filename = "splitWhen.csv"

  def genResourceFile() = {
    logger.info(s"Writing resource file: $filename...")

    def fileSink(filename: String): Sink[String, Future[IOResult]] =
      Flow[String]
        .map(s => ByteString(s + "\n"))
        .toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)

    Source.fromIterator(() => (1 to nonLinearCapacityFactor).toList.combinations(2))
      .map(each => s"${each.head},${each.last}")
      .runWith(fileSink(filename))
  }

  val sourceOfLines = FileIO.fromPath(Paths.get(filename))
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024, allowTruncation = true)
      .map(_.utf8String))

  val csvToRecord: Flow[String, Record, NotUsed] = Flow[String]
    .map(_.split(",").map(_.trim))
    .map(stringArrayToRecord)

  val terminationHook: Flow[Record, Record, Unit] = Flow[Record]
    .watchTermination() { (_, done) =>
      done.onComplete {
        case Failure(err) => logger.info(s"Flow failed: $err")
        case _ => system.terminate(); logger.info(s"Flow terminated")
      }
    }

  val printSink = Sink.foreach[Vector[Record]](each => println(s"Reached sink: $each"))

  private def stringArrayToRecord(cols: Array[String]) = Record(cols(0), cols(1))

  private def hasKeyChanged = {
    () => {
      var lastRecordKey: Option[String] = None
      currentRecord: Record =>
        lastRecordKey match {
          case Some(currentRecord.key) | None =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, false))
          case _ =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, true))
        }
    }
  }

  genResourceFile().onComplete {
    case Success(_) =>
      logger.info(s"Start processing...")
      sourceOfLines
        .via(csvToRecord)
        .via(terminationHook)
        .statefulMapConcat(hasKeyChanged)   // stateful decision
        .splitWhen(_._2)                    // split when key has changed
        .map(_._1)                          // proceed with payload
        .fold(Vector.empty[Record])(_ :+ _) // sum payload
        .mergeSubstreams                    // better performance, but why?
        .runWith(printSink)
    case Failure(exception) => logger.info(s"Exception: $exception")
  }

  case class Record(key: String, value: String)
} 
Example 61
Source File: WritePrimes.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package sample.graphdsl

import java.nio.file.Paths
import java.util.concurrent.ThreadLocalRandom

import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl._
import akka.util.ByteString

import scala.concurrent.duration._
import scala.util.{Failure, Success}


object WritePrimes extends App {
    implicit val system = ActorSystem("WritePrimes")
    implicit val ec = system.dispatcher

    val maxRandomNumberSize = 100
    val primeSource: Source[Int, akka.NotUsed] =
      Source.fromIterator(() => Iterator.continually(ThreadLocalRandom.current().nextInt(maxRandomNumberSize)))
        .take(100)
        .filter(rnd => isPrime(rnd))
        // neighbor +2 is also prime?
        .filter(prime => isPrime(prime + 2))

    val fileSink = FileIO.toPath(Paths.get("target/primes.txt"))
    val slowSink = Flow[Int]
      .throttle(1, 1.seconds, 1, ThrottleMode.shaping)
      .map(i => ByteString(i.toString) )
      .toMat(fileSink)((_, bytesWritten) => bytesWritten)
    val consoleSink = Sink.foreach[Int](each => println(s"Reached console sink: $each"))

    // Additional processing flow, to show the nature of the composition
    val sharedDoubler = Flow[Int].map(_ * 2)

    // send primes to both sinks using graph API
    val graph = GraphDSL.create(slowSink, consoleSink)((x, _) => x) { implicit builder =>
      (slow, console) =>
        import GraphDSL.Implicits._
        val broadcastSplitter = builder.add(Broadcast[Int](2)) // the splitter - like a Unix tee
        primeSource ~> broadcastSplitter ~> sharedDoubler ~> slow // connect source to splitter, other side to slow sink (via sharedDoubler)
        broadcastSplitter ~> sharedDoubler ~> console // connect other side of splitter to console sink (via sharedDoubler)
        ClosedShape
    }
    val materialized = RunnableGraph.fromGraph(graph).run()

    materialized.onComplete {
      case Success(_) =>
        system.terminate()
      case Failure(e) =>
        println(s"Failure: ${e.getMessage}")
        system.terminate()
    }

  def isPrime(n: Int): Boolean = {
    if (n <= 1) false
    else if (n == 2) true
    else !(2 until n).exists(x => n % x == 0)
  }
} 
Example 62
Source File: KafkaServer.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package alpakka.env

import java.io.File
import java.net.InetSocketAddress
import java.nio.file.{Files, Paths}
import java.util.Properties

import kafka.server.{KafkaConfig, KafkaServerStartable}
import org.apache.commons.io.FileUtils
import org.apache.zookeeper.server.quorum.QuorumPeerConfig
import org.apache.zookeeper.server.{ServerConfig, ZooKeeperServerMain}


object KafkaServer extends App {

  val zookeeperPort = 2181

  val kafkaLogs = "/tmp/kafka-logs"
  val kafkaLogsPath = Paths.get(kafkaLogs)

  // See: https://stackoverflow.com/questions/59592518/kafka-broker-doesnt-find-cluster-id-and-creates-new-one-after-docker-restart/60864763#comment108382967_60864763
  def fix25Behaviour() = {
    val fileWithConflictingContent = kafkaLogsPath.resolve("meta.properties").toFile
    if (fileWithConflictingContent.exists())  FileUtils.forceDelete(fileWithConflictingContent)
  }

  def removeKafkaLogs(): Unit = {
    if (kafkaLogsPath.toFile.exists()) FileUtils.forceDelete(kafkaLogsPath.toFile)
  }

  // Keeps the persistent data
  fix25Behaviour()
  // If everything fails
  //removeKafkaLogs()

  val quorumConfiguration = new QuorumPeerConfig {
    // Since we do not run a cluster, we are not interested in zookeeper data
    override def getDataDir: File = Files.createTempDirectory("zookeeper").toFile
    override def getDataLogDir: File = Files.createTempDirectory("zookeeper-logs").toFile
    override def getClientPortAddress: InetSocketAddress = new InetSocketAddress(zookeeperPort)
  }

  class StoppableZooKeeperServerMain extends ZooKeeperServerMain {
    def stop(): Unit = shutdown()
  }

  val zooKeeperServer = new StoppableZooKeeperServerMain()

  val zooKeeperConfig = new ServerConfig()
  zooKeeperConfig.readFrom(quorumConfiguration)

  val zooKeeperThread = new Thread {
    override def run(): Unit = zooKeeperServer.runFromConfig(zooKeeperConfig)
  }

  zooKeeperThread.start()

  val kafkaProperties = new Properties()
  kafkaProperties.put("zookeeper.connect", s"localhost:$zookeeperPort")
  kafkaProperties.put("broker.id", "0")
  kafkaProperties.put("offsets.topic.replication.factor", "1")
  kafkaProperties.put("log.dirs", kafkaLogs)
  kafkaProperties.put("delete.topic.enable", "true")
  kafkaProperties.put("group.initial.rebalance.delay.ms", "0")
  kafkaProperties.put("transaction.state.log.min.isr", "1")
  kafkaProperties.put("transaction.state.log.replication.factor", "1")
  kafkaProperties.put("zookeeper.connection.timeout.ms", "6000")
  kafkaProperties.put("num.partitions", "10")

  val kafkaConfig = KafkaConfig.fromProps(kafkaProperties)

  val kafka = new KafkaServerStartable(kafkaConfig)

  println("About to start...")
  kafka.startup()

  scala.sys.addShutdownHook{
    println("About to shutdown...")
    kafka.shutdown()
    kafka.awaitShutdown()
    zooKeeperServer.stop()
  }

  zooKeeperThread.join()
} 
Example 63
Source File: XmlProcessing.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package alpakka.xml

import java.nio.file.Paths
import java.util.Base64

import akka.actor.ActorSystem
import akka.stream.alpakka.xml.scaladsl.XmlParsing
import akka.stream.alpakka.xml.{EndElement, ParseEvent, StartElement, TextEvent}
import akka.stream.scaladsl.{FileIO, Sink, Source}
import akka.util.ByteString

import scala.collection.immutable
import scala.concurrent.Future
import scala.util.{Failure, Success}



object XmlProcessing extends App {
  implicit val system = ActorSystem("XmlProcessing")
  implicit val executionContext = system.dispatcher

  val resultFileName = "testfile_result.jpg"

  val done = FileIO.fromPath(Paths.get("./src/main/resources/xml_with_base64_embedded.xml"))
    .via(XmlParsing.parser)
    .statefulMapConcat(() => {

      // state
      val stringBuilder: StringBuilder = StringBuilder.newBuilder
      var counter: Int = 0

      // aggregation function
      parseEvent: ParseEvent =>
        parseEvent match {
          case s: StartElement if s.attributes.contains("mediaType") =>
            stringBuilder.clear()
            val mediaType = s.attributes.head._2
            println("mediaType: " + mediaType)
            immutable.Seq(mediaType)
          case s: EndElement if s.localName == "embeddedDoc" =>
            val text = stringBuilder.toString
            println("File content: " + text) //large embedded files are read into memory
            Source.single(ByteString(text))
              .map(each => ByteString(Base64.getMimeDecoder.decode(each.toByteBuffer)))
              .runWith(FileIO.toPath(Paths.get(s"$counter-$resultFileName")))
            counter = counter + 1
            immutable.Seq(text)
          case t: TextEvent =>
            stringBuilder.append(t.text)
            immutable.Seq.empty
          case _ =>
            immutable.Seq.empty
        }
    })
    .runWith(Sink.ignore)

  terminateWhen(done)


  def terminateWhen(done: Future[_]) = {
    done.onComplete {
      case Success(_) =>
        println("Flow Success. About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
} 
Example 64
Source File: FileIOEcho.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package alpakka.file

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.FileIO

import scala.concurrent.Future
import scala.util.{Failure, Success}


object FileIOEcho extends App {
  implicit val system = ActorSystem("FileIOEcho")
  implicit val executionContext = system.dispatcher

  val sourceFileName = "./src/main/resources/testfile.jpg"
  val encFileName = "testfile.enc"
  val resultFileName = "testfile_result.jpg"

  val sourceOrig = FileIO.fromPath(Paths.get(sourceFileName), 3000)
  val sinkEnc = FileIO.toPath(Paths.get(encFileName))

  val doneEnc = sourceOrig
    //.wireTap(each => println(s"Chunk enc: $each"))
    .map(each => each.encodeBase64)
    .runWith(sinkEnc)

  doneEnc.onComplete {
    case Success(_) =>
      val sourceEnc = FileIO.fromPath(Paths.get(encFileName))
      val sinkDec = FileIO.toPath(Paths.get(resultFileName))

      val doneDec = sourceEnc
        //.wireTap(each => println(s"Chunk dec: $each"))
        .map(each => each.decodeBase64)
        .runWith(sinkDec)
      terminateWhen(doneDec)
    case Failure(ex) => println(s"Exception: $ex")
  }

  def terminateWhen(done: Future[IOResult]) = {
    done.onComplete {
      case Success(_) =>
        println(s"Flow Success. Written file: $resultFileName About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
} 
Example 65
Source File: files.scala    From spatial   with MIT License 5 votes vote down vote up
package utils.io

import java.io._
import java.nio.file._
import java.util.function.Consumer
import java.nio.file.{Files,Paths}

import scala.io.Source

object files {
  def sep: String = java.io.File.separator
  def cwd: String = new java.io.File("").getAbsolutePath
  final val BUFFER_SIZE: Int = 1024 * 4
  final val EOF = -1

  
  def copyResource(src: String, dest: String): Unit = {
    val outFile = new File(dest)
    val outPath = outFile.getParentFile
    outPath.mkdirs()
    val url = getClass.getResource(src)
    val in: InputStream = url.openStream()
    val out: OutputStream = new FileOutputStream(outFile)
    val buffer = new Array[Byte](BUFFER_SIZE)
    var n: Int = 0
    while ({n = in.read(buffer); n != EOF}) {
      out.write(buffer, 0, n)
    }
    out.close()
    in.close()
  }

  def listFiles(dir:String, exts:List[String]=Nil):List[java.io.File] = {
    val d = new java.io.File(dir)
    if (d.exists && d.isDirectory) {
      d.listFiles.filter { file =>
        file.isFile && exts.exists { ext => file.getName.endsWith(ext) }
      }.toList
    } else {
      Nil
    }
  }

  def splitPath(path:String) = {
    val file = new File(path)
    (file.getParent, file.getName)
  }

  def buildPath(parts:String*):String = {
    parts.mkString(sep)
  }

  def dirName(fullPath:String) = fullPath.split(sep).dropRight(1).mkString(sep)

  def createDirectories(dir:String) = {
    val path = Paths.get(dir)
    if (!Files.exists(path)) Files.createDirectories(path)
  }

} 
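A short usage sketch for the utilities above (not part of the original file); the directory and extension names are illustrative.

object FilesUsage extends App {
  import utils.io.files

  // Build a platform-independent path below the current working directory.
  val outDir = files.buildPath(files.cwd, "target", "demo-output")

  // Create the directory (and any missing parents) only if it does not exist yet.
  files.createDirectories(outDir)

  // List any .txt files already present there.
  files.listFiles(outDir, List(".txt")).foreach(f => println(f.getName))
}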
Example 66
Source File: AvroSourceTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.avro

import java.nio.file.Paths

import com.typesafe.config.ConfigFactory
import io.eels.schema.{Field, StructType}
import org.apache.avro.util.Utf8
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.scalatest.{Matchers, WordSpec}

class AvroSourceTest extends WordSpec with Matchers {

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(new Configuration())

  "AvroSource" should {
    "read schema" in {
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath)
      people.schema shouldBe StructType(Field("name", nullable = false), Field("job", nullable = false), Field("location", nullable = false))
    }
    "read strings as java.lang.String when eel.avro.java.string is true" in {
      System.setProperty("eel.avro.java.string", "true")
      ConfigFactory.invalidateCaches()
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath).toDataStream().toSet
      people.map(_.values) shouldBe Set(
        List("clint eastwood", "actor", "carmel"),
        List("elton john", "musician", "pinner"),
        List("issac newton", "scientist", "heaven")
      )
      System.setProperty("eel.avro.java.string", "false")
      ConfigFactory.invalidateCaches()
    }
    "read strings as utf8 when eel.avro.java.string is false" in {
      System.setProperty("eel.avro.java.string", "false")
      ConfigFactory.invalidateCaches()
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath).toDataStream().toSet
      people.map(_.values) shouldBe Set(
        List(new Utf8("clint eastwood"), new Utf8("actor"), new Utf8("carmel")),
        List(new Utf8("elton john"), new Utf8("musician"), new Utf8("pinner")),
        List(new Utf8("issac newton"), new Utf8("scientist"), new Utf8("heaven"))
      )
      System.setProperty("eel.avro.java.string", "true")
      ConfigFactory.invalidateCaches()
    }
  }
} 
Example 67
Source File: CsvSourceTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.csv

import java.nio.file.Paths

import io.eels.schema.{Field, StringType, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.scalatest.{Matchers, WordSpec}

class CsvSourceTest extends WordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  "CsvSource" should {
    "read schema" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).schema shouldBe StructType(
        Field("a", StringType, true),
        Field("b", StringType, true),
        Field("c", StringType, true)
      )
    }
    "support null cell value option as null" in {
      val file = getClass.getResource("/io/eels/component/csv/csvwithempty.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withNullValue(null).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", null, "3"))
    }
    "support null cell value replacement value" in {
      val file = getClass.getResource("/io/eels/component/csv/csvwithempty.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withNullValue("foo").toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", "foo", "3"))
    }
    "read from path" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().size shouldBe 3
      CsvSource(path).withHeader(Header.None).toDataStream().size shouldBe 4
    }
    "allow specifying manual schema" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      val schema = StructType(
        Field("test1", StringType, true),
        Field("test2", StringType, true),
        Field("test3", StringType, true)
      )
      CsvSource(path).withSchema(schema).toDataStream().schema shouldBe schema
    }
    "support reading header" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().collect.map(_.values).toSet shouldBe
        Set(Vector("e", "f", "g"), Vector("1", "2", "3"), Vector("4", "5", "6"))
    }
    "support skipping header" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.None).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("a", "b", "c"), Vector("e", "f", "g"), Vector("1", "2", "3"), Vector("4", "5", "6"))
    }
    "support delimiters" in {
      val file = getClass.getResource("/io/eels/component/csv/psv.psv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withDelimiter('|').toDataStream().collect.map(_.values).toSet shouldBe
        Set(Vector("e", "f", "g"))
      CsvSource(path).withDelimiter('|').withHeader(Header.None).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("a", "b", "c"), Vector("e", "f", "g"))
    }
    "support comments for headers" in {
      val file = getClass.getResource("/io/eels/component/csv/comments.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstComment).schema shouldBe StructType(
        Field("a", StringType, true),
        Field("b", StringType, true),
        Field("c", StringType, true)
      )
      CsvSource(path).withHeader(Header.FirstComment).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", "2", "3"), Vector("e", "f", "g"), Vector("4", "5", "6"))
    }
    "terminate if asking for first comment but no comments" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstComment).schema shouldBe StructType(
        Field("", StringType, true)
      )
    }
    "support skipping corrupt rows" ignore {
      val file = getClass.getResource("/io/eels/component/csv/corrupt.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().toVector.map(_.values) shouldBe
        Vector(Vector("1", "2", "3"))
    }
  }
} 
Example 68
Source File: StructTypeInferrerTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.schema

import java.nio.file.Paths

import io.eels.component.csv.{CsvSource, Header}
import io.eels.{DataTypeRule, SchemaInferrer}
import org.scalatest.{Matchers, WordSpec}

class StructTypeInferrerTest extends WordSpec with Matchers {

  val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
  val path = Paths.get(file)

  "SchemaInferrer" should {
    "use rules to infer column types" in {
      val inferrer = SchemaInferrer(StringType, DataTypeRule("a", IntType(true), false), DataTypeRule("b", BooleanType))
      CsvSource(path).withHeader(Header.FirstRow).withSchemaInferrer(inferrer).schema shouldBe StructType(
        Field("a", IntType(true), false),
        Field("b", BooleanType, true),
        Field("c", StringType, true)
      )
    }
  }
} 
Example 69
Source File: ApplySpecMain.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.cli

import java.io.PrintStream
import java.nio.file.{Path, Paths}

import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveOps, HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient

object ApplySpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf
  implicit val client = new HiveMetaStoreClient(hiveConf)

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel apply-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"

      opt[String]("spec") required() action { (schema, o) =>
        o.copy(specPath = Paths.get(schema))
      } text "specify path to eel spec"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case hive: HiveSource =>
            HiveOps.applySpec(HiveSpec(options.specPath), false)
          case _ =>
            sys.error(s"Unsupported source $source")
        }
      case _ =>
    }
  }

  case class Options(source: String = null, specPath: Path = null)
} 
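An invocation sketch for the CLI entry point above (not part of the original file); the dataset URI and spec path are illustrative.

object ApplySpecMainUsage extends App {
  // apply() parses the arguments and applies the eel spec to the Hive source.
  io.eels.cli.ApplySpecMain(Seq("--dataset", "hive:mydb:mytable", "--spec", "/tmp/mytable-spec.json"))
}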
Example 70
Source File: HbaseTests.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.hbase

import java.nio.file.Paths
import java.util.UUID

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase._
import org.apache.hadoop.hdfs.MiniDFSCluster

trait HbaseTests {
  val MINI_CLUSTER_ROOT = "miniclusters"

  def startHBaseCluster(clusterName: String): MiniHBaseCluster = {
    // Setup the underlying HDFS mini cluster for HBASE mini cluster
    System.clearProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA)
    val clusterFolder = s"${clusterName}_${UUID.randomUUID().toString}"
    val clusterPath = Paths.get(MINI_CLUSTER_ROOT, clusterFolder)
    val conf = new Configuration()
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, clusterPath.toAbsolutePath.toString)
    val miniDFSCluster = new MiniDFSCluster.Builder(conf).build()

    // Now setup and start the HBASE mini cluster
    val hBaseTestingUtility = new HBaseTestingUtility
    hBaseTestingUtility.setDFSCluster(miniDFSCluster)
    hBaseTestingUtility.startMiniCluster(1, 1)
    val cluster = hBaseTestingUtility.getHBaseCluster
    cluster.waitForActiveAndReadyMaster()
    cluster
  }

} 
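A sketch of mixing the trait above into a ScalaTest spec (not part of the original file); MiniHBaseCluster#getMaster and #shutdown are assumed from the HBase testing API.

import io.eels.component.hbase.HbaseTests
import org.scalatest.FlatSpec

class HbaseSmokeTest extends FlatSpec with HbaseTests {
  "startHBaseCluster" should "bring up a one-node HBase mini cluster" in {
    val cluster = startHBaseCluster("hbase-smoke-test")
    assert(cluster.getMaster != null)
    cluster.shutdown()
  }
}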
Example 71
Source File: HiveTableFilesFnTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.hive

import java.nio.file.Paths

import com.sksamuel.exts.Logging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hdfs.MiniDFSCluster
import org.apache.hadoop.hive.metastore.IMetaStoreClient
import org.apache.hadoop.hive.metastore.api.Table
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}

class HiveTableFilesFnTest extends FlatSpec with Matchers with Logging with MockitoSugar {

  System.clearProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA)
  val clusterPath = Paths.get("miniclusters", "cluster")
  val conf = new Configuration()
  conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, clusterPath.toAbsolutePath.toString)
  val cluster = new MiniDFSCluster.Builder(conf).build()
  implicit val fs = cluster.getFileSystem

  "HiveTableFilesFn" should "detect all files in root when no partitions" in {

    implicit val client = mock[IMetaStoreClient]
    org.mockito.Mockito.when(client.getTable("default", "mytable")).thenReturn(new Table)

    val root = new Path("tab1")
    fs.mkdirs(root)

    // table scanner will skip 0 length files
    val a = fs.create(new Path(root, "a"))
    a.write(1)
    a.close()

    val b = fs.create(new Path(root, "b"))
    b.write(1)
    b.close()

    HiveTableFilesFn("default", "mytable", fs.resolvePath(root), Nil).values.flatten.map(_.getPath.getName).toSet shouldBe Set("a", "b")
  }

  it should "ignore hidden files in root when no partitions" in {
    implicit val client = mock[IMetaStoreClient]
    org.mockito.Mockito.when(client.getTable("default", "mytable")).thenReturn(new Table)

    val root = new Path("tab2")
    fs.mkdirs(root)

    // table scanner will skip 0 length files
    val a = fs.create(new Path(root, "a"))
    a.write(1)
    a.close()

    val b = fs.create(new Path(root, "_b"))
    b.write(1)
    b.close()

    HiveTableFilesFn("default", "mytable", fs.resolvePath(root), Nil).values.flatten.map(_.getPath.getName).toSet shouldBe Set("a")
  }
} 
Example 72
Source File: package.scala    From sbt-reactive-app   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.rp.sbtreactiveapp

import java.io.File
import java.nio.file.Paths
import org.apache.tools.ant.filters.StringInputStream
import sbt.Logger
import scala.collection.immutable.Seq
import scala.sys.process.{ Process, ProcessLogger }

package object cmd {
  
  private[cmd] def run(
    cwd: File = Paths.get(".").toRealPath().toFile,
    env: Map[String, String] = Map.empty,
    input: Option[String] = None,
    logStdErr: Option[Logger] = None,
    logStdOut: Option[Logger] = None)(args: String*): (Int, Seq[String], Seq[String]) = {
    var outList = List.empty[String]
    var errList = List.empty[String]

    val stringLogger = ProcessLogger(
      { s =>
        outList = s :: outList

        logStdOut.foreach(_.info(s))
      },
      { s =>
        errList = s :: errList

        logStdErr.foreach(_.error(s))
      })

    val exitCode =
      input
        .map(new StringInputStream(_))
        .foldLeft(Process(args, cwd = cwd, env.toVector: _*))(_ #< _)
        .run(stringLogger)
        .exitValue()

    (exitCode, outList.reverse, errList.reverse)
  }

  private[cmd] def runSuccess(failMsg: String)(result: (Int, Seq[String], Seq[String])): Unit = {
    if (result._1 != 0) {
      sys.error(s"$failMsg [${result._1}]")
    }
  }
} 
Example 73
Source File: SftpStoreTest.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore
package sftp

import java.nio.file.Paths
import java.util.Properties

import cats.effect.IO
import cats.effect.concurrent.MVar
import com.jcraft.jsch.{ChannelSftp, JSch}

class SftpStoreTest extends AbstractStoreTest {

  val session = try {
    val jsch = new JSch()

    val session = jsch.getSession("blob", "sftp-container", 22)
    session.setTimeout(10000)
    session.setPassword("password")

    val config = new Properties
    config.put("StrictHostKeyChecking", "no")
    session.setConfig(config)

    session.connect()

    session
  } catch {
    // this is UGLY!!! but just want to ignore errors if you don't have sftp container running
    case e: Throwable =>
      e.printStackTrace()
      null
  }

  private val rootDir = Paths.get("tmp/sftp-store-root/").toAbsolutePath.normalize
  val mVar = MVar.empty[IO, ChannelSftp].unsafeRunSync()
  override val store: Store[IO] = new SftpStore[IO]("/", session, blocker, mVar, None, 10000)
  override val root: String = "sftp_tests"

  // remove dirs created by AbstractStoreTest
  override def afterAll(): Unit = {
    super.afterAll()

    try {
      session.disconnect()
    } catch {
      case _: Throwable =>
    }

    cleanup(rootDir.resolve(s"$root/test-$testRun"))

  }

} 
Example 74
Source File: KubernetesTestComponents.scala    From spark-integration   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.k8s.integrationtest

import java.nio.file.{Path, Paths}
import java.util.UUID

import scala.collection.mutable
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.client.DefaultKubernetesClient
import org.scalatest.concurrent.Eventually

private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesClient) {

  val namespaceOption = Option(System.getProperty("spark.kubernetes.test.namespace"))
  val hasUserSpecifiedNamespace = namespaceOption.isDefined
  val namespace = namespaceOption.getOrElse(UUID.randomUUID().toString.replaceAll("-", ""))
  private val serviceAccountName =
    Option(System.getProperty("spark.kubernetes.test.serviceAccountName"))
      .getOrElse("default")
  val kubernetesClient = defaultClient.inNamespace(namespace)
  val clientConfig = kubernetesClient.getConfiguration

  def createNamespace(): Unit = {
    defaultClient.namespaces.createNew()
      .withNewMetadata()
      .withName(namespace)
      .endMetadata()
      .done()
  }

  def deleteNamespace(): Unit = {
    defaultClient.namespaces.withName(namespace).delete()
    Eventually.eventually(KubernetesSuite.TIMEOUT, KubernetesSuite.INTERVAL) {
      val namespaceList = defaultClient
        .namespaces()
        .list()
        .getItems
        .asScala
      require(!namespaceList.exists(_.getMetadata.getName == namespace))
    }
  }

  def newSparkAppConf(): SparkAppConf = {
    new SparkAppConf()
      .set("spark.master", s"k8s://${kubernetesClient.getMasterUrl}")
      .set("spark.kubernetes.namespace", namespace)
      .set("spark.executor.memory", "500m")
      .set("spark.executor.cores", "1")
      .set("spark.executors.instances", "1")
      .set("spark.app.name", "spark-test-app")
      .set("spark.ui.enabled", "true")
      .set("spark.testing", "false")
      .set("spark.kubernetes.submission.waitAppCompletion", "false")
      .set("spark.kubernetes.authenticate.driver.serviceAccountName", serviceAccountName)
  }
}

private[spark] class SparkAppConf {

  private val map = mutable.Map[String, String]()

  def set(key: String, value: String): SparkAppConf = {
    map.put(key, value)
    this
  }

  def get(key: String): String = map.getOrElse(key, "")

  def setJars(jars: Seq[String]): Unit = set("spark.jars", jars.mkString(","))

  override def toString: String = map.toString

  def toStringArray: Iterable[String] = map.toList.flatMap(t => List("--conf", s"${t._1}=${t._2}"))
}

private[spark] case class SparkAppArguments(
    mainAppResource: String,
    mainClass: String,
    appArgs: Array[String])

private[spark] object SparkAppLauncher extends Logging {

  def launch(
      appArguments: SparkAppArguments,
      appConf: SparkAppConf,
      timeoutSecs: Int,
      sparkHomeDir: Path): Unit = {
    val sparkSubmitExecutable = sparkHomeDir.resolve(Paths.get("bin", "spark-submit"))
    logInfo(s"Launching a spark app with arguments $appArguments and conf $appConf")
    val commandLine = mutable.ArrayBuffer(sparkSubmitExecutable.toFile.getAbsolutePath,
      "--deploy-mode", "cluster",
      "--class", appArguments.mainClass,
      "--master", appConf.get("spark.master")
    ) ++ appConf.toStringArray :+
      appArguments.mainAppResource
    if (appArguments.appArgs.nonEmpty) {
      commandLine += appArguments.appArgs.mkString(" ")
    }
    logInfo(s"Launching a spark app with command line: ${commandLine.mkString(" ")}")
    ProcessUtils.executeProcess(commandLine.toArray, timeoutSecs)
  }
} 
Example 75
Source File: Minikube.scala    From spark-integration   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.k8s.integrationtest.backend.minikube

import java.io.File
import java.nio.file.Paths

import io.fabric8.kubernetes.client.{ConfigBuilder, DefaultKubernetesClient}

import org.apache.spark.deploy.k8s.integrationtest.{Logging, ProcessUtils}

// TODO support windows
private[spark] object Minikube extends Logging {

  private val MINIKUBE_STARTUP_TIMEOUT_SECONDS = 60

  def getMinikubeIp: String = {
    val outputs = executeMinikube("ip")
      .filter(_.matches("^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$"))
    assert(outputs.size == 1, "Unexpected amount of output from minikube ip")
    outputs.head
  }

  def getMinikubeStatus: MinikubeStatus.Value = {
    val statusString = executeMinikube("status")
      .filter(line => line.contains("minikubeVM: ") || line.contains("minikube:"))
      .head
      .replaceFirst("minikubeVM: ", "")
      .replaceFirst("minikube: ", "")
    MinikubeStatus.unapply(statusString)
        .getOrElse(throw new IllegalStateException(s"Unknown status $statusString"))
  }

  def getKubernetesClient: DefaultKubernetesClient = {
    val kubernetesMaster = s"https://${getMinikubeIp}:8443"
    val userHome = System.getProperty("user.home")
    val kubernetesConf = new ConfigBuilder()
      .withApiVersion("v1")
      .withMasterUrl(kubernetesMaster)
      .withCaCertFile(Paths.get(userHome, ".minikube", "ca.crt").toFile.getAbsolutePath)
      .withClientCertFile(Paths.get(userHome, ".minikube", "apiserver.crt").toFile.getAbsolutePath)
      .withClientKeyFile(Paths.get(userHome, ".minikube", "apiserver.key").toFile.getAbsolutePath)
      .build()
    new DefaultKubernetesClient(kubernetesConf)
  }

  private def executeMinikube(action: String, args: String*): Seq[String] = {
    ProcessUtils.executeProcess(
      Array("bash", "-c", s"minikube $action") ++ args, MINIKUBE_STARTUP_TIMEOUT_SECONDS)
  }
}

private[spark] object MinikubeStatus extends Enumeration {

  // The following states are listed according to
  // https://github.com/docker/machine/blob/master/libmachine/state/state.go.
  val STARTING = status("Starting")
  val RUNNING = status("Running")
  val PAUSED = status("Paused")
  val STOPPING = status("Stopping")
  val STOPPED = status("Stopped")
  val ERROR = status("Error")
  val TIMEOUT = status("Timeout")
  val SAVED = status("Saved")
  val NONE = status("")

  def status(value: String): Value = new Val(nextId, value)
  def unapply(s: String): Option[Value] = values.find(s == _.toString)
} 
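A minimal usage sketch (not part of the original file), only meaningful on a machine with a running minikube VM. It is declared in the same package because Minikube is private[spark]; getMasterUrl and close are assumed from the fabric8 DefaultKubernetesClient API.

package org.apache.spark.deploy.k8s.integrationtest.backend.minikube

object MinikubeUsage extends App {
  println(s"minikube ip: ${Minikube.getMinikubeIp}")
  println(s"minikube status: ${Minikube.getMinikubeStatus}")

  val client = Minikube.getKubernetesClient
  println(s"Kubernetes master: ${client.getMasterUrl}")
  client.close()
}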
Example 76
Source File: Utils.scala    From tispark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import java.io.{File, PrintWriter}
import java.nio.file.{Files, Paths}
import java.util.Properties

import org.slf4j.Logger

import scala.collection.JavaConversions._

object Utils {

  def writeFile(content: String, path: String): Unit =
    TryResource(new PrintWriter(path))(_.close()) {
      _.print(content)
    }

  def TryResource[T](res: T)(closeOp: T => Unit)(taskOp: T => Unit): Unit =
    try {
      taskOp(res)
    } finally {
      closeOp(res)
    }

  def readFile(path: String): List[String] =
    Files.readAllLines(Paths.get(path)).toList

  def getOrThrow(prop: Properties, key: String): String = {
    val jvmProp = System.getProperty(key)
    if (jvmProp != null) {
      jvmProp
    } else {
      val v = prop.getProperty(key)
      if (v == null) {
        throw new IllegalArgumentException(key + " is null")
      } else {
        v
      }
    }
  }

  def getFlagOrFalse(prop: Properties, key: String): Boolean =
    getFlag(prop, key, "false")

  private def getFlag(prop: Properties, key: String, defValue: String): Boolean =
    getOrElse(prop, key, defValue).equalsIgnoreCase("true")

  def getOrElse(prop: Properties, key: String, defValue: String): String = {
    val jvmProp = System.getProperty(key)
    if (jvmProp != null) {
      jvmProp
    } else {
      Option(prop.getProperty(key)).getOrElse(defValue)
    }
  }

  def getFlagOrTrue(prop: Properties, key: String): Boolean =
    getFlag(prop, key, "true")

  def time[R](block: => R)(logger: Logger): R = {
    val t0 = System.nanoTime()
    val result = block
    val t1 = System.nanoTime()
    logger.info("Elapsed time: " + (t1 - t0) / 1000.0 / 1000.0 / 1000.0 + "s")
    result
  }

  def ensurePath(basePath: String, paths: String*): Boolean =
    new File(joinPath(basePath, paths: _*)).mkdirs()

  def joinPath(basePath: String, paths: String*): String =
    Paths.get(basePath, paths: _*).toAbsolutePath.toString
} 
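A small usage sketch for the helpers above (not part of the original file); the paths and property content are illustrative.

object UtilsUsage extends App {
  import org.apache.spark.sql.test.Utils

  val base = System.getProperty("java.io.tmpdir")
  // ensurePath creates the directories via File#mkdirs; joinPath delegates to Paths.get.
  Utils.ensurePath(base, "tispark-demo")
  val confPath = Utils.joinPath(base, "tispark-demo", "demo.properties")

  Utils.writeFile("spark.app.name=demo", confPath)
  Utils.readFile(confPath).foreach(println)
}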
Example 77
Source File: GlobalWatchService.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Path, Paths, WatchEvent}

import akka.actor.{Actor, ActorLogging, ActorRef}
import org.apache.iota.fey.GlobalWatchService.REGISTER_WATCHER_PERFORMER
import org.apache.iota.fey.WatchingDirectories.STOPPED

class GlobalWatchService extends Actor with ActorLogging{

  //WatchService
  var watchThread:Thread = null
  val watchFileTask:GlobalWatchServiceTask = new GlobalWatchServiceTask(self)

  override def preStart(): Unit = {
    startWatcher("PRE-START")
  }

  override def postStop(): Unit = {
    stopWatcher("POST-STOP")
  }

  private def startWatcher(from: String) = {
    log.info(s"Starting Global Watcher from $from")
    watchThread = new Thread(watchFileTask, "FEY_GLOBAL_WATCH_SERVICE_PERFORMERS")
    watchThread.setDaemon(true)
    watchThread.start()
  }

  private def stopWatcher(from: String) = {
    log.info(s"Stopping Global Watcher from $from")
    if(watchThread != null && watchThread.isAlive){
      watchThread.interrupt()
      watchThread = null
    }
  }

  override def receive: Receive = {
    case REGISTER_WATCHER_PERFORMER(path, file_name, actor, events, loadExists) =>
      registerPath(path,file_name,actor,events,loadExists)
    case STOPPED =>
      stopWatcher("STOPPED-THREAD")
      startWatcher("STOPPED-THREAD")
    case x => log.error(s"Unknown message $x")
  }

  private def broadcastMessageIfFileExists(actor: ActorRef, pathWithFile: String) = {
    val filePath = Paths.get(pathWithFile)
    if(Files.exists(filePath)){
      log.info(s"File $pathWithFile exists. Broadcasting message to actor ${actor.path.toString}")
      actor ! GlobalWatchService.ENTRY_CREATED(filePath)
    }
  }

  private def registerPath(dir_path: String, file_name:Option[String], actor: ActorRef, events: Array[WatchEvent.Kind[_]], loadExists: Boolean) = {
    WatchingDirectories.actorsInfo.get((dir_path,file_name)) match {
      case Some(info) =>
        val newInfo:Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => {
          info.get(event) match {
            case Some(actors) => (event, (Array(actor) ++ actors))
            case None => (event, Array(actor))
          }
        }).toMap
        WatchingDirectories.actorsInfo.put((dir_path,file_name), info ++ newInfo)
        watchFileTask.watch(Paths.get(dir_path),actor.path.toString,events)
      case None =>
        val tmpEvents:Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => {(event, Array(actor))}).toMap
        WatchingDirectories.actorsInfo.put((dir_path,file_name), tmpEvents)
        watchFileTask.watch(Paths.get(dir_path),actor.path.toString,events)
    }

    if(file_name.isDefined && loadExists){
      log.info(s"Checking if file $dir_path/${file_name.get} already exist")
      broadcastMessageIfFileExists(actor, s"$dir_path/${file_name.get}")
    }

  }

}

object GlobalWatchService{
  sealed case class ENTRY_CREATED(path:Path)
  sealed case class ENTRY_MODIFIED(path:Path)
  sealed case class ENTRY_DELETED(path:Path)
  sealed case class REGISTER_WATCHER_PERFORMER(dir_path: String, file_name:Option[String],
                                               actor: ActorRef, events: Array[WatchEvent.Kind[_]],
                                               loadIfExists: Boolean)
} 
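A hedged sketch of how another actor might register with the service above (not part of the original project); the directory, file name, and actor names are illustrative, and the explicit Array[WatchEvent.Kind[_]] type matches the message's existential element type.

import java.nio.file.{StandardWatchEventKinds, WatchEvent}

import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, Props}
import org.apache.iota.fey.GlobalWatchService

class WatcherClient(watchService: ActorRef) extends Actor with ActorLogging {
  override def preStart(): Unit =
    watchService ! GlobalWatchService.REGISTER_WATCHER_PERFORMER(
      dir_path = "/tmp/fey/json",
      file_name = Some("orchestration.json"),
      actor = self,
      events = Array[WatchEvent.Kind[_]](StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY),
      loadIfExists = true)

  override def receive: Receive = {
    case GlobalWatchService.ENTRY_CREATED(path) => log.info(s"Created: $path")
    case other => log.debug(s"Ignoring $other")
  }
}

object WatcherClientApp extends App {
  val system = ActorSystem("WATCH-DEMO")
  val watchService = system.actorOf(Props[GlobalWatchService], "GLOBAL_WATCH_SERVICE")
  system.actorOf(Props(new WatcherClient(watchService)), "WATCHER_CLIENT")
}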
Example 78
Source File: JsonReceiverActor.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.Paths
import java.io.File

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import play.api.libs.json.{JsValue, Json}

class JsonReceiverActor extends Actor with ActorLogging {

  import JsonReceiverActor._

  val monitoring_actor = FEY_MONITOR.actorRef
  var watchFileTask: WatchServiceReceiver = _
  var watchThread: Thread = _

  override def preStart() {
    prepareDynamicJarRepo()
    processCheckpointFiles()

    watchFileTask = new WatchServiceReceiver(self)
    watchThread = new Thread(watchFileTask, GLOBAL_DEFINITIONS.WATCH_SERVICE_THREAD)

    monitoring_actor  ! Monitor.START(Utils.getTimestamp)
    watchThread.setDaemon(true)
    watchThread.start()

    watchFileTask.watch(Paths.get(CONFIG.JSON_REPOSITORY))
  }

  private def prepareDynamicJarRepo() = {
    val jarDir = new File(CONFIG.DYNAMIC_JAR_REPO)
    if (!jarDir.exists()){
      jarDir.mkdir()
    }else if(CONFIG.DYNAMIC_JAR_FORCE_PULL){
      jarDir.listFiles().foreach(_.delete())
    }
  }


  private def processCheckpointFiles() = {
    if (CONFIG.CHEKPOINT_ENABLED) {
      val checkpoint = new CheckpointProcessor(self)
      checkpoint.run()
    }
  }

  override def postStop() {
    monitoring_actor  ! Monitor.STOP(Utils.getTimestamp)
    watchThread.interrupt()
    watchThread.join()
  }

  override def postRestart(reason: Throwable): Unit = {
    monitoring_actor  ! Monitor.RESTART(reason, Utils.getTimestamp)
    preStart()
  }

  override def receive: Receive = {
    case JSON_RECEIVED(json, file) =>
      log.info(s"JSON RECEIVED => ${Json.stringify(json)}")
      context.parent ! FeyCore.ORCHESTRATION_RECEIVED(json, Some(file))

    case _ =>
  }

}

object JsonReceiverActor {

  case class JSON_RECEIVED(json: JsValue, file: File)

} 
Example 79
Source File: FeyGenericActorReceiver.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.io.{File, FileOutputStream}
import java.net.URL
import java.nio.file.{Files, Paths}
import com.eclipsesource.schema._
import akka.actor.ActorRef
import com.eclipsesource.schema.SchemaValidator
import org.apache.commons.io.IOUtils
import play.api.libs.json._
import scala.concurrent.duration._
import scala.util.Properties._

abstract class FeyGenericActorReceiver(override val params: Map[String,String] = Map.empty,
                                       override val backoff: FiniteDuration = 1.minutes,
                                       override val connectTo: Map[String,ActorRef] = Map.empty,
                                       override val schedulerTimeInterval: FiniteDuration = 2.seconds,
                                       override val orchestrationName: String = "",
                                       override val orchestrationID: String = "",
                                       override val autoScale: Boolean = false) extends FeyGenericActor{

  private[fey] val feyCore = FEY_CORE_ACTOR.actorRef

  override final def processMessage[T](message: T, sender: ActorRef): Unit = {
    try {
      val jsonString = getJSONString(message)
      if(jsonString != "{}") {
        processJson(jsonString)
      }
      startBackoff()
    }catch{
      case e: Exception => log.error(e, s"Could not process message $message")
    }
  }

  private[fey] def processJson(jsonString: String) = {
    var orchID:String = "None"
    try{
      val orchestrationJSON = Json.parse(jsonString)
      orchID = (orchestrationJSON \ JSON_PATH.GUID).as[String]
      val valid = validJson(orchestrationJSON)
      if(valid && (orchestrationJSON \ JSON_PATH.COMMAND).as[String].toUpperCase != "DELETE"){
        checkForLocation(orchestrationJSON)
      }
      if(valid) {
        feyCore ! FeyCore.ORCHESTRATION_RECEIVED(orchestrationJSON, None)
      }else{
        log.warning(s"Could not forward Orchestration $orchID. Invalid JSON schema")
      }
    } catch {
      case e: Exception =>
        log.error(e, s"Orchestration $orchID could not be forwarded")
    }
  }

  
  def resolveCredentials(credentials: Option[JsObject]):Option[(String, String)] = {
    credentials match {
      case None => None
      case Some(cred) =>
        val user = (cred \ JSON_PATH.JAR_CRED_USER).as[String]
        val password = (cred \ JSON_PATH.JAR_CRED_PASSWORD).as[String]
        Option(envOrElse(user,user), envOrElse(password,password))
    }
  }

} 
Example 80
Source File: JsonReceiver.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.io.FileOutputStream
import java.net.URL
import java.io.File

import com.eclipsesource.schema._
import org.slf4j.LoggerFactory
import play.api.libs.json._
import JSON_PATH._
import java.nio.file.{Files, Paths}

import org.apache.commons.io.IOUtils
import org.apache.commons.codec.binary.Base64
import scala.util.Properties._


  // Only a fragment of the JsonReceiver trait survives in this listing; based on
  // its use in JsonReceiverSpec and WatchServiceReceiverSpec below, the full trait
  // extends Runnable and also defines execute(), validJson and checkForLocation.
  def exceptionOnRun(e: Exception): Unit
}

object HttpBasicAuth {
  val BASIC = "Basic"
  val AUTHORIZATION = "Authorization"

  def encodeCredentials(username: String, password: String): String = {
    new String(Base64.encodeBase64((username + ":" + password).getBytes))
  }

  def getHeader(username: String, password: String): String =
    BASIC + " " + encodeCredentials(username, password)
} 
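A small usage sketch for HttpBasicAuth above (not part of the original file), setting the header on a plain HttpURLConnection; the URL and credentials are illustrative.

object HttpBasicAuthUsage extends App {
  import java.net.{HttpURLConnection, URL}

  import org.apache.iota.fey.HttpBasicAuth

  val connection = new URL("http://localhost:8080/protected.json")
    .openConnection()
    .asInstanceOf[HttpURLConnection]

  // Sends "Authorization: Basic <base64(user:password)>".
  connection.setRequestProperty(HttpBasicAuth.AUTHORIZATION, HttpBasicAuth.getHeader("fey", "secret"))
  println(s"Response code: ${connection.getResponseCode}")
  connection.disconnect()
}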
Example 81
Source File: JsonReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Paths}

import akka.actor.ActorRef
import akka.testkit.{EventFilter, TestProbe}
import ch.qos.logback.classic.Level
import scala.concurrent.duration.{DurationInt, FiniteDuration}

class JsonReceiverSpec extends BaseAkkaSpec with LoggingTest{


  class ReceiverTest(verifyActor: ActorRef) extends JsonReceiver{

    override def execute(): Unit = {
      verifyActor ! "EXECUTED"
      Thread.sleep(500)
    }

    override def exceptionOnRun(e: Exception): Unit = {
      verifyActor ! "INTERRUPTED"
    }

  }

  val verifyTB = TestProbe("RECEIVER-TEST")
  val receiver = new ReceiverTest(verifyTB.ref)

  "Executing validJson in JsonReceiver" should {
    "return false when json schema is not right" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid)) should be(false)
    }
    "log message to Error" in {
      ("Incorrect JSON schema \n/ensembles/0 \n\tErrors: Property command missing") should beLoggedAt(Level.ERROR)
    }
    "return true when Json schema is valid" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.create_json_test)) should be(true)
    }
  }

  "Executing checkForLocation in JsonReceiver" should {
    "log message at Debug level" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid))
      "Location not defined in JSON" should beLoggedAt(Level.DEBUG)
    }
    "download jar dynamically from URL" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.location_test))
      Files.exists(Paths.get(s"${CONFIG.DYNAMIC_JAR_REPO}/fey-stream.jar")) should be(true)
    }
  }

  var watchThread: Thread = _
  "Start a Thread with the JSON receiver" should {
    "Start Thread" in {
      watchThread = new Thread(receiver, "TESTING-RECEIVERS-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-RECEIVERS-IN-THREAD") should be(true)
    }
    "execute execute() method inside run" in {
      verifyTB.expectMsgAllOf(600.milliseconds,"EXECUTED","EXECUTED")
    }
  }

  "Interrupting the receiver Thread" should {
    "Throw Interrupted exception" in {
      EventFilter[InterruptedException]() intercept {
        watchThread.interrupt()
        watchThread.join()
      }
    }
    "execute exceptionOnRun method" in {
      verifyTB.receiveWhile(1200.milliseconds) {
        case "EXECUTED" =>
      }
      verifyTB.expectMsg("INTERRUPTED")
    }
  }


} 
Example 82
Source File: WatchServiceReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

import akka.testkit.{EventFilter, TestProbe}

import scala.concurrent.duration.{DurationInt, FiniteDuration}
import java.io.File

import ch.qos.logback.classic.Level

class WatchServiceReceiverSpec extends BaseAkkaSpec{

  val watcherTB = TestProbe("WATCH-SERVICE")
  var watchFileTask:WatchServiceReceiver = _
  val watchTestDir = s"${CONFIG.JSON_REPOSITORY}/watchtest"

  "Creating WatchServiceReceiver" should {
    "process initial files in the JSON repository" in {
      CONFIG.JSON_EXTENSION = "json.not"
      watchFileTask = new WatchServiceReceiver(watcherTB.ref)
      watcherTB.expectMsgAllClassOf(classOf[JsonReceiverActor.JSON_RECEIVED])
      CONFIG.JSON_EXTENSION = "json.test"
    }
  }

  var watchThread: Thread = _
  "Start a Thread with WatchServiceReceiver" should {
    "Start Thread" in {
      watchThread = new Thread(watchFileTask, "TESTING-WATCHER-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-WATCHER-IN-THREAD") should be(true)
    }
  }

  "Start watching directory" should {
    "Starting receiving CREATED event" taggedAs(SlowTest) in {
      watchFileTask.watch(Paths.get(watchTestDir))
      Files.write(Paths.get(s"$watchTestDir/watched.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
    "Starting receiving UPDATE event" taggedAs(SlowTest) in {
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.delete_json_test.getBytes(StandardCharsets.UTF_8))
      Thread.sleep(200)
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
  }

  "processJson" should {
    "log to warn level when json has invalid schema" in {
      Files.write(Paths.get(s"$watchTestDir/watched-invalid.json.test"), Utils_JSONTest.test_json_schema_invalid.getBytes(StandardCharsets.UTF_8))
      watchFileTask.processJson(s"$watchTestDir/watched-invalid.json.test",new File(s"$watchTestDir/watched-invalid.json.test"))
      s"File $watchTestDir/watched-invalid.json.test not processed. Incorrect JSON schema" should beLoggedAt(Level.WARN)
    }
  }

  "interrupt watchservice" should{
    "interrupt thread" in {
      watchThread.interrupt()
    }
  }

} 
Example 83
Source File: TestSetup.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.io.File
import java.nio.file.Paths

import org.apache.commons.io.FileUtils
import org.scalatest.Tag

object TestSetup {

  private var runSetup = true

  val configTest = getClass.getResource("/test-fey-configuration.conf")

  def setup(): Unit = {
    if(runSetup){
      println("SETTING UP ...")
      createFeyTmpDirectoriesForTest()
      copyTestActorToTmp()
      copyJSONstoTmp()
      runSetup = false
    }
  }

  private def copyTestActorToTmp(): Unit = {
    copyResourceFileToLocal("/fey-test-actor.jar",s"${CONFIG.JAR_REPOSITORY}/fey-test-actor.jar")
  }

  private def copyJSONstoTmp(): Unit = {
    copyResourceFileToLocal("/json/valid-json.json",s"${CONFIG.JSON_REPOSITORY}/valid-json.json.not")
    copyResourceFileToLocal("/json/invalid-json.json",s"${CONFIG.JSON_REPOSITORY}/invalid-json.json.not")
  }

  private def copyResourceFileToLocal(resourcePath: String, destination: String): Unit = {
    val resourceFile = getClass.getResource(resourcePath)
    val dest = new File(destination)
    FileUtils.copyURLToFile(resourceFile, dest)
  }

  private def createFeyTmpDirectoriesForTest(): Unit = {
    var file = new File(s"/tmp/fey/test/checkpoint")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/json")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/json/watchtest")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/jars")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/jars/dynamic")
    file.mkdirs()
  }

}

object SlowTest extends Tag("org.apache.iota.fey.SlowTest") 
Example 84
Source File: BaseAkkaSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.Paths

import akka.actor.{ActorIdentity, ActorRef, ActorSystem, Identify, Props}
import akka.testkit.{EventFilter, TestEvent, TestProbe}
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll
import play.api.libs.json._

import scala.concurrent.duration.{DurationInt, FiniteDuration}
import scala.concurrent.Await

class BaseAkkaSpec extends BaseSpec with BeforeAndAfterAll with LoggingTest{

  //Load default configuration for Fey when running tests
  resetCapturedLogs()
  CONFIG.loadUserConfiguration(Paths.get(TestSetup.configTest.toURI()).toFile().getAbsolutePath)
  TestSetup.setup()

  val systemName = "FEY-TEST"
  implicit val system = ActorSystem(systemName, ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]"""))
  system.eventStream.publish(TestEvent.Mute(EventFilter.debug()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.info()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.warning()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.error()))

  val globalIdentifierName = "GLOBAL-IDENTIFIER"
  val globalIdentifierRef = system.actorOf(Props[IdentifyFeyActors],globalIdentifierName)

  override protected def afterAll(): Unit = {
    //Force reload of GenericActor's jar
    Utils.loadedJars.remove("fey-test-actor.jar")
    Monitor.events.removeAllNodes()
    Await.ready(system.terminate(), 20.seconds)
  }

  implicit class TestProbeOps(probe: TestProbe) {

    def expectActor(path: String, max: FiniteDuration = 3.seconds): ActorRef = {
      probe.within(max) {
        var actor = null: ActorRef
        probe.awaitAssert {
          (probe.system actorSelection path).tell(Identify(path), probe.ref)
          probe.expectMsgPF(100 milliseconds) {
            case ActorIdentity(`path`, Some(ref)) => actor = ref
          }
        }
        actor
      }
    }

    def expectActorInSystem(path: String, lookInSystem: ActorSystem, max: FiniteDuration = 3.seconds): ActorRef = {
      probe.within(max) {
        var actor = null: ActorRef
        probe.awaitAssert {
          (lookInSystem actorSelection path).tell(Identify(path), probe.ref)
          probe.expectMsgPF(100 milliseconds) {
            case ActorIdentity(`path`, Some(ref)) => actor = ref
          }
        }
        actor
      }
    }

    def verifyActorTermination(actor: ActorRef)(implicit system: ActorSystem): Unit = {
      val watcher = TestProbe()
      watcher.watch(actor)
      watcher.expectTerminated(actor)
    }

    def notExpectActor(path: String, max: FiniteDuration = 3.seconds): Unit = {
      probe.within(max) {
        probe.awaitAssert {
          (probe.system actorSelection path).tell(Identify(path), probe.ref)
          probe.expectMsgPF(100 milliseconds) {
            case ActorIdentity(`path`, None) =>
          }
        }
      }
    }

    def isThreadRunning(threadName: String): Boolean = {
      Thread.getAllStackTraces.keySet().toArray
        .map(_.asInstanceOf[Thread])
        .find(_.getName == threadName) match {
        case Some(thread) =>
          if(thread.isAlive) true else false
        case None => false
      }
    }
  }

  //Utils Functions
  def getJSValueFromString(json: String): JsValue = {
    Json.parse(json)
  }

} 
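
A minimal sketch of how a spec can build on the helpers above; the actor name and the assertion are illustrative and not taken from the iota project:

package org.apache.iota.fey

import akka.actor.Props
import akka.testkit.TestProbe

class ExampleActorSpec extends BaseAkkaSpec {
  "Creating an actor under /user" should {
    "be resolvable through the expectActor helper" in {
      val ref = system.actorOf(Props[IdentifyFeyActors], "EXAMPLE-ACTOR")
      // expectActor retries the Identify handshake until the selection resolves
      TestProbe().expectActor("/user/EXAMPLE-ACTOR") should equal(ref)
    }
  }
}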
Example 85
Source File: ZincAnalysisParserTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.build.zinc.analysis

import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.util.UUID

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.Coordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

class ZincAnalysisParserTest extends SpecificationWithJUnit {
  "ZincAnalysisParser" should {
    "parse repo with zinc analysis" in new baseCtx {
      private val parser = new ZincAnalysisParser(repoRoot)
      private val coordinatesToAnalyses: Map[Coordinates, List[ZincModuleAnalysis]] = parser.readModules()
      coordinatesToAnalyses must haveLength(greaterThan(0))
      private val analysisList: List[ZincModuleAnalysis] = coordinatesToAnalyses.head._2
      analysisList must haveLength(greaterThan(0))
    }
  }

  abstract class baseCtx extends Scope {
    val fileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot = fileSystem.getPath("repoRoot")
    Files.createDirectories(repoRoot)
    writeResourceAsFileToPath("/pom.xml", "pom.xml", "java-junit-sample/")
    writeResourceAsFileToPath("/aggregate-pom.xml", "pom.xml", "")
    writeResourceAsFileToPath("/compile.relations", "compile.relations","java-junit-sample/target/analysis/")
    writeResourceAsFileToPath("/test-compile.relations", "test-compile.relations","java-junit-sample/target/analysis/")

    private def writeResourceAsFileToPath(resource: String, fileName: String, path: String) = {
      if (path.nonEmpty)
        Files.createDirectories(repoRoot.resolve(path))
      val stream: InputStream = getClass.getResourceAsStream(s"$resource")
      val compileRelations = scala.io.Source.fromInputStream(stream).mkString
      Files.write(repoRoot.resolve(s"$path$fileName"), compileRelations.getBytes(StandardCharsets.UTF_8))
    }

    def path(withName: String) = repoRoot.resolve(withName)
    def random = UUID.randomUUID().toString
  }
} 
Example 86
Source File: RelativePathSupport.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.io.IOException
import java.nio.file.{Path, Paths}

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonToken}
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule

class RelativePathSupportingModule extends SimpleModule {
  addDeserializer(classOf[Path], new RelativePathSupportingDeserializer)
  addSerializer(classOf[Path], new RelativePathSupportingSerializer)
}

class RelativePathSupportingSerializer extends JsonSerializer[Path] {
  @throws[IOException]
  def serialize(value: Path, gen: JsonGenerator, serializers: SerializerProvider): Unit =
    value match {
      case null => gen.writeNull()
      case _ => gen.writeString(value.toString)
    }
}

class RelativePathSupportingDeserializer extends JsonDeserializer[Path] {
  @throws[IOException]
  def deserialize(p: JsonParser, ctxt: DeserializationContext): Path =
    p.getCurrentToken match {
      case JsonToken.VALUE_NULL => null
      case JsonToken.VALUE_STRING => Paths.get(p.readValueAs(classOf[String]))
      case _ => throw ctxt.wrongTokenException(p, JsonToken.VALUE_STRING, "The value of a java.nio.file.Path must be a string")
    }
} 
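
A small usage sketch (not part of the migrator sources; the mapper setup and the sample path are illustrative) showing the module round-tripping a relative java.nio.file.Path through JSON:

import java.nio.file.{Path, Paths}
import com.fasterxml.jackson.databind.ObjectMapper
import com.wix.bazel.migrator.analyze.RelativePathSupportingModule

object RelativePathRoundTrip extends App {
  val mapper = new ObjectMapper().registerModule(new RelativePathSupportingModule)

  // the Path is written as a plain JSON string and read back via Paths.get
  val json = mapper.writeValueAsString(Paths.get("some/module/src")) // "some/module/src"
  val path: Path = mapper.readValue(json, classOf[Path])
  println(path) // some/module/src
}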
Example 87
Source File: JDepsCommandImpl.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{Files, Path, Paths}


class JDepsCommandImpl(repoRoot: Path) extends JDepsCommand {

  override def analyzeClassesDependenciesPerJar(jarPath: String, classPath: List[String]): Option[ClassDependencies] = {
    val fileName = Paths.get(jarPath).getFileName.toString
    val dotDirectory = Files.createTempDirectory("dot")
    val classpath = classPath.mkString(":")
    val cmdArgs = List("jdeps",
      "-dotoutput",
      dotDirectory.toString,
      "-v",
      "-cp",
      classpath,
      jarPath)
    val process = (new ProcessBuilder).directory(repoRoot.toFile).command(cmdArgs:_*)
    process.redirectOutput()
    val process1 = process.start()
    val stream = process1.getInputStream
    process1.waitFor()
    val path = dotDirectory.resolve(fileName + ".dot")
    if (Files.exists(path)) Some(ClassDependencies(path)) else None
  }
} 
Example 88
Source File: CachingEagerEvaluatingDependencyAnalyzer.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.nio.file.{Files, Path, Paths}
import java.util
import java.util.concurrent.atomic.AtomicInteger

import com.fasterxml.jackson.annotation.JsonTypeInfo
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model._
import com.wixpress.build.maven.MavenScope
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.collection.parallel.ParMap

//this is needed since currently the transformer isn't thread safe but the dependency analyzer is
class CachingEagerEvaluatingDependencyAnalyzer(sourceModules: Set[SourceModule], dependencyAnalyzer: DependencyAnalyzer, performSourceAnalysis: Boolean) extends DependencyAnalyzer {
  private val log = LoggerFactory.getLogger(getClass)
  private val cachePath = Files.createDirectories(Paths.get("./cache"))
  private val objectMapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)
    .registerModule(new RelativePathSupportingModule)
    .registerModule(new SourceModuleSupportingModule(sourceModules))
    .addMixIn(classOf[Target], classOf[TypeAddingMixin])
    .addMixIn(classOf[CodePurpose], classOf[TypeAddingMixin])
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .addMixIn(classOf[MavenScope], classOf[TypeAddingMixin])

  private val collectionType = objectMapper.getTypeFactory.constructCollectionType(classOf[util.Collection[Code]], classOf[Code])
  private val clean = performSourceAnalysis

  private def cachePathForSourceModule(m: SourceModule) = {
    cachePath.resolve(m.relativePathFromMonoRepoRoot + ".cache")
  }

  private val size = sourceModules.size
  private val counter = new AtomicInteger()
  private val tenthSize = size / 10

  private def initCachePathForSourceModule(p: Path) = Files.createDirectories(p.getParent)

  private def maybeCodeFromCache(p: Path): Option[List[Code]] = {
    if (clean || !Files.exists(p)) return None
    try {
      val value: util.Collection[Code] = objectMapper.readValue(p.toFile, collectionType)
      val codeList = value.asScala.toList
      Some(codeList)
    } catch {
      case e: Exception =>
        log.warn(s"Error reading $p ,deleting cache file.")
        log.warn(e.getMessage)
        Files.deleteIfExists(p)
        None
    }
  }

  private def retrieveCodeAndCache(m: SourceModule, cachePath: Path): List[Code] = {
    val codeList = dependencyAnalyzer.allCodeForModule(m)
    Files.deleteIfExists(cachePath)
    initCachePathForSourceModule(cachePath)
    Files.createFile(cachePath)
    try {
      objectMapper.writeValue(cachePath.toFile, codeList)
    } catch {
      case e: InterruptedException =>
        log.warn(s"aborting write to file $cachePath")
        Files.deleteIfExists(cachePath)
        throw e
      case e: Exception =>
        log.warn(s"could not write to file $cachePath")
        log.warn(e.getMessage)
    }
    codeList
  }

  private def calculateMapEntryFor(sourceModule: SourceModule) = {
    printProgress()
    val cachePath = cachePathForSourceModule(sourceModule)
    (sourceModule, maybeCodeFromCache(cachePath).getOrElse(retrieveCodeAndCache(sourceModule, cachePath)))
  }

  private def printProgress(): Unit = {
    if (tenthSize > 0) {
      val currentCount = counter.incrementAndGet()
      if (currentCount % tenthSize == 0) {
        log.info(s"DependencyAnalyzer:allCodeForModule:\t ${currentCount / tenthSize * 10}% done")
      }
    }
  }

  private val allCode: ParMap[SourceModule, List[Code]] = sourceModules.par.map(calculateMapEntryFor).toMap

  override def allCodeForModule(module: SourceModule): List[Code] = allCode(module)
}

@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, property = "__class")
trait TypeAddingMixin 
Example 89
Source File: ZincDependencyAnalyzer.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.nio.file.{Path, Paths}

import com.wix.bazel.migrator.model.SourceModule
import com.wix.build.zinc.analysis.{ZincAnalysisParser, ZincCodePath, ZincModuleAnalysis, ZincSourceModule}
import com.wixpress.build.maven.Coordinates

class ZincDepednencyAnalyzer(repoPath: Path) extends DependencyAnalyzer {
  private val modules: Map[Coordinates, List[ZincModuleAnalysis]] = new ZincAnalysisParser(Paths.get(repoPath.toAbsolutePath.toString)).readModules()

  override def allCodeForModule(module: SourceModule): List[Code] = {
    val emptyDependencies = module.dependencies.copy(directDependencies = Set(), allDependencies = Set())
    // TODO: change type of passed module to not include dependencies!!!
    val strippedModule = module.copy(dependencies = emptyDependencies)

    allCodeForStrippedModule(strippedModule)
  }

  private def allCodeForStrippedModule(strippedModule: SourceModule) = {
    modules.getOrElse(strippedModule.coordinates, Nil).map { moduleAnalysis =>
      Code(toCodePath(strippedModule, moduleAnalysis.codePath), toDependencies(moduleAnalysis))
    }
  }

  private def toCodePath(module: SourceModule, v: ZincCodePath) = {
    CodePath(module, v.relativeSourceDirPathFromModuleRoot, v.filePath)
  }

  private def toDependencies( analysis: ZincModuleAnalysis) = {
    // TODO: figure out runtime deps!!!!!!!
    analysis.dependencies.map(d => {
      Dependency(toCodePath(moduleFrom(d.module), d), isCompileDependency = true)
    })
  }

  private def moduleFrom(m: ZincSourceModule) =
    SourceModule(m.moduleName, m.coordinates)
} 
Example 90
Source File: Persister.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.io.File
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{Files, Paths}
import java.time.Instant
import java.time.temporal.TemporalUnit
import java.util

import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model.{CodePurpose, Package, Target, TestType}
import com.wix.bazel.migrator.utils.{IgnoringIsArchiveDefMixin, IgnoringIsProtoArtifactDefMixin, IgnoringIsWarDefMixin, TypeAddingMixin}
import com.wix.build.maven.analysis.SourceModules
import com.wixpress.build.maven.{Coordinates, MavenScope, Packaging}

import scala.collection.JavaConverters._

object Persister {

  private val transformedFile = new File("dag.bazel")
  private val mavenCache = Paths.get("classpathModules.cache")
  val objectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
    .addMixIn(classOf[Target], classOf[TypeAddingMixin])
    .addMixIn(classOf[CodePurpose], classOf[TypeAddingMixin])
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .addMixIn(classOf[MavenScope], classOf[TypeAddingMixin])
    .addMixIn(classOf[Packaging], classOf[IgnoringIsArchiveDefMixin])
    .addMixIn(classOf[Packaging], classOf[IgnoringIsWarDefMixin])
    .addMixIn(classOf[Coordinates], classOf[IgnoringIsProtoArtifactDefMixin])
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

  def persistTransformationResults(bazelPackages: Set[Package]): Unit = {
    println("Persisting transformation")
    objectMapper.writeValue(transformedFile, bazelPackages)
  }

  def readTransformationResults(): Set[Package] = {
    val collectionType = objectMapper.getTypeFactory.constructCollectionType(classOf[util.Collection[Package]], classOf[Package])
    val value: util.Collection[Package] = objectMapper.readValue(transformedFile, collectionType)
    val bazelPackages = value.asScala.toSet
    bazelPackages
  }

  def persistMavenClasspathResolution(sourceModules: SourceModules): Unit = {
    println("Persisting maven")
    objectMapper.writeValue(mavenCache.toFile, sourceModules)
  }

  def readTransMavenClasspathResolution(): SourceModules = {
    objectMapper.readValue[SourceModules](mavenCache.toFile, classOf[SourceModules])
  }

  def mavenClasspathResolutionIsUnavailableOrOlderThan(amount: Int, unit: TemporalUnit): Boolean =
    !Files.isReadable(mavenCache) ||
      lastModifiedMavenCache().toInstant.isBefore(Instant.now().minus(amount, unit))

  private def lastModifiedMavenCache() =
    Files.readAttributes(mavenCache, classOf[BasicFileAttributes]).lastModifiedTime()

} 
Example 91
Source File: Chapter10.scala    From Learning-Spark-SQL   with MIT License 5 votes vote down vote up
//Code for Chapter 10 to be executed in Spark shell. For all other code from the BigDL library, follow the instructions and commands in the book.
//Note that the code in this Chapter uses Spark 2.1 due to some bugs.

//Execute the following on the command prompt to start the Spark shell
source /Users/aurobindosarkar/Downloads/BigDL-master/scripts/bigdl.sh
bin/spark-shell --properties-file /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/conf/spark-bigdl.conf --jars /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/lib/bigdl-0.2.0-SNAPSHOT-jar-with-dependencies.jar

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToBatch, GreyImgToSample}
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Module}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T, Table}
import com.intel.analytics.bigdl.nn._
import java.nio.ByteBuffer
import java.nio.file.{Files, Path, Paths}
import com.intel.analytics.bigdl.dataset.ByteRecord
import com.intel.analytics.bigdl.utils.File

val trainData = "/Users/aurobindosarkar/Downloads/mnist/train-images-idx3-ubyte"
val trainLabel = "/Users/aurobindosarkar/Downloads/mnist/train-labels-idx1-ubyte"
val validationData = "/Users/aurobindosarkar/Downloads/mnist/t10k-images-idx3-ubyte"
val validationLabel = "/Users/aurobindosarkar/Downloads/mnist/t10k-labels-idx1-ubyte"
val nodeNumber = 1
val coreNumber = 2
Engine.init
val model = Sequential[Float]()
val classNum = 10
val batchSize = 12
model.add(Reshape(Array(1, 28, 28))).add(SpatialConvolution(1, 6, 5, 5)).add(Tanh()).add(SpatialMaxPooling(2, 2, 2, 2)).add(Tanh()).add(SpatialConvolution(6, 12, 5, 5)).add(SpatialMaxPooling(2, 2, 2, 2)).add(Reshape(Array(12 * 4 * 4))).add(Linear(12 * 4 * 4, 100)).add(Tanh()).add(Linear(100, classNum)).add(LogSoftMax())

def load(featureFile: String, labelFile: String): Array[ByteRecord] = {
    val featureBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(featureFile)))
    val labelBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(labelFile)));
    val labelMagicNumber = labelBuffer.getInt();
    require(labelMagicNumber == 2049);
    val featureMagicNumber = featureBuffer.getInt();
    require(featureMagicNumber == 2051);
    val labelCount = labelBuffer.getInt();
    val featureCount = featureBuffer.getInt();
    require(labelCount == featureCount);
    val rowNum = featureBuffer.getInt();
    val colNum = featureBuffer.getInt();
    val result = new Array[ByteRecord](featureCount);
    var i = 0;
    while (i < featureCount) {
      val img = new Array[Byte]((rowNum * colNum));
      var y = 0;
      while (y < rowNum) {
        var x = 0;
        while (x < colNum) {
          img(x + y * colNum) = featureBuffer.get();
          x += 1;
        }
        y += 1;
      }
      result(i) = ByteRecord(img, labelBuffer.get().toFloat + 1.0f);
      i += 1;
    }
    result;
  }
val trainMean = 0.13066047740239506
val trainStd = 0.3081078
val trainSet = DataSet.array(load(trainData, trainLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) -> GreyImgToBatch(batchSize)
val optimizer = Optimizer(model = model, dataset = trainSet, criterion = ClassNLLCriterion[Float]())   
val testMean = 0.13251460696903547
val testStd = 0.31048024
val maxEpoch = 2
val validationSet = DataSet.array(load(validationData, validationLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToBatch(batchSize)
optimizer.setEndWhen(Trigger.maxEpoch(2))
optimizer.setState(T("learningRate" -> 0.05, "learningRateDecay" -> 0.0))
optimizer.setCheckpoint("/Users/aurobindosarkar/Downloads/mnist/checkpoint", Trigger.severalIteration(500))
optimizer.setValidation(trigger = Trigger.everyEpoch, dataset = validationSet, vMethods = Array(new Top1Accuracy, new Top5Accuracy[Float], new Loss[Float]))
optimizer.optimize()
model.save("/Users/aurobindosarkar/Downloads/mnist/model")
val model = Module.load[Float]("/Users/aurobindosarkar/Downloads/mnist/model")
val partitionNum = 2
val rddData = sc.parallelize(load(validationData, validationLabel), partitionNum)
val transformer = BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToSample()
val evaluationSet = transformer(rddData)
val result = model.evaluate(evaluationSet, Array(new Top1Accuracy[Float]), Some(batchSize))
result.foreach(r => println(s"${r._2} is ${r._1}")) 
Example 92
Source File: Dhall.scala    From http4s-jdk-http-client   with Apache License 2.0 5 votes vote down vote up
import cats.effect._
import java.nio.file.{Files, Paths}
import org.dhallj.core.Expr
import org.dhallj.core.converters.JsonConverter
import org.dhallj.imports.syntax._
import org.dhallj.parser.DhallParser
import org.dhallj.yaml.YamlConverter
import org.http4s.client.Client
import org.http4s.client.jdkhttpclient.JdkHttpClient
import sbt.{IO => _, _}
import scala.concurrent.ExecutionContext
import upickle.default.{ReadWriter, macroRW}

object Dhall {

  lazy val convertDhall = taskKey[Unit]("Generate YAML/JSON from Dhall.")

  private lazy val http = {
    implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
    JdkHttpClient.simple[IO].unsafeRunSync()
  }

  private def loadDhall(expr: String): Expr = {
    implicit val c: Client[IO] = http
    DhallParser
      .parse(expr)
      .normalize()
      .resolveImports[IO]
      .unsafeRunSync()
      .normalize()
  }

  val convertDhallTask = convertDhall := {
    val baseDir = (Keys.baseDirectory in LocalRootProject).value.absolutePath
    def convertYaml(from: String, to: String): Unit = {
      val dhall = loadDhall(s"$baseDir/dhall/$from.dhall")
      val yaml = YamlConverter.toYamlString(dhall)
      Files.writeString(Paths.get(s"$baseDir/$to"), yaml)
    }
    List("ci", "release", "dhall").foreach { file =>
      convertYaml(file, s".github/workflows/$file.yml")
    }
    convertYaml("mergify", s".mergify.yml")
  }

  case class ScalaVersions(default: String, all: List[String])
  object ScalaVersions { implicit val rw: ReadWriter[ScalaVersions] = macroRW }

  val scalaVersions = settingKey[ScalaVersions]("Read the Scala versions via Dhall")

  val scalaVersionsImpl = scalaVersions := {
    val baseDir = (Keys.baseDirectory in LocalRootProject).value.absolutePath
    val dhall = loadDhall(s"$baseDir/dhall/scalaVersions.dhall")
    val json = JsonConverter.toCompactString(dhall)
    upickle.default.read[ScalaVersions](json)
  }

} 
Example 93
Source File: NeuralNetwork.scala    From Scala-Machine-Learning-Projects   with MIT License 5 votes vote down vote up
package Yelp.Trainer

import org.deeplearning4j.nn.conf.MultiLayerConfiguration
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork
import org.nd4j.linalg.factory.Nd4j
import java.io.File
import org.apache.commons.io.FileUtils
import java.io.{DataInputStream, DataOutputStream, FileInputStream}
import java.nio.file.{Files, Paths}

object NeuralNetwork {  
  def loadNN(NNconfig: String, NNparams: String) = {
    // get neural network config
    val confFromJson: MultiLayerConfiguration = MultiLayerConfiguration.fromJson(FileUtils.readFileToString(new File(NNconfig)))    
     // get neural network parameters 
    val dis: DataInputStream = new DataInputStream(new FileInputStream(NNparams))
    val newParams = Nd4j.read(dis)    
     // creating network object
    val savedNetwork: MultiLayerNetwork = new MultiLayerNetwork(confFromJson)
    savedNetwork.init()
    savedNetwork.setParameters(newParams)    
    savedNetwork
  }
  
  def saveNN(model: MultiLayerNetwork, NNconfig: String, NNparams: String) = {
    // save neural network config
    FileUtils.write(new File(NNconfig), model.getLayerWiseConfigurations().toJson())     
    // save neural network parms
    val dos: DataOutputStream = new DataOutputStream(Files.newOutputStream(Paths.get(NNparams)))
    Nd4j.write(model.params(), dos)
  }  
} 
Example 94
Source File: XSDToSchemaSuite.scala    From spark-xml   with Apache License 2.0 5 votes vote down vote up
package com.databricks.spark.xml.util

import java.nio.file.Paths

import org.apache.spark.sql.types.{ArrayType, StructField, StructType, StringType}
import org.scalatest.funsuite.AnyFunSuite

class XSDToSchemaSuite extends AnyFunSuite {

  test("Basic parsing") {
    val parsedSchema = XSDToSchema.read(Paths.get("src/test/resources/basket.xsd"))
    val expectedSchema = StructType(Array(
      StructField("basket", StructType(Array(
        StructField("entry", ArrayType(
          StructType(Array(
            StructField("key", StringType),
            StructField("value", StringType)
          )))
        ))
      )))
    )
    assert(expectedSchema === parsedSchema)
  }

} 
Example 95
Source File: MemoryFootprint.scala    From collection-strawman   with Apache License 2.0 5 votes vote down vote up
package bench

import strawman.collection.immutable.{LazyList, List, Range, NumericRange, Vector}
import strawman.collection.mutable.{ArrayBuffer, ListBuffer}

import scala.{Any, AnyRef, App, Int, Long, Seq, StringContext}
import scala.Predef.{ArrowAssoc, println, intWrapper}
import scala.compat.Platform
import java.lang.Runtime
import java.nio.file.{Files, Paths}


object MemoryFootprint extends App {

  val reportPath = Paths.get(args(0))

  val sizes = scala.List(8, 64, 512, 4096, 32768, 262144, 2097152)

  val runtime = Runtime.getRuntime
  val obj: AnyRef = null
  var placeholder: Any = _

  def benchmark[A](gen: Int => A): scala.List[(Int, Long)] = (
    // We run 5 iterations and pick the last result only
    for (_ <- scala.Range(0, 5)) yield {
      for (size <- sizes) yield {
        placeholder = null
        Platform.collectGarbage()
        val memBefore = runtime.totalMemory() - runtime.freeMemory()
        placeholder = gen(size)
        Platform.collectGarbage()
        val memAfter = runtime.totalMemory() - runtime.freeMemory()
        size -> (memAfter - memBefore)
      }
    }
  ).last

  val memories =
    scala.Predef.Map(
      "scala.List"    -> benchmark(scala.List.fill(_)(obj)),
      "List"          -> benchmark(List.fill(_)(obj)),
      "LazyList"      -> benchmark(LazyList.fill(_)(obj)),
      "scala.Vector"  -> benchmark(scala.Vector.fill(_)(obj)),
      "Vector"        -> benchmark(Vector.fill(_)(obj)),
      "scala.HashSet" -> benchmark(n => scala.collection.immutable.HashSet((1 to n).map(_.toString): _*)),
      "HashSet"       -> benchmark(n => strawman.collection.immutable.HashSet((1 to n).map(_.toString): _*)),
      "scala.TreeSet" -> benchmark(n => scala.collection.immutable.TreeSet((1 to n).map(_.toString): _*)),
      "TreeSet"       -> benchmark(n => strawman.collection.immutable.TreeSet((1 to n).map(_.toString): _*)),
      "ArrayBuffer"   -> benchmark(ArrayBuffer.fill(_)(obj)),
      "ListBuffer"    -> benchmark(ListBuffer.fill(_)(obj)),
      "ImmutableArray" -> benchmark(strawman.collection.immutable.ImmutableArray.fill(_)(obj)),
      "ImmutableArray (primitive)" -> benchmark(strawman.collection.immutable.ImmutableArray.fill(_)(123)),
      "Range"         -> benchmark(Range(0, _)),
      "NumericRange"  -> benchmark(NumericRange(0, _, 1))
    )

  // We use a format similar to the one used by JMH so that
  // our charts can be generated in the same way
  import jawn.ast._
  val report =
    JArray.fromSeq(
      memories.flatMap { case (name, values) =>
        values.map { case (size, value) =>
          JObject.fromSeq(Seq(
            "benchmark" -> JString(s"$name.memory-footprint"),
            "params" -> JObject.fromSeq(Seq(
              "size" -> JString(size.toString)
            )),
            "primaryMetric" -> JObject.fromSeq(Seq(
              "score" -> JNum(value),
              "scoreConfidence" -> JArray.fromSeq(Seq(JNum(value), JNum(value)))
            ))
          ))
        }
      }.to[Seq]
    )
  Files.write(reportPath, FastRenderer.render(report).getBytes)

} 
Example 96
Source File: FileUploadServlet.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.rpc.utils

import java.io.InputStream
import java.nio.file.Paths
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import com.avsystem.commons._


// NOTE: the class declaration below is reconstructed from how fileFields and handleFile are used; treat it as an assumption, not verbatim source
abstract class FileUploadServlet(fileFields: Set[String]) extends HttpServlet {

  protected def handleFile(name: String, content: InputStream): Unit

  override protected def doPost(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    request.getParts.asScala
      .filter(part => fileFields.contains(part.getName))
      .foreach(filePart => {
        val fileName = Paths.get(filePart.getSubmittedFileName).getFileName.toString
        val fileContent = filePart.getInputStream
        handleFile(fileName, fileContent)
        fileContent.close()
      })
  }
} 
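
A hypothetical subclass (not part of udash; the field name, target directory, and the constructor signature reconstructed above are assumptions) that stores each accepted upload on disk with Files and Paths:

package io.udash.rpc.utils

import java.io.InputStream
import java.nio.file.{Files, Paths, StandardCopyOption}

class TmpDirFileUploadServlet extends FileUploadServlet(Set("file")) {
  // copy the uploaded stream to /tmp/<submitted file name>, replacing any previous upload
  override protected def handleFile(name: String, content: InputStream): Unit =
    Files.copy(content, Paths.get("/tmp", name), StandardCopyOption.REPLACE_EXISTING)
}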
Example 97
Source File: FileUtils.scala    From skeuomorph   with Apache License 2.0 5 votes vote down vote up
package higherkindness.skeuomorph

import java.io.{File, FileOutputStream, InputStream}
import java.nio.file.{Files, Paths, StandardOpenOption}

import cats.effect.{Resource, Sync}

object FileUtils {
  def fileHandle[F[_]: Sync](name: String): Resource[F, File] =
    Resource.make(
      Sync[F].delay(new File(name))
    )(file => Sync[F].delay(file.deleteOnExit()))

  def fileOutputStream[F[_]: Sync](file: File): Resource[F, FileOutputStream] =
    Resource.make(
      Sync[F].delay(new FileOutputStream(file))
    )(fos => Sync[F].delay(fos.close()))

  def fileInputStream[F[_]: Sync](name: String): Resource[F, InputStream] =
    Resource.make(
      Sync[F].delay(Files.newInputStream(Paths.get(name), StandardOpenOption.DELETE_ON_CLOSE))
    )(is => Sync[F].delay(is.close()))
} 
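
A short sketch of composing these helpers, assuming cats-effect 2.x (which provides Sync[IO] and unsafeRunSync()); the file name is illustrative. Because fileInputStream opens the file with DELETE_ON_CLOSE, the file is removed once the Resource is released:

import java.nio.file.{Files, Paths}
import cats.effect.IO
import higherkindness.skeuomorph.FileUtils

object FileUtilsUsage extends App {
  Files.write(Paths.get("example.txt"), "hello".getBytes)

  // read the first byte through the Resource; the file is deleted when the stream closes
  val firstByte: Int =
    FileUtils.fileInputStream[IO]("example.txt").use(is => IO(is.read())).unsafeRunSync()

  println(firstByte.toChar) // h
}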
Example 98
Source File: ArtifactWithFileStorageActivationStore.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.database

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream._
import org.apache.openwhisk.common.{Logging, TransactionId}
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.entity.{DocInfo, _}
import pureconfig._
import pureconfig.generic.auto._
import spray.json._

import scala.concurrent.Future

case class ArtifactWithFileStorageActivationStoreConfig(logFilePrefix: String,
                                                        logPath: String,
                                                        userIdField: String,
                                                        writeResultToFile: Boolean)

class ArtifactWithFileStorageActivationStore(
  actorSystem: ActorSystem,
  actorMaterializer: ActorMaterializer,
  logging: Logging,
  config: ArtifactWithFileStorageActivationStoreConfig =
    loadConfigOrThrow[ArtifactWithFileStorageActivationStoreConfig](ConfigKeys.activationStoreWithFileStorage))
    extends ArtifactActivationStore(actorSystem, actorMaterializer, logging) {

  private val activationFileStorage =
    new ActivationFileStorage(
      config.logFilePrefix,
      Paths.get(config.logPath),
      config.writeResultToFile,
      actorMaterializer,
      logging)

  def getLogFile = activationFileStorage.getLogFile

  override def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): Future[DocInfo] = {
    val additionalFieldsForLogs =
      Map(config.userIdField -> context.user.namespace.uuid.toJson, "namespace" -> context.user.namespace.name.toJson)
    val additionalFieldsForActivation = Map(config.userIdField -> context.user.namespace.uuid.toJson)

    activationFileStorage.activationToFileExtended(
      activation,
      context,
      additionalFieldsForLogs,
      additionalFieldsForActivation)
    super.store(activation, context)
  }

}

object ArtifactWithFileStorageActivationStoreProvider extends ActivationStoreProvider {
  override def instance(actorSystem: ActorSystem, actorMaterializer: ActorMaterializer, logging: Logging) =
    new ArtifactWithFileStorageActivationStore(actorSystem, actorMaterializer, logging)
} 
Example 99
Source File: DockerClientWithFileAccess.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.containerpool.docker

import java.io.File
import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.FileTailSource
import akka.stream.scaladsl.{FileIO, Source => AkkaSource}
import akka.util.ByteString

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.concurrent.blocking
import spray.json.DefaultJsonProtocol._
import spray.json._
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.containerpool.ContainerId
import org.apache.openwhisk.core.containerpool.ContainerAddress

import scala.io.Source
import scala.concurrent.duration.FiniteDuration

class DockerClientWithFileAccess(dockerHost: Option[String] = None,
                                 containersDirectory: File = Paths.get("containers").toFile)(
  executionContext: ExecutionContext)(implicit log: Logging, as: ActorSystem)
    extends DockerClient(dockerHost)(executionContext)
    with DockerApiWithFileAccess {

  implicit private val ec = executionContext

  
  def rawContainerLogs(containerId: ContainerId,
                       fromPos: Long,
                       pollInterval: Option[FiniteDuration]): AkkaSource[ByteString, Any]
} 
Example 100
Source File: MleapSupportSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.runtime

import java.net.URI
import java.nio.file.{Files, Paths}

import ml.combust.mleap.core.feature.StringIndexerModel
import ml.combust.mleap.core.types.NodeShape
import ml.combust.mleap.runtime.transformer.feature.StringIndexer
import MleapSupport._

import org.scalatest.FunSpec

class MleapSupportSpec extends FunSpec {
  private val testDir = Files.createTempDirectory("MleapSupportSpec")

  private val stringIndexer = StringIndexer(shape = NodeShape().
    withStandardInput("feature").
    withStandardOutput("feature_index"),
    model = StringIndexerModel(Seq("label1", "label2")))

  describe("URIBundleFileOps") {
    it("can save/load a bundle using a URI") {
      val testFile = Paths.get(testDir.toString, "URIBundleFileOps.zip")
      testFile.toFile.deleteOnExit()

      val uri = new URI(s"jar:file://$testFile")
      stringIndexer.writeBundle.save(uri)

      val loadedStringIndexer = uri.loadMleapBundle().get.root

      assert(stringIndexer == loadedStringIndexer)
    }
  }
} 
Example 101
Source File: BundleWriter.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle

import java.net.URI
import java.nio.file.{Files, Paths}

import ml.combust.bundle.dsl.Bundle
import ml.combust.bundle.fs.BundleFileSystem
import ml.combust.bundle.serializer.{BundleSerializer, SerializationFormat}

import scala.util.Try
import resource._


case class BundleWriter[Context <: HasBundleRegistry,
Transformer <: AnyRef](root: Transformer,
                       name: Option[String] = None,
                       format: SerializationFormat = SerializationFormat.Json,
                       meta: Option[ml.bundle.Attributes] = None) {
  def name(value: String): BundleWriter[Context, Transformer] = copy(name = Some(value))
  def format(value: SerializationFormat): BundleWriter[Context, Transformer] = copy(format = value)
  def meta(value: ml.bundle.Attributes): BundleWriter[Context, Transformer] = copy(meta = Some(value))

  def save(file: BundleFile)
          (implicit context: Context): Try[Bundle[Transformer]] = {
    val n = name.getOrElse {
      context.bundleRegistry.opForObj[Any, Any, Any](root).name(root)
    }

    BundleSerializer(context, file).write(Bundle(name = n,
      format = format,
      root = root,
      meta = meta))
  }

  def save(uri: URI)
          (implicit context: Context): Try[Bundle[Transformer]] = uri.getScheme match {
    case "jar" | "file" =>
        (for (bf <- managed(BundleFile(uri))) yield {
          save(bf).get
        }).tried
    case _ =>
      val tmpDir = Files.createTempDirectory("bundle")
      val tmp = Paths.get(tmpDir.toString, "tmp.zip")

      (for (bf <- managed(BundleFile(tmp.toFile))) yield {
        save(bf).get
      }).tried.map {
        r =>
          context.bundleRegistry.fileSystemForUri(uri).save(uri, tmp.toFile)
          r
      }
  }
} 
Example 102
Source File: HadoopBundleFileSystem.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.bundle.hdfs

import java.io.File
import java.net.URI
import java.nio.file.{Files, Paths}

import com.typesafe.config.Config
import ml.combust.bundle.fs.BundleFileSystem
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

import scala.util.Try
import scala.collection.JavaConverters._

object HadoopBundleFileSystem {
  lazy val defaultSchemes: Seq[String] = Seq("hdfs")

  def createHadoopConfiguration(config: Config): Configuration = {
    val options: Map[String, String] = if(config.hasPath("options")) {
      config.getConfig("options").entrySet().asScala.map {
        entry => (entry.getKey, entry.getValue.unwrapped().toString)
      }.toMap
    } else {
      Map()
    }

    val c = new Configuration()
    for ((key, value) <- options) { c.set(key, value) }
    c
  }

  def createSchemes(config: Config): Seq[String] = if (config.hasPath("schemes")) {
    config.getStringList("schemes").asScala
  } else { Seq("hdfs") }
}

class HadoopBundleFileSystem(fs: FileSystem,
                             override val schemes: Seq[String] = HadoopBundleFileSystem.defaultSchemes) extends BundleFileSystem {
  def this(config: Config) = {
    this(FileSystem.get(HadoopBundleFileSystem.createHadoopConfiguration(config)),
      HadoopBundleFileSystem.createSchemes(config))
  }

  override def load(uri: URI): Try[File] = Try {
    val tmpDir = Files.createTempDirectory("hdfs-bundle")
    val tmpFile = Paths.get(tmpDir.toString, "bundle.zip")
    fs.copyToLocalFile(new Path(uri.toString), new Path(tmpFile.toString))
    tmpFile.toFile
  }

  override def save(uri: URI, localFile: File): Unit = {
    fs.copyFromLocalFile(new Path(localFile.toString), new Path(uri.toString))
  }
} 
Example 103
Source File: HadoopBundleFileSystemSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.bundle.hdfs

import java.net.URI
import java.nio.file.{Files, Paths}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.FunSpec

class HadoopBundleFileSystemSpec extends FunSpec {
  private val fs = FileSystem.get(new Configuration())
  private val bundleFs = new HadoopBundleFileSystem(fs)

  describe("scheme") {
    it("returns hdfs") {
      assert(bundleFs.schemes == Seq("hdfs"))
    }
  }

  describe("load") {
    it("loads a file from hadoop and saves to a local file") {
      val testFile = Files.createTempFile("HadoopBundleFileSystemSpec", ".txt")
      Files.write(testFile.toAbsolutePath, "HELLO".getBytes())

      val loadedFile = bundleFs.load(testFile.toUri).get
      val contents = new String(Files.readAllBytes(loadedFile.toPath))

      assert(contents == "HELLO")
    }
  }

  describe("save") {
    it("saves local file to HDFS") {
      val testFile = Files.createTempFile("HadoopBundleFileSystemSpec", ".txt")
      Files.write(testFile.toAbsolutePath, "HELLO".getBytes())

      val tmpDir = Files.createTempDirectory("HadoopBundleFileSystemSpec")
      val tmpFile = new URI(s"file://$tmpDir/test.txt")

      bundleFs.save(tmpFile, testFile.toFile)
      val contents = new String(Files.readAllBytes(Paths.get(tmpFile)))

      assert(contents == "HELLO")
    }
  }
} 
Example 104
Source File: ModelLoader.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.springboot

import TypeConverters._
import javax.annotation.PostConstruct
import org.slf4j.LoggerFactory
import ml.combust.mleap.pb
import org.springframework.beans.factory.annotation.{Autowired, Value}
import org.springframework.stereotype.Component

import scala.collection.JavaConverters._
import java.nio.file.{Files, Path, Paths}

import ml.combust.mleap.executor.MleapExecutor
import scalapb.json4s.Parser

@Component
class ModelLoader(@Autowired val mleapExecutor: MleapExecutor,
                  @Autowired val jsonParser: Parser) {

  @Value("${mleap.model.config:#{null}}")
  private val modelConfigPath: String = null

  private val logger = LoggerFactory.getLogger(classOf[ModelLoader])
  private val timeout = 60000

  @PostConstruct
  def loadModel(): Unit = {
    if (modelConfigPath == null) {
      logger.info("Skipping loading model on startup")
      return
    }

    val configPath = Paths.get(modelConfigPath)

    if (!Files.exists(configPath)) {
      logger.warn(s"Model path does not exist: $modelConfigPath")
      return
    }

    val configFiles: List[Path] = if (Files.isDirectory(configPath)) {
      Files.list(configPath).iterator().asScala.toList
    } else {
      List(configPath)
    }

    for (configFile <- configFiles) {
      logger.info(s"Loading model from ${configFile.toString}")

      val request = new String(Files.readAllBytes(configFile))

      mleapExecutor.loadModel(jsonParser.fromJsonString[pb.LoadModelRequest](request))(timeout)
    }
  }
} 
Example 105
Source File: IssuesReportSerializerSpec.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0 5 votes vote down vote up
package core.serializer

import java.nio.file.Paths

import com.codacy.analysis.core.model.{FullLocation, Issue, IssuesAnalysis, ToolResults}
import com.codacy.analysis.core.serializer.IssuesReportSerializer
import com.codacy.plugins.api.results.Pattern
import org.specs2.mutable.Specification
import org.specs2.control.NoLanguageFeatures

class IssuesReportSerializerSpec extends Specification with NoLanguageFeatures {
  "An IssuesAnalysis" should {
    val filePath = Paths.get("example")
    val toolName = "exampleTool"
    val patternId = "test"
    val patternMessage = "example message"
    val lineNumber = 1

    "be converted to json" in {
      val issuesReporterAsJson = IssuesReportSerializer.toJsonString(
        Set(ToolResults(
          toolName,
          IssuesAnalysis.Success(Set(IssuesAnalysis.FileResults(
            filePath,
            Set(Issue(
              Pattern.Id(patternId),
              filePath,
              Issue.Message(patternMessage),
              com.codacy.plugins.api.results.Result.Level.Info,
              None,
              FullLocation(lineNumber, lineNumber)))))))))

      val expectedJSON =
        s"""[{"tool":"$toolName","issues":{"Success":{"results":[{"filename":"${filePath.toString}","results":[{"Issue":{"patternId":{"value":"$patternId"},"filename":"${filePath.toString}","message":{"text":"$patternMessage"},"level":"Info","location":{"FullLocation":{"line":$lineNumber,"column":$lineNumber}}}}]}]}}}]"""

      expectedJSON mustEqual issuesReporterAsJson
    }

    "have category in generated json" in {
      val issuesReporterAsJson = IssuesReportSerializer.toJsonString(
        Set(ToolResults(
          toolName,
          IssuesAnalysis.Success(Set(IssuesAnalysis.FileResults(
            filePath,
            Set(Issue(
              Pattern.Id(patternId),
              filePath,
              Issue.Message(patternMessage),
              com.codacy.plugins.api.results.Result.Level.Info,
              Some(com.codacy.plugins.api.results.Pattern.Category.UnusedCode),
              FullLocation(lineNumber, lineNumber)))))))))

      val expectedJSON =
        s"""[{"tool":"$toolName","issues":{"Success":{"results":[{"filename":"${filePath.toString}","results":[{"Issue":{"patternId":{"value":"$patternId"},"filename":"${filePath.toString}","message":{"text":"$patternMessage"},"level":"Info","category":"UnusedCode","location":{"FullLocation":{"line":$lineNumber,"column":$lineNumber}}}}]}]}}}]"""

      expectedJSON mustEqual issuesReporterAsJson
    }

    "return failure json" in {
      val errorMsg = "This is a failure"
      val failureJson =
        IssuesReportSerializer.toJsonString(Set(ToolResults(toolName, IssuesAnalysis.Failure(errorMsg))))

      val expectedJSON = s"""[{"tool":"$toolName","issues":{"Failure":{"message":"$errorMsg"}}}]"""

      expectedJSON mustEqual failureJson
    }
  }
} 
Example 106
Source File: MetricsTool.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.codacy.analysis.core.tools

import java.nio.file.Paths

import better.files.File
import com.codacy.analysis.core.model.FileMetrics
import com.codacy.plugins.api
import com.codacy.plugins.api.Source
import com.codacy.plugins.api.languages.Language
import com.codacy.plugins.api.metrics.MetricsTool.CodacyConfiguration
import com.codacy.plugins.metrics.traits
import com.codacy.plugins.metrics.traits.{MetricsRequest, MetricsRunner}
import com.codacy.plugins.runners.{BinaryDockerRunner, DockerRunner}
import com.codacy.plugins.utils.PluginHelper
import org.log4s.getLogger

import scala.concurrent.duration.Duration
import scala.util.Try

class MetricsTool(private val metricsTool: traits.MetricsTool, val languageToRun: Language) extends ITool {
  override def name: String = "metrics"

  override def supportedLanguages: Set[Language] = metricsTool.languages.to[Set]

  def run(directory: File,
          files: Option[Set[Source.File]],
          timeout: Option[Duration] = Option.empty[Duration]): Try[List[FileMetrics]] = {
    val request = MetricsRequest(directory.pathAsString)

    val dockerRunner = new BinaryDockerRunner[api.metrics.FileMetrics](metricsTool)
    val runner = new MetricsRunner(metricsTool, dockerRunner)

    val configuration = CodacyConfiguration(files, Some(languageToRun), None)

    val toolFileMetrics =
      runner.run(request, configuration, timeout.getOrElse(DockerRunner.defaultRunTimeout), None)

    toolFileMetrics.map {
      _.collect {
        case fileMetrics if unignoredFile(fileMetrics, files) =>
          FileMetrics(
            filename = Paths.get(fileMetrics.filename),
            complexity = fileMetrics.complexity,
            loc = fileMetrics.loc,
            cloc = fileMetrics.cloc,
            nrMethods = fileMetrics.nrMethods,
            nrClasses = fileMetrics.nrClasses,
            lineComplexities = fileMetrics.lineComplexities)
      }
    }
  }

  def unignoredFile(metrics: api.metrics.FileMetrics, files: Option[Set[Source.File]]): Boolean = {
    files.forall(_.exists(_.path == metrics.filename))
  }
}

object MetricsToolCollector {

  private val logger: org.log4s.Logger = getLogger

  private val availableTools = PluginHelper.dockerMetricsPlugins

  def fromLanguages(languages: Set[Language]): Set[MetricsTool] = {
    languages.flatMap { lang =>
      val collectedTools = availableTools.collect {
        case tool if tool.languages.contains(lang) =>
          new MetricsTool(tool, lang)
      }
      if (collectedTools.isEmpty) {
        logger.info(s"No metrics tools found for language ${lang.name}")
      }
      collectedTools
    }
  }

} 
Example 107
Source File: TestUtils.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.codacy.analysis.core.utils

import java.nio.file.attribute.PosixFilePermission
import java.nio.file.{Path, Paths}

import better.files.File
import com.codacy.plugins.api.results
import io.circe.Decoder
import org.specs2.concurrent.ExecutionEnv
import org.specs2.matcher.MatchResult

import scala.sys.process.Process

object TestUtils {

  implicit val categoryDecoder: Decoder[results.Pattern.Category.Value] =
    Decoder.decodeEnumeration(results.Pattern.Category)

  implicit val levelDecoder: Decoder[results.Result.Level.Value] =
    Decoder.decodeEnumeration(results.Result.Level)
  implicit val fileDecoder: Decoder[Path] = Decoder[String].map(Paths.get(_))
  implicit val executionEnv: ExecutionEnv = ExecutionEnv.fromGlobalExecutionContext

  def withClonedRepo[T](gitUrl: String, commitUUid: String)(block: (File, File) => MatchResult[T]): MatchResult[T] =
    (for {
      directory <- File.temporaryDirectory()
      file <- File.temporaryFile()
    } yield {
      directory
        .addPermission(PosixFilePermission.OWNER_READ)
        .addPermission(PosixFilePermission.GROUP_READ)
        .addPermission(PosixFilePermission.OTHERS_READ)
        .addPermission(PosixFilePermission.OWNER_EXECUTE)
        .addPermission(PosixFilePermission.GROUP_EXECUTE)
        .addPermission(PosixFilePermission.OTHERS_EXECUTE)
      Process(Seq("git", "clone", gitUrl, directory.pathAsString)).!
      Process(Seq("git", "reset", "--hard", commitUUid), directory.toJava).!
      block(file, directory)
    }).get()

  def withTemporaryGitRepo[T](fn: File => MatchResult[T]): MatchResult[T] = {
    (for {
      temporaryDirectory <- File.temporaryDirectory()
    } yield {
      Process(Seq("git", "init"), temporaryDirectory.toJava).!
      Process(Seq("git", "commit", "--allow-empty", "-m", "initial commit"), temporaryDirectory.toJava).!
      fn(temporaryDirectory)
    }).get

  }
} 
Example 108
Source File: MetricsToolSpec.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.codacy.analysis.core.tools

import java.nio.file.Paths

import com.codacy.analysis.core.model.FileMetrics
import com.codacy.analysis.core.utils.TestUtils._
import com.codacy.plugins.api.Source
import com.codacy.plugins.api.languages.{Language, Languages}
import com.codacy.plugins.metrics.docker.Cloc
import org.specs2.control.NoLanguageFeatures
import org.specs2.mutable.Specification

import scala.util.Success

class MetricsToolSpec extends Specification with NoLanguageFeatures {

  val jsTest2Metrics =
    FileMetrics(Paths.get("test2.js"), None, Some(25), Some(0), None, None, Set())
  val jsTestMetrics = FileMetrics(Paths.get("test.js"), None, Some(60), Some(0), None, None, Set())

  "MetricsTool" should {
    "analyse metrics on a project" in {
      val commitUuid = "625e19cd9be4898939a7c40dbeb2b17e40df9d54"
      withClonedRepo("git://github.com/qamine-test/duplication-delta.git", commitUuid) { (_, directory) =>
        val testProjectFileMetrics = List(jsTest2Metrics, jsTestMetrics)

        val metricsTool = new MetricsTool(Cloc, Languages.Javascript)

        val result = metricsTool.run(directory, None)

        result must beSuccessfulTry
        result must beLike {
          case Success(metricsResults) =>
            metricsResults must haveSize(testProjectFileMetrics.size)
            metricsResults must containTheSameElementsAs(testProjectFileMetrics)
        }
      }
    }

    "analyse metrics on a project, ignoring a file" in {
      val commitUuid = "625e19cd9be4898939a7c40dbeb2b17e40df9d54"
      withClonedRepo("git://github.com/qamine-test/duplication-delta.git", commitUuid) { (_, directory) =>
        val testProjectFileMetrics = List(jsTestMetrics)

        val metricsTool = new MetricsTool(Cloc, Languages.Javascript)

        val result = metricsTool.run(directory, Some(Set(Source.File("test.js"))))

        result must beSuccessfulTry
        result must beLike {
          case Success(metricsResults) =>
            metricsResults must haveSize(testProjectFileMetrics.size)
            metricsResults must containTheSameElementsAs(testProjectFileMetrics)
        }
      }
    }
  }

  "MetricsToolCollector" should {
    val languagesWithTools: Set[Language] = Set(Languages.Kotlin, Languages.Go, Languages.LESS)
    s"detect the metrics tools for the given languages: ${languagesWithTools.mkString(", ")}" in {

      val tools = MetricsToolCollector.fromLanguages(languagesWithTools)

      tools must haveSize(4)

      tools.map(_.languageToRun) must containTheSameElementsAs(languagesWithTools.to[Seq])
    }
  }
} 
Example 109
Source File: JGitSystemReader.scala    From sbt-dynver   with Apache License 2.0 5 votes vote down vote up
package sbtdynver

import java.io.{ File, IOException }
import java.net.{ InetAddress, UnknownHostException }
import java.nio.file.{ Files, InvalidPathException, Path, Paths }

import org.eclipse.jgit.internal.JGitText
import org.eclipse.jgit.lib.{ Config, Constants }
import org.eclipse.jgit.storage.file.FileBasedConfig
import org.eclipse.jgit.util.{ FS, StringUtils, SystemReader }
import org.slf4j.LoggerFactory

// Copy of org.eclipse.jgit.util.SystemReader.Default with:
// * calls to Files.createDirectories guarded by if !Files.isDirectory
//   necessary because my ~/.config is a symlink to a directory
//   which Files.createDirectories isn't happy with
object JGitSystemReader extends SystemReader {
  private val LOG = LoggerFactory.getLogger(getClass)

  lazy val init: Unit = SystemReader.setInstance(this)

  override lazy val getHostname = {
    try InetAddress.getLocalHost.getCanonicalHostName
    catch { case _: UnknownHostException => "localhost" }
  }.ensuring(_ != null)

  override def getenv(variable: String): String = System.getenv(variable)
  override def getProperty(key: String): String = System.getProperty(key)
  override def getCurrentTime: Long             = System.currentTimeMillis
  override def getTimezone(when: Long): Int     = getTimeZone.getOffset(when) / (60 * 1000)

  override def openUserConfig(parent: Config, fs: FS) =
    new FileBasedConfig(parent, new File(fs.userHome, ".gitconfig"), fs)

  override def openSystemConfig(parent: Config, fs: FS): FileBasedConfig = {
    if (StringUtils.isEmptyOrNull(getenv(Constants.GIT_CONFIG_NOSYSTEM_KEY))) {
      val configFile = fs.getGitSystemConfig
      if (configFile != null) return new FileBasedConfig(parent, configFile, fs)
    }
    new FileBasedConfig(parent, null, fs) {
      override def load(): Unit = () // do not load
      override def isOutdated   = false // regular class would bomb here
    }
  }

  override def openJGitConfig(parent: Config, fs: FS): FileBasedConfig = {
    val xdgPath = getXDGConfigHome(fs)
    if (xdgPath != null) {
      var configPath: Path = null
      try {
        configPath = xdgPath.resolve("jgit")
        if (!Files.isDirectory(configPath))
          Files.createDirectories(configPath)
        configPath = configPath.resolve(Constants.CONFIG)
        return new FileBasedConfig(parent, configPath.toFile, fs)
      } catch {
        case e: IOException =>
          LOG.error(JGitText.get.createJGitConfigFailed, configPath: Any, e)
      }
    }
    new FileBasedConfig(parent, new File(fs.userHome, ".jgitconfig"), fs)
  }

  private def getXDGConfigHome(fs: FS): Path = {
    var configHomePath = getenv(Constants.XDG_CONFIG_HOME)
    if (StringUtils.isEmptyOrNull(configHomePath))
      configHomePath = new File(fs.userHome, ".config").getAbsolutePath
    try {
      val xdgHomePath = Paths.get(configHomePath)
      if (!Files.isDirectory(xdgHomePath))
        Files.createDirectories(xdgHomePath)
      xdgHomePath
    } catch {
      case e @ (_: IOException | _: InvalidPathException) =>
        LOG.error(JGitText.get.createXDGConfigHomeFailed, configHomePath: Any, e)
        null
    }
  }
} 
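
The guard used in openJGitConfig and getXDGConfigHome (only call Files.createDirectories when the target is not already a directory, so symlinked directories survive) is reusable on its own. A minimal sketch; the ~/.config/myapp path is only an illustration:

import java.nio.file.{Files, Path, Paths}

def ensureDirectory(dir: Path): Path = {
  // Files.createDirectories can fail when the path is a symlink to a directory
  // (as noted in the header comment above), so only create it when needed.
  if (!Files.isDirectory(dir)) Files.createDirectories(dir)
  dir
}

val appConfigDir = ensureDirectory(Paths.get(System.getProperty("user.home"), ".config", "myapp"))
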
Example 110
Source File: ProcessJobRunnerSrv.scala    From Cortex   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.thp.cortex.services

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import javax.inject.{Inject, Singleton}
import org.elastic4play.utils.RichFuture
import org.thp.cortex.models._
import play.api.Logger
import play.api.libs.json.Json

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.sys.process.{Process, ProcessLogger, _}
import scala.util.Try

@Singleton
class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) {

  lazy val logger = Logger(getClass)

  private val pythonPackageVersionRegex = "^Version: ([0-9]*)\\.([0-9]*)\\.([0-9]*)".r

  def checkCortexUtilsVersion(pythonVersion: String): Option[(Int, Int, Int)] =
    Try {
      (s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil)
        .lineStream
        .collectFirst {
          case pythonPackageVersionRegex(major, minor, patch) ⇒ (major.toInt, minor.toInt, patch.toInt)
        }
    }.getOrElse(None)

  def run(jobDirectory: Path, command: String, job: Job, timeout: Option[FiniteDuration])(implicit ec: ExecutionContext): Future[Unit] = {
    val baseDirectory = Paths.get(command).getParent.getParent
    val output        = StringBuilder.newBuilder
    logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}")
    val process = Process(Seq(command, jobDirectory.toString), baseDirectory.toFile)
      .run(ProcessLogger { s ⇒
        logger.info(s"  Job ${job.id}: $s")
        output ++= s
      })
    val execution = Future
      .apply {
        process.exitValue()
        ()
      }
      .map { _ ⇒
        val outputFile = jobDirectory.resolve("output").resolve("output.json")
        if (!Files.exists(outputFile) || Files.size(outputFile) == 0) {
          val report = Json.obj("success" → false, "errorMessage" → output.toString)
          Files.write(outputFile, report.toString.getBytes(StandardCharsets.UTF_8))
        }
        ()
      }
      .recoverWith {
        case error ⇒
          logger.error(s"Execution of command $command failed", error)
          Future.apply {
            val report = Json.obj("success" → false, "errorMessage" → s"${error.getMessage}\n$output")
            Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8))
            ()
          }
      }
    timeout.fold(execution)(t ⇒ execution.withTimeout(t, killProcess(process)))
  }

  def killProcess(process: Process): Unit = {
    logger.info("Timeout reached, killing process")
    process.destroy()
  }
} 
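
run derives the worker's base directory by going two levels up from the command path. The same Paths navigation in isolation (the /opt/... layout is hypothetical):

import java.nio.file.Paths

val command       = "/opt/workers/my-analyzer/bin/run"   // hypothetical worker layout
val baseDirectory = Paths.get(command).getParent.getParent
// baseDirectory == /opt/workers/my-analyzer, used above as the process working directory
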
Example 111
Source File: L8-38SparkR.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import scala.reflect.runtime.universe
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import java.nio.file.Paths
import org.apache.spark.SparkFiles

object CdrStreamingSparkRApp {

  case class Cdr(squareId: Int, timeInterval: Long, countryCode: Int,
    smsInActivity: Float, smsOutActivity: Float, callInActivity: Float,
    callOutActivity: Float, internetTrafficActivity: Float)

  def main(args: Array[String]) {
    if (args.length != 7) {
      System.err.println(
        "Usage: CdrStreamingSparkRApp <appname> <batchInterval> <hostname> <port> <tableName> <RScriptPath> <RScriptLogsPath>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port, tableName, rScriptPath, logsPath) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    val cl = Thread.currentThread().getContextClassLoader()
    val hiveC = new HiveContext(ssc.sparkContext)
    Thread.currentThread().setContextClassLoader(cl)

    import hiveC.implicits._

    ssc.sparkContext.addFile(rScriptPath)
    val rScriptName = SparkFiles.get(Paths.get(rScriptPath).getFileName.toString)
    val master = hiveC.sparkContext.getConf.get("spark.master")

    val cdrStream = ssc.socketTextStream(hostname, port.toInt)
      .map(_.split("\\t", -1))
      .foreachRDD((rdd, time) => {
        val iTableName = tableName + time.milliseconds
        seqToCdr(rdd).toDF().write.saveAsTable(iTableName)
        hiveC.sparkContext.parallelize(Array(iTableName)).pipe("%s %s".format(rScriptName, master)).saveAsTextFile(Paths.get(logsPath, iTableName).toString)
      })

    ssc.start()
    ssc.awaitTermination()
  }

  def seqToCdr(rdd: RDD[Array[String]]): RDD[Cdr] = {
    rdd.map(c => c.map(f => f match {
      case x if x.isEmpty() => "0"
      case x => x
    })).map(c => Cdr(c(0).toInt, c(1).toLong, c(2).toInt, c(3).toFloat,
      c(4).toFloat, c(5).toFloat, c(6).toFloat, c(7).toFloat))
  }
} 
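
Two small Paths idioms carry the file handling above: getFileName extracts just the script name for SparkFiles, and the two-argument Paths.get joins the logs directory with the per-batch table name. Illustrated on their own (paths are hypothetical):

import java.nio.file.Paths

val rScriptPath = "/jobs/scripts/aggregate.R"              // hypothetical script location
Paths.get(rScriptPath).getFileName.toString                // "aggregate.R"
Paths.get("/var/logs/streaming", "cdr_table_1").toString   // "/var/logs/streaming/cdr_table_1"
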
Example 112
Source File: ClientSpec.scala    From gemini   with GNU General Public License v3.0 5 votes vote down vote up
package tech.sourced.featurext

import io.grpc.{ManagedChannel, ManagedChannelBuilder}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import tech.sourced.featurext.generated.service._
import gopkg.in.bblfsh.sdk.v1.uast.generated.Node
import java.nio.file.{Files, Paths}

@tags.FEIntegration
class ClientSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterAll {

  val serverHost = "localhost"
  val serverPort = 9001
  val fixturePath = "src/test/resources/protomsgs/server.py.proto"

  var channel: ManagedChannel = _
  var blockingStub: FeatureExtractorGrpc.FeatureExtractorBlockingStub = _
  var uast: Node = _

  override def beforeAll(): Unit = {
    super.beforeAll()

    val byteArray: Array[Byte] = Files.readAllBytes(Paths.get(fixturePath))
    uast = Node.parseFrom(byteArray)

    channel = ManagedChannelBuilder.forAddress(serverHost, serverPort).usePlaintext(true).build()
    blockingStub = FeatureExtractorGrpc.blockingStub(channel)
  }

  override def afterAll(): Unit = {
    channel.shutdownNow()
  }

  "identifiers call" should "return correct response" in {
    val request = IdentifiersRequest(uast=Some(uast), options=Some(IdentifiersOptions(docfreqThreshold=5)))
    val reply = blockingStub.identifiers(request)
    var features = reply.features.sortBy(_.name)

    // check correct shape of response
    features.size should be(49)
    features(0).name should be("i.ArgumentParser")
    features(0).weight should be(1)
  }

  "literals call" should "return correct response" in {
    val request = LiteralsRequest(uast=Some(uast), options=Some(LiteralsOptions(docfreqThreshold=5)))
    val reply = blockingStub.literals(request)
    var features = reply.features.sortBy(_.name)

    // check correct shape of response
    features.size should be(16)
    features(0).name should be("l.149420d2b7f04801")
    features(0).weight should be(1)
  }

  "uast2seq call" should "return correct response" in {
    val request = Uast2seqRequest(uast=Some(uast), options=Some(Uast2seqOptions(docfreqThreshold=5)))
    val reply = blockingStub.uast2Seq(request)
    var features = reply.features.sortBy(_.name)

    // check correct shape of response
    features.size should be(207)
    features(0).name should be("s.Assign>Name>Attribute>Call>Expr")
    features(0).weight should be(1)
  }

  "graphlet call" should "return correct response" in {
    val request = GraphletRequest(uast=Some(uast), options=Some(GraphletOptions(docfreqThreshold=5)))
    val reply = blockingStub.graphlet(request)
    var features = reply.features.sortBy(_.name)

    // check correct shape of response
    features.size should be(106)
    features(1).name should be("g.Assign_Call_Attribute")
    features(0).weight should be(1)
  }
} 
Example 113
Source File: FileSinkFunction.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.runtime

import java.io.OutputStream
import java.nio.file.{Files, Paths, StandardOpenOption}

import com.amazon.milan.dataformats.DataOutputFormat
import org.apache.flink.streaming.api.functions.sink.SinkFunction


class FileSinkFunction[T](path: String, dataFormat: DataOutputFormat[T]) extends SinkFunction[T] {
  @transient private lazy val outputStream = this.openOutputStream()

  override def invoke(value: T, context: SinkFunction.Context[_]): Unit = {
    this.dataFormat.writeValue(value, this.outputStream)
    this.outputStream.flush()
  }

  private def openOutputStream(): OutputStream = {
    Files.newOutputStream(Paths.get(this.path), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
  }
} 
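
openOutputStream truncates the target file every time the sink starts. If existing contents should be kept instead, the same Files.newOutputStream call takes APPEND; a hedged variant that is not part of the original class:

import java.io.OutputStream
import java.nio.file.{Files, Paths, StandardOpenOption}

// Append-mode counterpart of openOutputStream: keeps existing contents across restarts.
def openAppendStream(path: String): OutputStream =
  Files.newOutputStream(Paths.get(path), StandardOpenOption.CREATE, StandardOpenOption.APPEND)
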
Example 114
Source File: MapreduceDriverTest.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package org.schedoscope.scheduler.driver

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.scalatest.{FlatSpec, Matchers}
import org.schedoscope.dsl.View
import org.schedoscope.dsl.transformations.{FailingMapper, MapreduceTransformation}
import org.schedoscope.test.resources.LocalTestResources
import org.schedoscope.test.resources.TestDriverRunCompletionHandlerCallCounter._

class MapreduceDriverTest extends FlatSpec with Matchers with TestFolder {
  lazy val driver = new LocalTestResources().driverFor[MapreduceTransformation]("mapreduce")

  def invalidJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => Job.getInstance

  def failingJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => {
    writeData()
    val job = Job.getInstance
    job.setMapperClass(classOf[FailingMapper])
    FileInputFormat.setInputPaths(job, new Path(inputPath("")))
    FileOutputFormat.setOutputPath(job, new Path(outputPath(System.nanoTime.toString)))
    job
  }

  def identityJob: (Map[String, Any]) => Job = (m: Map[String, Any]) => {
    writeData()
    val job = Job.getInstance
    FileInputFormat.setInputPaths(job, new Path(inputPath("")))
    FileOutputFormat.setOutputPath(job, new Path(outputPath(System.nanoTime.toString)))
    job
  }

  case class DummyView() extends View

  def writeData() {
    Files.write(Paths.get(s"${inputPath("")}/file.txt"), "some data".getBytes(StandardCharsets.UTF_8))
  }

  "MapreduceDriver" should "have transformation name Mapreduce" in {
    driver.transformationName shouldBe "mapreduce"
  }

  it should "execute Mapreduce transformations synchronously" in {
    val driverRunState = driver.runAndWait(MapreduceTransformation(new DummyView(), identityJob))

    driverRunState shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute another Mapreduce transformations synchronously" in {
    val driverRunState = driver.runAndWait(MapreduceTransformation(new DummyView(), identityJob))

    driverRunState shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute Mapreduce transformations asynchronously" in {
    val driverRunHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    var runWasAsynchronous = false

    while (driver.getDriverRunState(driverRunHandle).isInstanceOf[DriverRunOngoing[_]])
      runWasAsynchronous = true

    runWasAsynchronous shouldBe true
    driver.getDriverRunState(driverRunHandle) shouldBe a[DriverRunSucceeded[_]]
  }

  it should "execute Mapreduce transformations and return errors when running asynchronously" in {
    val driverRunHandle = driver.run(MapreduceTransformation(new DummyView(), failingJob))

    var runWasAsynchronous = false

    while (driver.getDriverRunState(driverRunHandle).isInstanceOf[DriverRunOngoing[_]])
      runWasAsynchronous = true

    // runWasAsynchronous shouldBe true FIXME: isn't asynchronous, why?
    driver.getDriverRunState(driverRunHandle) shouldBe a[DriverRunFailed[_]]
  }

  it should "call its DriverRunCompletitionHandlers' driverRunCompleted upon request" in {
    val runHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    while (driver.getDriverRunState(runHandle).isInstanceOf[DriverRunOngoing[_]]) {}

    driver.driverRunCompleted(runHandle)

    driverRunCompletedCalled(runHandle, driver.getDriverRunState(runHandle)) shouldBe true
  }

  it should "call its DriverRunCompletitionHandlers' driverRunStarted upon request" in {
    val runHandle = driver.run(MapreduceTransformation(new DummyView(), identityJob))

    driver.driverRunStarted(runHandle)

    driverRunStartedCalled(runHandle) shouldBe true
  }
} 
Example 115
Source File: Main.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package benchmarks

import java.nio.file.{Files, Path, Paths}

object Main extends App {
  val startTime = System.currentTimeMillis()
  val rootPath: Path = Paths.get(args.headOption.getOrElse("."))
  val compilerSetup = new CompilerSetup(rootPath, args.drop(3).toList)
  val N = args.drop(1).headOption.map(_.toInt).getOrElse(2) // TODO change it!
  val M = args.drop(2).headOption.map(_.toInt).getOrElse(15)

  val sources = IO.listSourcesIn(rootPath.resolve("sources")).map(_.toString)
  val removeAt = N - M
  val profileFile = compilerSetup.outputDir.resolve("profile.txt") // TODO always add this!

  def runCompilation(n: Int): Long = {

    val run = new compilerSetup.global.Run
    val start = System.currentTimeMillis()
    run.compile(sources)
    val duration = System.currentTimeMillis() - start
    Files.move(compilerSetup.currentOutput, compilerSetup.currentOutput.resolveSibling(s"classes_$n"))
    if (n == removeAt && Files.exists(profileFile)) {
      Files.move(profileFile, profileFile.resolveSibling("initial-profile.txt"))
    }
    duration
  }

  println(s"Running benchmark with (N=$N, M=$M) in $rootPath with scalac options: ${compilerSetup.scalacOptions}")

  val times = (1 to N).map(runCompilation)
  val total = System.currentTimeMillis() - startTime

  def asSec(long: Long) = long / 1000.0
  def asSec(d: Double) = d / 1000

  val overhead = asSec(total - times.sum)
  val lastMAvg = asSec(times.takeRight(M).sum / M.toDouble) // TODO support cases where M > N
  val allAvg = asSec(times.sum / N.toDouble)

  // TODO proper output format
  println(s"Run $N compilations in ${asSec(total)} with overhead: $overhead.")
  println(s"Avgs. Last ($M): $lastMAvg, all $allAvg")
  println(s"Times: ${times.map(asSec)}")
} 
Example 116
Source File: Main.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package org.preftester

import java.io.File
import java.nio.file.{Files, Paths}

import com.typesafe.config.{ConfigFactory, ConfigObject, ConfigParseOptions}
import org.perftester.results.renderer.TextRenderer
import org.perftester.results.{ResultReader, RunResult}

import scala.collection.JavaConverters._
import scala.sys.process.Process
import scala.util.{Random, Try}

object Main extends App {
  val baseDir = Paths.get(args.headOption.getOrElse("."))

  case class Configuration(
                            reference: String,
                            baseScalaVersion: String,
                            buildLocally: Boolean,
                            jvmOptions: String,
                            scalaOptions: String
                          ){
    val scalaVersion = if(buildLocally) s"$baseScalaVersion-$reference-SNAPSHOT" else reference
  }

  val config = ConfigFactory.parseFile(
    baseDir.resolve("benchmark.conf").toFile,
    ConfigParseOptions.defaults().setAllowMissing(false)
  )

  val benchmarks = config.getObject("benchmarks").asScala.map {
    case (name, obj: ConfigObject) =>
      def read(name: String, default: String) = Try(obj.toConfig.getString(name)).getOrElse(default)

      name -> Configuration(
        reference = read("reference", name),
        baseScalaVersion = read("baseScalaVersion", "2.12.4"),
        buildLocally = read("buildLocally", "false").toBoolean,
        jvmOptions = read("jvmOptions", ""),
        scalaOptions = read("scalaOptions", "")
      )
  }.toSeq

  val iterations = config.getInt("iterations")
  val N = config.getInt("N")
  val M = config.getInt("M")

  val results = (1 to iterations).foldLeft(Map.empty[String, Vector[RunResult]]){
    case (all, i) =>
      Random.shuffle(benchmarks).foldLeft(all){
        case (all, (name, benchmark)) =>
          val location = baseDir.resolve(benchmark.scalaVersion)
          val cmd = Seq(s"./run.sh", ".", N, M, benchmark.scalaOptions).map(_.toString)
          println(s"## Run $i for $name")
          val env = if(benchmark.jvmOptions.isEmpty) Nil else Seq("_JAVA_OPTIONS" -> benchmark.jvmOptions)
          val output = Process(cmd, location.toFile, env:_*).!!
          println(output)
          val resultsDir = location.resolve("output").resolve("profile.txt")
          if (Files.exists(resultsDir)){
            val result = ResultReader.readResults(name, resultsDir, N)
            val previous = all.getOrElse(name, Vector.empty)
            all + (name -> (previous :+ result))
          } else all

      }
  }
  results.foreach{ case (name, results) =>
    println(s"########## Result for $name ##########")
    TextRenderer.outputTextResults(iterations, results)
  }
} 
Example 117
Source File: ChunkedEntitiesDocs.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.play.server

import akka.stream.scaladsl.Source
import endpoints4s.algebra
import endpoints4s.algebra.JsonStreamingExample
import scala.concurrent.duration.DurationInt

trait ChunkedEntitiesDocs extends algebra.ChunkedEntitiesDocs with ChunkedEntities {

  //#implementation
  import akka.stream.scaladsl.FileIO
  import java.nio.file.Paths

  val logoHandler =
    logo.implementedBy { _ =>
      FileIO.fromPath(Paths.get("/foo/bar/logo.png")).map(_.toArray)
    }
  //#implementation

}

//#json-streaming
import endpoints4s.play.server

class JsonStreamingExampleServer(val playComponents: server.PlayComponents)
    extends JsonStreamingExample
    with server.Endpoints
    with server.ChunkedJsonEntities
    with server.JsonEntitiesFromSchemas {

  val routes = routesFromEndpoints(
    ticks.implementedBy(_ => Source.tick(0.seconds, 1.second, ()))
  )

}
//#json-streaming 
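
FileIO.fromPath also accepts an explicit chunk size, which controls the size of the ByteString elements fed into the chunked response; a sketch (the 64 KiB value is arbitrary):

import akka.stream.scaladsl.FileIO
import java.nio.file.Paths

// Stream the logo in 64 KiB chunks instead of the default 8 KiB.
val logoSource = FileIO.fromPath(Paths.get("/foo/bar/logo.png"), chunkSize = 64 * 1024)
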
Example 118
Source File: ChunkedEntitiesDocs.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.akkahttp.server

import akka.http.scaladsl.server.Route
import endpoints4s.algebra
import endpoints4s.algebra.JsonStreamingExample

trait ChunkedEntitiesDocs extends algebra.ChunkedEntitiesDocs with ChunkedEntities {

  //#implementation
  import java.nio.file.Paths
  import akka.stream.scaladsl.FileIO

  val logoRoute: Route =
    logo.implementedBy { _ =>
      FileIO.fromPath(Paths.get("/foo/bar/logo.png")).map(_.toArray)
    }
  //#implementation

}

import scala.concurrent.duration.DurationInt

//#json-streaming
import akka.stream.scaladsl.Source
import endpoints4s.akkahttp.server

object JsonStreamingExampleServer
    extends JsonStreamingExample
    with server.Endpoints
    with server.ChunkedJsonEntities
    with server.JsonEntitiesFromSchemas {

  val routes =
    ticks.implementedBy(_ => Source.tick(0.seconds, 1.second, ()))

}
//#json-streaming 
Example 119
Source File: DbService.scala    From sns   with Apache License 2.0 5 votes vote down vote up
package me.snov.sns.service

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths, StandardOpenOption}

import akka.event.LoggingAdapter
import me.snov.sns.model.{Configuration, Subscription, Topic}
import spray.json._

trait DbService {
  def load(): Option[Configuration]

  def save(configuration: Configuration)
}

class MemoryDbService extends DbService {
  override def load(): Option[Configuration] = {
    Some(Configuration(subscriptions= List[Subscription](), topics= List[Topic]()))
  }

  override def save(configuration: Configuration): Unit = {}
}

class FileDbService(dbFilePath: String)(implicit log: LoggingAdapter) extends DbService {

  val subscriptionsName = "subscriptions"
  val topicsName = "topics"
  
  val path = Paths.get(dbFilePath)
  
  def load(): Option[Configuration] = {
    if (Files.exists(path)) {
      log.debug("Loading DB")
      try {
        val configuration = read().parseJson.convertTo[Configuration]
        log.info("Loaded DB")
        return Some(configuration)
      } catch {
        case e: DeserializationException => log.error(e, "Unable to parse configuration")
        case e: RuntimeException => log.error(e,"Unable to load configuration")
      }
    }
    None
  }
  
  def save(configuration: Configuration) = {
    log.debug("Saving DB")
    write(configuration.toJson.prettyPrint)
  }

  private def write(contents: String) = {
    Files.write(path, contents.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
  }

  private def read(): String = {
    new String(Files.readAllBytes(path))
  }
} 
Example 120
Source File: MuseCharMapLoader.scala    From Muse-CGH   with MIT License 5 votes vote down vote up
package utilities

import java.nio.file.Paths

import main.MuseChar


object MuseCharMapLoader {

  def loadDefaultCharMap(): Map[Char, MuseChar] = {
    var list = List[(Char, MuseChar)]()

    print("letters missing: ")

    (0 until 26).foreach{ i =>
      val c = ('a'.toInt + i).toChar
      val lower = loadChar(s"letters/$c.muse")
      val upper = loadChar(s"letters/upper_$c.muse")

      lower match {
        case Some(l) =>
          def fromLower() = {
            print(s"${c.toUpper} ")
            l
          }
          list = List(c -> l, c.toUpper -> upper.getOrElse(fromLower())) ++ list
        case None =>
          print(s"$c ${c.toUpper} ")
      }
    }

    val numberList = (0 to 9).map(i => (i + '0').toChar -> i.toString).toList
    val punctuationMarkList = List(
      ','->"comma",
      '.'->"period",
      ';'->"semicolon",
      '\''->"upper_comma",
      '’' -> "upper_comma",
      '-'->"hyphen",
      '—' -> "hyphen",
      ':' -> "colon",
      '?' -> "question_mark",
      '!' -> "exclamation_mark",
      '(' -> "open_bracket",
      ')' -> "close_bracket",
      '"' -> "quotation_mark"
    )

    (numberList ++ punctuationMarkList).foreach{
      case (key, name) =>
        loadChar(s"letters/$name.muse") match{
          case Some(l) => list = (key -> l) :: list
          case None =>
            print(s"$name ")
        }
    }

    println("\n-----")

    list.toMap
  }

  def loadChar(fileName: String): Option[MuseChar] = {
    val file = Paths.get(fileName).toFile
    if(file.exists()){
      EditingSaver.loadFromFile(file).foreach{ e =>
        return Some(e.letter)
      }
    }
    None
  }
} 
Example 121
Source File: ConfigValue.scala    From seqspark   with Apache License 2.0 5 votes vote down vote up
package org.dizhang.seqspark.util

import java.io.File
import java.nio.file.{Files, Paths}

object ConfigValue {
  object GenomeBuild extends Enumeration {
    val hg18 = Value("hg18")
    val hg19 = Value("hg19")
    val hg38 = Value("hg38")
  }

  object GenotypeFormat extends Enumeration {
    val vcf = Value("vcf")
    val imputed = Value("impute2")
    //val bgen = Value("bgen")
    //val cacheFullvcf = Value("cachedFullVcf")
    val cacheVcf = Value("cachedVcf")
    val cacheImputed = Value("cachedImpute2")
  }

  sealed trait Samples
  object Samples {
    case object all extends Samples
    case object none extends Samples
    case class by(batch: String) extends Samples

    def apply(value: String): Samples = value match {
      case "all" => all
      case "none" => none
      case x => by(x)
    }
  }

  sealed trait Variants
  object Variants {
    case object all extends Variants
    case object exome extends Variants
    case class by(regions: String) extends Variants
    case class from(file: File) extends Variants

    def apply(value: String): Variants = value match {
      case "all" => all
      case "exome" => exome
      case x =>
        val path = Paths.get(x)
        if (Files.exists(path)) from(path.toFile) else by(x)
    }

  }

  object MutType extends Enumeration {
    val snv = Value("snv")
    val indel = Value("indel")
    val cnv = Value("cnv")
  }

  object MethodType extends Enumeration {
    val skat = Value("skat")
    val skato = Value("skato")
    val meta = Value("meta")
    val cmc = Value("cmc")
    val brv = Value("brv")
    val snv = Value("snv")
  }

  object WeightMethod extends Enumeration {
    val none = Value("none")
    val equal = Value("equal")
    val wss = Value("wss")
    val erec = Value("erec")
    val skat = Value("skat")
    val annotation = Value("annotation")
  }

  object TestMethod extends Enumeration {
    val score = Value("score")
    //val lhr = Value("lhr")
    val wald = Value("wald")
  }

  object ImputeMethod extends Enumeration {
    val bestGuess = Value("bestGuess")
    val meanDosage = Value("meanDosage")
    val random = Value("random")
    val no = Value("no")
  }

  object DBFormat extends Enumeration {
    val vcf: Value = Value("vcf")
    val gene: Value = Value("gene")
    val plain: Value = Value("plain")
    val csv: Value = Value("csv")
    val tsv: Value = Value("tsv")
  }
} 
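
Samples and Variants dispatch on the raw config string, with Variants additionally probing the file system: an existing path becomes from(file), anything else a by(regions) expression. Hypothetical usage:

import org.dizhang.seqspark.util.ConfigValue.{Samples, Variants}

Samples("all")                       // Samples.all
Samples("batch1")                    // Samples.by("batch1")
Variants("exome")                    // Variants.exome
Variants("chr22:16050000-17000000")  // Variants.by(...) unless a file of that name exists
Variants("/data/regions.bed")        // Variants.from(file) if the file exists, by(...) otherwise
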
Example 122
Source File: Export.scala    From seqspark   with Apache License 2.0 5 votes vote down vote up
package org.dizhang.seqspark.worker

import java.net.URI
import java.nio.file.{Files, Path, Paths}

import org.dizhang.seqspark.ds.Genotype
import org.dizhang.seqspark.ds.VCF._
import org.dizhang.seqspark.util.SeqContext
import org.dizhang.seqspark.util.UserConfig.hdfs
import org.apache.hadoop
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
object Export {

  private val logger = LoggerFactory.getLogger(getClass)

  def apply[A: Genotype](data: Data[A])(implicit ssc: SeqContext): Unit = {
    val geno = implicitly[Genotype[A]]
    val conf = ssc.userConfig.output.genotype
    if (conf.export) {
      val path = if (conf.path.isEmpty)
        ssc.userConfig.input.genotype.path + "." + ssc.userConfig.project
      else
        conf.path
      logger.info(s"going to export data to $path")

      if (path.startsWith("file:")) {
        val p = Paths.get(URI.create(path))
        if (Files.exists(p)) {
          Files.walk(p)
            .iterator()
            .asScala
            .toList
            .sorted(Ordering[Path].reverse)
            .foreach(f => Files.delete(f))
        }
      } else {
        val hdPath = new hadoop.fs.Path(path)
        if (hdfs.exists(hdPath)) {
          hdfs.delete(hdPath, true)
        }
      }

      data.samples(conf.samples).saveAsTextFile(path)

    }
    if (conf.save || conf.cache) {
      data.saveAsObjectFile(conf.path)
    }
  }
} 
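
The local-filesystem branch removes an existing export by walking the tree and deleting children before parents (hence the reverse Path ordering). The same idiom as a standalone helper, a sketch:

import java.nio.file.{Files, Path}
import scala.collection.JavaConverters._

// Recursively delete a directory: sorting the paths in reverse order guarantees that
// files and subdirectories are deleted before the directories that contain them.
def deleteRecursively(root: Path): Unit =
  if (Files.exists(root)) {
    Files.walk(root)
      .iterator()
      .asScala
      .toList
      .sorted(Ordering[Path].reverse)
      .foreach(Files.delete)
  }
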
Example 123
Source File: AddJar.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.{URL, URI}
import java.nio.file.{Files, Paths}
import java.util.zip.ZipFile
import org.apache.toree.magic._
import org.apache.toree.magic.builtin.AddJar._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.{ArgumentParsingSupport, DownloadSupport, LogLike, FileUtils}
import com.typesafe.config.Config
import org.apache.hadoop.fs.Path
import org.apache.toree.plugins.annotations.Event

object AddJar {
  val HADOOP_FS_SCHEMES = Set("hdfs", "s3", "s3n", "file")

  private var jarDir:Option[String] = None

  def getJarDir(config: Config): String = {
    jarDir.getOrElse({
      jarDir = Some(
        if(config.hasPath("jar_dir") && Files.exists(Paths.get(config.getString("jar_dir")))) {
          config.getString("jar_dir")
        } else {
          FileUtils.createManagedTempDirectory("toree_add_jars").getAbsolutePath
        }
      )
      jarDir.get
    })
  }
}

class AddJar
  extends LineMagic with IncludeInterpreter
  with IncludeOutputStream with DownloadSupport with ArgumentParsingSupport
  with IncludeKernel with IncludePluginManager with IncludeConfig with LogLike
{
  // Option to mark re-downloading of jars
  private val _force =
    parser.accepts("f", "forces re-download of specified jar")

  // Option to mark re-downloading of jars
  private val _magic =
    parser.accepts("magic", "loads jar as a magic extension")

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  @Event(name = "addjar")
  override def execute(code: String): Unit = {
    // Simplified: the full magic also validates its arguments and supports
    // downloading from Hadoop filesystem URLs.
    val jarRemoteLocation = parseArgs(code.trim).headOption.getOrElse("")
    val jarName = jarRemoteLocation.split("/").last
    val downloadLocation = getJarDir(config) + "/" + jarName
    val fileDownloadLocation = new File(downloadLocation)

    // Download the jar unless a cached copy exists (-f forces a re-download)
    if (_force || !fileDownloadLocation.exists()) {
      printStream.println(s"Starting download of $jarName")
      downloadFile(
        new URL(jarRemoteLocation),
        new File(downloadLocation).toURI.toURL
      )

      // Report download finished
      printStream.println(s"Finished download of $jarName")
    } else {
      printStream.println(s"Using cached version of $jarName")
    }

    // validate jar file
    if(! isValidJar(fileDownloadLocation)) {
      throw new IllegalArgumentException(s"Jar '$jarName' is not valid.")
    }

    if (_magic) {
      val plugins = pluginManager.loadPlugins(fileDownloadLocation)
      pluginManager.initializePlugins(plugins)
    } else {
      kernel.addJars(fileDownloadLocation.toURI)
    }
  }
} 
Example 124
Source File: ResumingEventFilter.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._

import scala.concurrent.Future

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.util.ByteString

import spray.json._
import com.typesafe.config.{ Config, ConfigFactory }

object ResumingEventFilter extends App with EventMarshalling {
  val config = ConfigFactory.load() 
  val maxLine = config.getInt("log-stream-processor.max-line")
  
  if(args.length != 3) {
    System.err.println("Provide args: input-file output-file state")
    System.exit(1)
  }

  val inputFile = FileArg.shellExpanded(args(0))
  val outputFile = FileArg.shellExpanded(args(1))

  val filterState = args(2) match {
    case State(state) => state
    case unknown => 
      System.err.println(s"Unknown state $unknown, exiting.") 
      System.exit(1)
  }
  import akka.stream.scaladsl._

  val source: Source[ByteString, Future[IOResult]] = 
    FileIO.fromPath(inputFile)

  val sink: Sink[ByteString, Future[IOResult]] = 
    FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND))

  val frame: Flow[ByteString, String, NotUsed] =  
    Framing.delimiter(ByteString("\n"), maxLine) 
      .map(_.decodeString("UTF8"))


  import akka.stream.ActorAttributes
  import akka.stream.Supervision

  import LogStreamProcessor.LogParseException

  val decider : Supervision.Decider = {
    case _: LogParseException => Supervision.Resume
    case _                    => Supervision.Stop
  }

  val parse: Flow[String, Event, NotUsed] = 
    Flow[String].map(LogStreamProcessor.parseLineEx) 
      .collect { case Some(e) => e }
      .withAttributes(ActorAttributes.supervisionStrategy(decider))


  val filter: Flow[Event, Event, NotUsed] =   
    Flow[Event].filter(_.state == filterState)
  
  val serialize: Flow[Event, ByteString, NotUsed] =  
    Flow[Event].map(event => ByteString(event.toJson.compactPrint))

  implicit val system = ActorSystem() 
  implicit val ec = system.dispatcher


  val graphDecider : Supervision.Decider = { 
    case _: LogParseException => Supervision.Resume
    case _                    => Supervision.Stop
  }

  import akka.stream.ActorMaterializerSettings
  implicit val materializer = ActorMaterializer(
    ActorMaterializerSettings(system)
      .withSupervisionStrategy(graphDecider)
  )



  val composedFlow: Flow[ByteString, ByteString, NotUsed] =  
    frame.via(parse)
      .via(filter)
      .via(serialize)

  val runnableGraph: RunnableGraph[Future[IOResult]] = 
    source.via(composedFlow).toMat(sink)(Keep.right)

  runnableGraph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$outputFile'.")
    system.terminate()
  }  

} 
Example 125
Source File: BidiEventFilter.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._


import scala.concurrent.Future

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.stream.scaladsl._
import akka.stream.scaladsl.JsonFraming
import akka.util.ByteString

import spray.json._
import com.typesafe.config.{ Config, ConfigFactory }

object BidiEventFilter extends App with EventMarshalling {
  val config = ConfigFactory.load() 
  val maxLine = config.getInt("log-stream-processor.max-line")
  val maxJsonObject = config.getInt("log-stream-processor.max-json-object")

  if(args.length != 5) {
    System.err.println("Provide args: input-format output-format input-file output-file state")
    System.exit(1)
  }

  val inputFile = FileArg.shellExpanded(args(2))
  val outputFile = FileArg.shellExpanded(args(3))
  val filterState = args(4) match {
    case State(state) => state
    case unknown => 
      System.err.println(s"Unknown state $unknown, exiting.") 
      System.exit(1)
  }


  val inFlow: Flow[ByteString, Event, NotUsed] = 
    if(args(0).toLowerCase == "json") {
      JsonFraming.objectScanner(maxJsonObject)
      .map(_.decodeString("UTF8").parseJson.convertTo[Event])
    } else {
      Framing.delimiter(ByteString("\n"), maxLine)
        .map(_.decodeString("UTF8"))
        .map(LogStreamProcessor.parseLineEx)
        .collect { case Some(event) => event }
    }

  val outFlow: Flow[Event, ByteString, NotUsed] = 
    if(args(1).toLowerCase == "json") {
      Flow[Event].map(event => ByteString(event.toJson.compactPrint))
    } else {
      Flow[Event].map{ event => 
        ByteString(LogStreamProcessor.logLine(event))
      }
    }
  val bidiFlow = BidiFlow.fromFlows(inFlow, outFlow)

    
  val source: Source[ByteString, Future[IOResult]] = 
    FileIO.fromPath(inputFile)

  val sink: Sink[ByteString, Future[IOResult]] = 
    FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND))
  

  val filter: Flow[Event, Event, NotUsed] =   
    Flow[Event].filter(_.state == filterState)

  val flow = bidiFlow.join(filter)


  val runnableGraph: RunnableGraph[Future[IOResult]] = 
    source.via(flow).toMat(sink)(Keep.right)

  implicit val system = ActorSystem() 
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()

  runnableGraph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$outputFile'.")
    system.terminate()
  }  
} 
Example 126
Source File: GenerateLogFile.scala    From 006877   with MIT License 5 votes vote down vote up
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter

import scala.concurrent.Future

import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.stream.scaladsl._
import akka.util.ByteString

object GenerateLogFile extends App {
  val filePath = args(0)
  val numberOfLines = args(1).toInt
  val rnd = new java.util.Random()
  val sink = FileIO.toPath(FileArg.shellExpanded(filePath), Set(CREATE, WRITE, APPEND))
  def line(i: Int) = {
    val host = "my-host"
    val service = "my-service"
    val time = ZonedDateTime.now.format(DateTimeFormatter.ISO_INSTANT)
    val state = if( i % 10 == 0) "warning" 
      else if(i % 101 == 0) "error" 
      else if(i % 1002 == 0) "critical"
      else "ok"
    val description = "Some description of what has happened."
    val tag = "tag"
    val metric = rnd.nextDouble() * 100
    s"$host | $service | $state | $time | $description | $tag | $metric \n"
  }

  val graph = Source.fromIterator{() => 
    Iterator.tabulate(numberOfLines)(line)
  }.map(l=> ByteString(l)).toMat(sink)(Keep.right)

  implicit val system = ActorSystem() 
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()

  graph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$filePath'.")
    system.terminate()
  }  
} 
Example 127
Source File: ResourceDb.scala    From typebus   with MIT License 5 votes vote down vote up
package io.surfkit.typebus

import java.nio.file.{Files, Path, Paths}

trait ResourceDb{

  def databaseTablePath(key: String): Path = {
    val root = Paths.get( this.getClass.getResource("/").getPath )
    // We want the resource path before compile
    val db = Paths.get( root.toString + "/../../../src/main/resources/typebus" )
    if(Files.notExists(db))
      Files.createDirectory(db)
    Paths.get(db + s"/${key}")
  }

} 
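
databaseTablePath resolves a typebus directory under src/main/resources (creating it if necessary) and returns a key-specific path inside it. Hypothetical usage from a caller that mixes the trait in (SchemaStore and saveSchema are illustrative names, not part of the original):

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import io.surfkit.typebus.ResourceDb

// Hypothetical store that persists a generated schema next to the project's resources.
object SchemaStore extends ResourceDb {
  def saveSchema(key: String, schemaJson: String): Unit =
    Files.write(databaseTablePath(key), schemaJson.getBytes(StandardCharsets.UTF_8))
}

SchemaStore.saveSchema("user-service", """{"type":"record"}""")
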
Example 128
Source File: WindowsPluginFrontend.scala    From protoc-bridge   with Apache License 2.0 5 votes vote down vote up
package protocbridge.frontend

import java.net.ServerSocket
import java.nio.file.{Files, Path, Paths}

import protocbridge.ProtocCodeGenerator

import scala.concurrent.blocking

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


object WindowsPluginFrontend extends PluginFrontend {

  case class InternalState(batFile: Path)

  override def prepare(plugin: ProtocCodeGenerator): (Path, InternalState) = {
    val ss = new ServerSocket(0)
    val state = createWindowsScript(ss.getLocalPort)

    Future {
      blocking {
        val client = ss.accept()
        val response =
          PluginFrontend.runWithInputStream(plugin, client.getInputStream)
        client.getOutputStream.write(response)
        client.close()
        ss.close()
      }
    }

    (state.batFile, state)
  }

  override def cleanup(state: InternalState): Unit = {
    Files.delete(state.batFile)
  }

  private def createWindowsScript(port: Int): InternalState = {
    val classPath =
      Paths.get(getClass.getProtectionDomain.getCodeSource.getLocation.toURI)
    val classPathBatchString = classPath.toString.replace("%", "%%")
    val batchFile = PluginFrontend.createTempFile(
      ".bat",
      s"""@echo off
          |"${sys
        .props(
          "java.home"
        )}\\bin\\java.exe" -cp "$classPathBatchString" ${classOf[
        BridgeApp
      ].getName} $port
        """.stripMargin
    )
    InternalState(batchFile)
  }
} 
Example 129
Source File: BitcoindAuthCredentials.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.rpc.config

import java.io.File
import org.bitcoins.core.util.BitcoinSLogger
import java.nio.file.Paths
import org.bitcoins.core.config.TestNet3
import org.bitcoins.core.config.MainNet
import org.bitcoins.core.config.RegTest
import java.nio.file.Files
import org.bitcoins.core.config.NetworkParameters

import scala.collection.JavaConverters._


    def cookie: String = {
      if (Files.exists(cookiePath)) {
        val cookieLines = Files.readAllLines(cookiePath).asScala
        cookieLines.head
      } else {
        throw new RuntimeException(s"Could not find $cookiePath!")
      }
    }

    def username: String = cookie.split(":").head
    def password: String = cookie.split(":").last

  }

  def fromConfig(config: BitcoindConfig): BitcoindAuthCredentials = {
    val datadir = config.datadir
    val username = config.username
    val password = config.password
    (username, password) match {
      case (Some(user), Some(pass)) =>
        PasswordBased(user, pass)
      case (_, _) =>
        CookieBased(config.network, datadir = datadir)
    }
  }
} 
Example 130
Source File: JdbcProfileComponent.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.db

import java.nio.file.{Files, Path, Paths}

import org.bitcoins.core.util.BitcoinSLogger
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

trait JdbcProfileComponent[+ConfigType <: AppConfig] extends BitcoinSLogger {

  def appConfig: ConfigType

  
  // todo: what happens to this if we
  // dont use SQLite?
  lazy val dbName: String = {
    appConfig.config.getString(s"${appConfig.moduleName}.db.name")
  }

  private def createDbFileIfDNE(): Unit = {
    //should add a check in here that we are using sqlite
    if (!Files.exists(dbPath)) {
      val _ = {
        logger.debug(s"Creating database directory=$dbPath")
        Files.createDirectories(dbPath)
        val dbFilePath = dbPath.resolve(dbName)
        logger.debug(s"Creating database file=$dbFilePath")
        Files.createFile(dbFilePath)
      }

      ()
    }
  }
} 
Example 131
Source File: LoggingStressTest.scala    From scribe   with MIT License 5 votes vote down vote up
package scribe.benchmark

import java.nio.file.{Files, Paths}
import java.util.concurrent.TimeUnit

import scribe.Logger
import scribe.format.Formatter
import scribe.writer.FileWriter
import scribe.writer.file.LogFileMode

object LoggingStressTest {
  def main(args: Array[String]): Unit = {
    val oneMillion = 1000000
    val oneHundredMillion = 100000000
    timed(oneHundredMillion, fileLogger(Formatter.default, LogFileMode.IO))
  }

  def stressAll(iterations: Int): Unit = {
    val types = List(
      "Null" -> nullLogger(),
      "NIO Simple" -> fileLogger(Formatter.simple, LogFileMode.NIO),
      "IO Simple" -> fileLogger(Formatter.simple, LogFileMode.IO),
      "NIO Default" -> fileLogger(Formatter.default, LogFileMode.NIO),
      "IO Default" -> fileLogger(Formatter.default, LogFileMode.IO)
    )
    types.foreach {
      case (name, logger) => {
        val elapsed = timed(iterations, logger)
        scribe.info(s"$iterations for $name in $elapsed seconds")
      }
    }
    scribe.info("Reversing!")
    types.reverse.foreach {
      case (name, logger) => {
        val elapsed = timed(iterations, logger)
        scribe.info(s"$iterations for $name in $elapsed seconds")
      }
    }
    scribe.info("Completed!")
  }

  def nullLogger(): Logger = Logger.empty.orphan()

  def fileLogger(formatter: Formatter, mode: LogFileMode): Logger = {
    val path = Paths.get("logs/file-logging.log")
    Files.deleteIfExists(path)
    Logger.empty.orphan().withHandler(formatter, FileWriter().withMode(mode).path(_ => path))
  }

  def timed(iterations: Int, logger: Logger): Double = {
    val start = System.nanoTime()
    stressLogger(iterations, logger)
    val elapsed = System.nanoTime() - start
    TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS) / 1000.0
  }

  def stressLogger(iterations: Int, logger: Logger): Unit = {
    logger.info("Testing logging")
    if (iterations > 0) {
      stressLogger(iterations - 1, logger)
    }
  }
} 
Example 132
Source File: LogPath.scala    From scribe   with MIT License 5 votes vote down vote up
package scribe.writer.file

import java.nio.file.{Path, Paths}
import perfolation._

object LogPath {
  lazy val default: Long => Path = simple()

  def simple(name: => String = "app.log", directory: Path = Paths.get("logs")): Long => Path = {
    _ => directory.resolve(name)
  }

  def daily(prefix: => String = "app",
            separator: String = "-",
            extension: String = "log",
            directory: => Path = Paths.get("logs")): Long => Path = {
    apply(prefix, separator, (l: Long) => p"${l.t.Y}$separator${l.t.m}$separator${l.t.d}", extension, directory)
  }

  def apply(prefix: => String = "app",
            separator: String = "-",
            distinction: Long => String,
            extension: String = "log",
            directory: => Path = Paths.get("logs")): Long => Path = {
    l: Long => directory.resolve(p"$prefix$separator${distinction(l)}.$extension")
  }
} 
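
Hypothetical usage of the factories above: each one returns a Long => Path function that the file writer invokes with the current timestamp.

import java.nio.file.Path

// Resolve today's log file, e.g. logs/service-2024-01-31.log (name depends on the current date).
val dailyPath: Long => Path = LogPath.daily(prefix = "service")
val todaysFile: Path        = dailyPath(System.currentTimeMillis())

// The simple factory always resolves the same file, e.g. logs/app.log.
val fixedPath: Long => Path = LogPath.simple()
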
Example 133
Source File: BackupPathAction.scala    From scribe   with MIT License 5 votes vote down vote up
package scribe.writer.action

import java.nio.file.{Files, Path, Paths}

import scribe.writer.file.LogFile
import perfolation._

object BackupPathAction extends Action {
  override def apply(previous: LogFile, current: LogFile): LogFile = {
    current.dispose()
    pushBackups(current.path)
    Files.createFile(current.path)
    current.replace()
  }

  private def pushBackups(path: Path, increment: Int = 1): Unit = {
    val backup = backupPath(path, increment)
    if (Files.exists(backup)) {
      pushBackups(path, increment + 1)
    }
    val current = backupPath(path, increment - 1)
    val lastModified = Files.getLastModifiedTime(current)
    Files.move(current, backup)
    Files.setLastModifiedTime(backup, lastModified)
  }

  private def backupPath(path: Path, increment: Int): Path = if (increment > 0) {
    val absolute = path.toAbsolutePath.toString
    val idx = absolute.lastIndexOf('.')
    val absolutePath = p"${absolute.substring(0, idx)}.$increment.${absolute.substring(idx + 1)}"
    Paths.get(absolutePath)
  } else {
    path
  }
} 
Example 134
Source File: UploadingFileClient.scala    From Akka-Cookbook   with MIT License 5 votes vote down vote up
package com.packt.chapter9

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}

object UploadingFileClient extends App {

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val http = Http()
  val entity = Multipart.FormData.fromPath(
    "file",
    ContentTypes.`text/plain(UTF-8)`,
    Paths.get("./src/main/resources/testfile.txt")
  ).toEntity()
  val uris = Seq(
    "http://localhost:8088/regularupload",
    "http://localhost:8088/streamupload"
  )
  val requests = uris.map(uri => HttpRequest(POST, uri, Nil, entity))

  Future.traverse(requests)(http.singleRequest(_)) andThen {
    case Success(responses) => responses.foreach(response =>
      response.entity.toStrict(5 seconds).map(_.data.utf8String).andThen {
        case Success(content) => println(s"Response: $content")
        case _ =>
      })
    case Failure(e) => println(s"request failed $e")
  }
} 
Example 135
Source File: TransformingStreamsApplication.scala    From Akka-Cookbook   with MIT License 5 votes vote down vote up
package com.packt.chapter8

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._

object TransformingStreamsApplication extends App {

  implicit val actorSystem = ActorSystem("TransformingStream")
  implicit val actorMaterializer = ActorMaterializer()

  val MaxGroups = 1000

  val path = Paths.get("src/main/resources/gzipped-file.gz")

  val stream = FileIO.fromPath(path)
    .via(Compression.gunzip())
    .map(_.utf8String.toUpperCase)
    .mapConcat(_.split(" ").toList)
    .collect { case w if w.nonEmpty =>
      w.replaceAll("""[p{Punct}&&[^.]]""", "").replaceAll(System.lineSeparator(), "") }
    .groupBy(MaxGroups, identity)
    .map(_ -> 1)
    .reduce((l, r) => (l._1, l._2 + r._2))
    .mergeSubstreams
    .to(Sink.foreach(println))

  stream.run()
} 
Example 136
Source File: ModularizingStreamsApplication.scala    From Akka-Cookbook   with MIT License 5 votes vote down vote up
package com.packt.chapter8

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.util.ByteString

object ModularizingStreamsApplication extends App {

  implicit val actorSystem = ActorSystem("TransformingStream")
  implicit val actorMaterializer = ActorMaterializer()

  val MaxGroups = 1000

  val path = Paths.get("src/main/resources/gzipped-file.gz")

  
  val streamUppercase = source
      .via(gunzip)
      .via(utf8UppercaseMapper)
      .via(splitter)
      .via(punctuationMapper)
      .via(filterEmptyElements)
      .via(wordCountFlow)
      .to(printlnSink)

  val streamLowercase = source
    .via(gunzip)
    .via(utf8LowercaseMapper)
    .via(splitter)
    .via(punctuationMapper)
    .via(filterEmptyElements)
    .via(wordCountFlow)
    .to(printlnSink)

  streamUppercase.run()
  streamLowercase.run()

//  val sourceGunzip = source.via(gunzip)
//  val reusableProcessingFlow = Flow[String].via(splitter)
//    .via(punctuationMapper)
//    .via(filterEmptyElements)
//    .via(wordCountFlow)
//
//  val streamLowercase = sourceGunzip
//    .via(utf8LowercaseMapper)
//    .via(reusableProcessingFlow)
//    .to(printlnSink)
} 
Example 137
Source File: VersionSyncCheckPlugin.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package lagom.build

import java.nio.file.Files
import java.nio.file.Paths

import scala.collection.JavaConverters._

import sbt._
import sbt.Keys._

object VersionSyncCheckPlugin extends AutoPlugin {
  override def trigger = allRequirements

  object autoImport {
    val versionSyncCheck = taskKey[Unit]("")
  }
  import autoImport._

  override def globalSettings = Seq(
    versionSyncCheck := versionSyncCheckImpl.value
  )

  final case class Entry(name: String, expectedVersion: String, valName: String)

  val entries = Seq(
    Entry("Scala", Dependencies.Versions.Scala212, "ScalaVersion"),
    Entry("Play", Dependencies.Versions.Play, "PlayVersion"),
    Entry("Akka", Dependencies.Versions.Akka, "AkkaVersion"),
    Entry("ScalaTest", Dependencies.Versions.ScalaTest, "ScalaTestVersion"),
    Entry("JUnit", Dependencies.Versions.JUnit, "JUnitVersion"),
    Entry("JUnitInterface", Dependencies.Versions.JUnitInterface, "JUnitInterfaceVersion"),
    Entry("Log4j", Dependencies.Versions.Log4j, "Log4jVersion")
  )

  def versionSyncCheckImpl = Def.task[Unit] {
    val log = state.value.log

    log.info("Running version sync check")

    val docsBuildLines = Files.lines(Paths.get("docs/build.sbt")).iterator.asScala.toStream

    val result = for (entry <- entries) yield {
      val Entry(name, expectedVersion, valName) = entry
      val Regex                                 = raw"""val $valName[: ].*"(\d+\.\d+(?:\.\d+)?(?:-(?:M|RC)\d+)?)"""".r.unanchored

      val unexpectedVersions = (for (Regex(version) <- docsBuildLines) yield version) match {
        case Stream(version) => if (version == expectedVersion) "" else version
        case Stream()        => "<none>"
        case multiple        => multiple.mkString("multiple: ", ", ", "")
      }

      if (unexpectedVersions == "") {
        log.info(s"Found matching version for $name: $expectedVersion")
        None
      } else {
        val message = s"Version mismatch for $name: expected $expectedVersion, found $unexpectedVersions"
        log.error(message)
        Some(message)
      }
    }

    val errorMessages = result.flatten

    if (errorMessages.isEmpty)
      log.info(s"Version sync check success")
    else
      fail(s"Version sync check failed:\n${errorMessages.map("  * " + _).mkString("\n")}")
  }

  private def fail(message: String): Nothing = {
    val fail = new MessageOnlyException(message)
    fail.setStackTrace(new Array[StackTraceElement](0))
    throw fail
  }
} 
Example 138
Source File: ServiceTestSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import javax.inject.Inject
import akka.japi.function.Procedure
import com.google.inject.AbstractModule
import com.lightbend.lagom.javadsl.api.Descriptor
import com.lightbend.lagom.javadsl.api.Service
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.javadsl.server.ServiceGuiceSupport
import com.lightbend.lagom.javadsl.testkit.ServiceTest.Setup
import com.lightbend.lagom.javadsl.testkit.ServiceTest.TestServer
import play.inject.guice.GuiceApplicationBuilder

import scala.collection.JavaConverters._
import scala.compat.java8.FunctionConverters._
import scala.util.Properties
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        withServer(ServiceTest.defaultSetup.withJdbc()) { _ =>
          ()
        }
      }
    }
  }

  def withServer(setup: Setup)(block: TestServer => Unit): Unit = {
    ServiceTest.withServer(setup.configureBuilder((registerService _).asJava), block(_))
  }

  def registerService(builder: GuiceApplicationBuilder): GuiceApplicationBuilder =
    builder.bindings(new TestServiceModule)

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl @Inject() (persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestServiceModule extends AbstractModule with ServiceGuiceSupport {
  override def configure(): Unit = bindService(classOf[TestService], classOf[TestServiceImpl])
} 
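
listTemporaryFiles relies on Files.newDirectoryStream with a glob to find the ServiceTest_* directories under java.io.tmpdir. A standalone variant that also closes the stream once the entries have been materialised; a sketch:

import java.nio.file.{Files, Path, Paths}
import scala.collection.JavaConverters._

// List entries of the system temp directory matching a glob, closing the stream afterwards.
def tempEntries(glob: String): List[Path] = {
  val tmpDir = Paths.get(System.getProperty("java.io.tmpdir"))
  val stream = Files.newDirectoryStream(tmpDir, glob)
  try stream.iterator().asScala.toList
  finally stream.close()
}

val serviceTestDirs = tempEntries("ServiceTest_*")
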
Example 139
Source File: ServiceTestSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.scaladsl.playjson.EmptyJsonSerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.server._
import play.api.db.HikariCPComponents
import play.api.libs.ws.ahc.AhcWSComponents

import scala.collection.JavaConverters._
import scala.util.Properties
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        ServiceTest.withServer(ServiceTest.defaultSetup.withJdbc())(new JdbcTestApplication(_)) { _ =>
          ()
        }
      }
    }
  }

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl(persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with LocalServiceLocator
    with AhcWSComponents { self: PersistenceComponents =>

  override lazy val jsonSerializerRegistry: JsonSerializerRegistry = EmptyJsonSerializerRegistry

  override lazy val lagomServer: LagomServer = serverFor[TestService](new TestServiceImpl(persistentEntityRegistry))
}

class CassandraTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with CassandraPersistenceComponents

class JdbcTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with JdbcPersistenceComponents
    with HikariCPComponents 
Example 140
Source File: WatchServiceTest.scala    From monix-nio   with Apache License 2.0 5 votes vote down vote up
package monix.nio.file

import java.io.File
import java.nio.file.{ Paths, WatchEvent }

import minitest.SimpleTestSuite
import monix.eval.Task
import monix.execution.Ack.{ Continue, Stop }

import scala.concurrent.duration._
import scala.concurrent.{ Await, Promise }
object WatchServiceTest extends SimpleTestSuite {
  implicit val ctx = monix.execution.Scheduler.Implicits.global

  test("file event captured") {
    val path = Paths.get(System.getProperty("java.io.tmpdir"))

    val watchP = Promise[Boolean]()
    val watchT = Task.evalAsync {
      watchAsync(path).timeoutOnSlowUpstream(10.seconds).subscribe(
        (events: Array[WatchEvent[_]]) => {
          val captured = events.find(e => s"${e.kind().name()} - ${e.context().toString}".contains("monix"))
          if (captured.isDefined) {
            watchP.success(true)
            Stop
          } else {
            Continue
          }
        },
        err => watchP.failure(err),
        () => watchP.success(true))
    }
    val fileT = Task.evalAsync {
      val temp = File.createTempFile("monix", ".tmp", path.toFile)
      Thread.sleep(2000)
      temp.delete()
    }

    watchT.runToFuture
    fileT.runToFuture

    val result = Await.result(watchP.future, 20.seconds)
    assert(result)
  }

} 
Example 141
Source File: IntegrationTest.scala    From monix-nio   with Apache License 2.0 5 votes vote down vote up
package monix.nio.file

import java.nio.file.{ Files, Paths, StandardOpenOption }
import java.util

import minitest.SimpleTestSuite
import monix.execution.Callback
import monix.nio.file

import scala.concurrent.duration._
import scala.concurrent.{ Await, Promise }
import scala.util.control.NonFatal

object IntegrationTest extends SimpleTestSuite {
  test("same file generated") {
    implicit val ctx = monix.execution.Scheduler.Implicits.global

    val from = Paths.get(this.getClass.getResource("/testFiles/file.txt").toURI)
    val to = Paths.get("src/test/resources/out.txt")
    val consumer = file.writeAsync(to)
    val p = Promise[Boolean]()
    val callback = new Callback[Throwable, Long] {
      override def onSuccess(value: Long): Unit = p.success(true)
      override def onError(ex: Throwable): Unit = p.failure(ex)
    }

    readAsync(from, 3)
      .consumeWith(consumer)
      .runAsync(callback)

    val result = Await.result(p.future, 3.second)
    assert(result)

    val f1 = Files.readAllBytes(from)
    val f2 = Files.readAllBytes(to)
    Files.delete(to) // clean
    assert(util.Arrays.equals(f1, f2))
  }

  test("add data to existing file") {
    implicit val ctx = monix.execution.Scheduler.Implicits.global

    val from = Paths.get(this.getClass.getResource("/testFiles/file.txt").toURI)
    val to = Paths.get("src/test/resources/existing.txt")
    val strSeq = Seq("A", "\u0024", "\u00A2", "\u20AC", new String(Array(0xF0, 0x90, 0x8D, 0x88).map(_.toByte)), "B")

    try {
      Files.write(to, strSeq.flatMap(_.getBytes).toArray, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.APPEND)
    } catch {
      case NonFatal(e) => fail(s"got error: $e")
    }
    val consumer = file.appendAsync(to, Files.size(to))
    val p = Promise[Boolean]()
    val callback = new Callback[Throwable, Long] {
      override def onSuccess(value: Long): Unit = p.success(true)
      override def onError(ex: Throwable): Unit = p.failure(ex)
    }

    readAsync(from, 3)
      .consumeWith(consumer)
      .runAsync(callback)

    val result = Await.result(p.future, 3.second)
    assert(result)

    val f1 = Files.readAllBytes(from)
    val f2 = Files.readAllBytes(to)
    Files.delete(to) // clean

    val all1: Seq[Byte] = strSeq.flatMap(_.getBytes) ++ f1.toSeq
    assertEquals(all1, f2.toSeq)
  }
} 
Example 142
Source File: CodecTest.scala    From monix-nio   with Apache License 2.0 5 votes vote down vote up
package monix.nio.file

import java.nio.file.{ Files, Paths }
import java.util

import minitest.SimpleTestSuite
import monix.eval.Task
import monix.execution.Callback
import monix.execution.Scheduler.Implicits.{ global => ctx }
import monix.nio.file
import monix.nio.text.UTF8Codec.{ utf8Decode, utf8Encode }
import monix.reactive.Observable

import scala.concurrent.duration._
import scala.concurrent.{ Await, Promise }

object CodecTest extends SimpleTestSuite {
  test("decode file utf8") {
    val from = Paths.get(this.getClass.getResource("/testFiles/specialChars.txt").toURI)

    val p = Promise[Seq[Byte]]()
    val callback = new Callback[Throwable, List[Array[Byte]]] {
      override def onSuccess(value: List[Array[Byte]]): Unit = p.success(value.flatten)
      override def onError(ex: Throwable): Unit = p.failure(ex)
    }

    readAsync(from, 3)
      .pipeThrough(utf8Decode)
      .pipeThrough(utf8Encode)
      .toListL
      .runAsync(callback)
    val result = Await.result(p.future, 3.second)
    val f1 = Files.readAllBytes(from)
    val f2 = result
    assert(util.Arrays.equals(f1, f2.toArray))
  }

  test("decode special chars") {
    val strSeq = Seq("A", "\u0024", "\u00A2", "\u20AC", new String(Array(0xF0, 0x90, 0x8D, 0x88).map(_.toByte)), "B")

    for (grouping <- 1 to 12) {
      val obsSeq =
        Observable
          .fromIterator(Task(strSeq.flatMap(_.getBytes).grouped(grouping).map(_.toArray)))
          .pipeThrough(utf8Decode)

      val p = Promise[Boolean]()
      val callback = new Callback[Throwable, List[String]] {
        override def onSuccess(value: List[String]): Unit = {
          p.success(if (value.mkString == strSeq.mkString) true else false)
        }

        override def onError(ex: Throwable): Unit = p.failure(ex)
      }
      obsSeq.toListL.runAsync(callback)
      val result = Await.result(p.future, 3.second)
      assert(result)
    }
  }

  test("copy file utf8") {
    val from = Paths.get(this.getClass.getResource("/testFiles/specialChars.txt").toURI)
    val to = Paths.get("src/test/resources/res.txt")
    val consumer = file.writeAsync(to)
    val p = Promise[Long]()
    val callback = new Callback[Throwable, Long] {
      override def onSuccess(value: Long): Unit = p.success(value)
      override def onError(ex: Throwable): Unit = p.failure(ex)
    }

    readAsync(from, 3)
      .pipeThrough(utf8Decode)
      .map { str =>
        //Console.println(str)
        str
      }
      .pipeThrough(utf8Encode)
      .consumeWith(consumer)
      .runAsync(callback)
    val result = Await.result(p.future, 3.second)
    val f1 = Files.readAllBytes(from)
    val f2 = result
    Files.delete(to)
    assertEquals(f1.size, f2)
  }
} 
Example 143
Source File: KeyUtils.scala    From grpc-scala-microservice-kit   with Apache License 2.0 5 votes vote down vote up
package mu.node.echod.util

import java.nio.file.{Files, Paths}
import java.security.{KeyFactory, PrivateKey, PublicKey}
import java.security.spec.{PKCS8EncodedKeySpec, X509EncodedKeySpec}

import pdi.jwt.JwtAlgorithm.RS256

trait KeyUtils {

  val jwtDsa = RS256

  def loadPkcs8PrivateKey(path: String): PrivateKey = {
    val keyBytes = Files.readAllBytes(Paths.get(path))
    val spec = new PKCS8EncodedKeySpec(keyBytes)
    KeyFactory.getInstance("RSA").generatePrivate(spec)
  }

  def loadX509PublicKey(path: String): PublicKey = {
    val keyBytes = Files.readAllBytes(Paths.get(path))
    val spec = new X509EncodedKeySpec(keyBytes)
    KeyFactory.getInstance("RSA").generatePublic(spec)
  }
} 
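A minimal, hedged sketch of mixing in the KeyUtils trait above. The key file paths are hypothetical, and the files must contain raw DER bytes (a PKCS#8-encoded private key and an X.509-encoded public key), not PEM text, because PKCS8EncodedKeySpec and X509EncodedKeySpec expect DER.

// Hypothetical usage; paths are assumptions, not part of the original project.
object JwtKeys extends KeyUtils

val privateKey = JwtKeys.loadPkcs8PrivateKey("src/test/resources/private_key.der")
val publicKey  = JwtKeys.loadX509PublicKey("src/test/resources/public_key.der")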
Example 144
Source File: FileEditor.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.wmexchanger.utils

import java.io.File
import java.nio.file.Paths

class FileEditor(protected var file: File) {
  import FileEditor._

  def get: File = file

  def setDir(dir: String): FileEditor = {
    val name = file.getName
    val newPath = Paths.get(dir, name).toString

    file = new File(newPath)
    this
  }

  def incDir(dir: String): FileEditor = {
    val name = file.getName
    val pathOpt = Option(file.getParentFile)
    val newPath =
        if (pathOpt.isDefined)
          Paths.get(Paths.get(pathOpt.get.getPath, dir).toString, name).toString
        else Paths.get(dir, name).toString

    file = new File(newPath)
    this
  }

  def setName(name: String): FileEditor = {
    val pathOpt = Option(file.getParentFile)
    val newPath =
        if (pathOpt.isDefined) Paths.get(pathOpt.get.getPath, name).toString
        else name

    file = new File(newPath)
    this
  }

  def incName(name: String): FileEditor = {
    file = new File(file.getPath + name)
    this
  }

  def setExt(ext: String): FileEditor = {
    // Being an extension implies that it starts with a period, but don't add a second period,
    // because then it's not an extension but something else.
    val dottedExt = if (ext.startsWith(dots)) ext else dots + ext
    val path = file.getPath
    val extensionlessPath = StringUtils.beforeLast(path, dot)
    val newPath = extensionlessPath + dottedExt

    file = new File(newPath)
    this
  }
}

object FileEditor {
  val dot = '.'
  val dots: String = dot.toString

  def apply(file: File): FileEditor = new FileEditor(file)
} 
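A short, hedged usage sketch of the FileEditor above. The file path is made up, and it assumes StringUtils.beforeLast returns the text before the last occurrence of the given character (which is how setExt is written to be used).

import java.io.File

// Rewrites the directory, then swaps the extension; comments show the assumed results.
val edited = FileEditor(new File("/tmp/data/report.txt"))
  .setDir("/var/out")   // -> /var/out/report.txt
  .setExt("json")       // -> /var/out/report.json
  .get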
Example 145
Source File: FileEditor.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.eidos.utils

import java.io.File
import java.nio.file.Paths

class FileEditor(protected var file: File) {
  import FileEditor._

  def get = file

  def setDir(dir: String): FileEditor = {
    val name = file.getName
    val newPath = Paths.get(dir, name).toString

    file = new File(newPath)
    this
  }

  def incDir(dir: String): FileEditor = {
    val name = file.getName
    val pathOpt = Option(file.getParentFile)
    val newPath =
        if (pathOpt.isDefined)
          Paths.get(Paths.get(pathOpt.get.getPath, dir).toString, name).toString
        else Paths.get(dir, name).toString

    file = new File(newPath)
    this
  }

  def setName(name: String): FileEditor = {
    val pathOpt = Option(file.getParentFile)
    val newPath =
        if (pathOpt.isDefined) Paths.get(pathOpt.get.getPath, name).toString
        else name

    file = new File(newPath)
    this
  }

  def incName(name: String): FileEditor = {
    file = new File(file.getPath + name)
    this
  }

  def setExt(ext: String): FileEditor = {
    // Being an extension implies that it starts with a period, but don't add a second period,
    // because then it's not an extension but something else.
    val dottedExt = if (ext.startsWith(dots)) ext else dots + ext
    val path = file.getPath
    val extensionlessPath = StringUtils.beforeLast(path, dot)
    val newPath = extensionlessPath + dottedExt

    file = new File(newPath)
    this
  }
}

object FileEditor {
  val dot = '.'
  val dots = dot.toString

  def apply(file: File): FileEditor = new FileEditor(file)
} 
Example 146
Source File: ZipArchive.scala    From codepropertygraph   with Apache License 2.0 5 votes vote down vote up
package io.shiftleft.codepropertygraph.cpgloading

import java.io.Closeable
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileSystem, FileSystems, FileVisitResult, Files, Path, Paths, SimpleFileVisitor}
import java.util.{Collection => JCollection}

import scala.collection.mutable.ArrayBuffer
import scala.jdk.CollectionConverters._

class ZipArchive(inputFile: String) extends Closeable {
  private val zipFileSystem: FileSystem = FileSystems.newFileSystem(Paths.get(inputFile), null)

  private def root: Path = zipFileSystem.getRootDirectories.iterator.next

  private def walk(rootPath: Path): Seq[Path] = {
    val entries = ArrayBuffer[Path]()
    Files.walkFileTree(
      rootPath,
      new SimpleFileVisitor[Path]() {
        override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
          if (attrs.isRegularFile)
            entries += file
          FileVisitResult.CONTINUE
        }
      }
    )
    entries.toSeq
  }

  def entries: Seq[Path] = walk(root)

  def getFileEntries: JCollection[Path] = entries.asJava

  override def close(): Unit = zipFileSystem.close()
} 
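A hedged usage sketch of the ZipArchive above (the archive name is made up). On Scala 2.13, scala.util.Using closes the wrapped zip file system when the block finishes, since ZipArchive is Closeable.

import scala.util.Using

Using.resource(new ZipArchive("archive.zip")) { archive =>
  // Print every regular-file entry found inside the zip.
  archive.entries.foreach(println)
}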
Example 147
Source File: AmmoniteExecutorTest.scala    From codepropertygraph   with Apache License 2.0 5 votes vote down vote up
package io.shiftleft.console.scripting

import org.scalatest.{Matchers, WordSpec}

import io.shiftleft.codepropertygraph.Cpg

import java.nio.file.{Path, Paths}

class AmmoniteExecutorTest extends WordSpec with Matchers {
  private object TestAmmoniteExecutor extends AmmoniteExecutor {
    override protected def predef: String =
      """
        |import io.shiftleft.semanticcpg.language._
        |""".stripMargin
  }

  private def getScriptPath(script: String): Path = {
    val scriptURI = getClass.getClassLoader.getResource(script).toURI
    Paths.get(scriptURI)
  }

  private def withExecutor[T](f: AmmoniteExecutor => T): T = {
    f(TestAmmoniteExecutor)
  }

  "An AmmoniteExecutor" should {
    "execute a single script with an implicit cpg in scope" in withExecutor { executor =>
      val script = getScriptPath("scripts/general/list-funcs.sc")

      executor.runScript(script, Map.empty, Cpg.emptyCpg).unsafeRunSync() shouldBe List()
    }

    "execute multiple scripts" in withExecutor { executor =>
      val script = getScriptPath("scripts/general/list-funcs.sc")
      val secondScript = getScriptPath("scripts/java/list-sl-ns.sc")

      executor.runScripts(List(script, secondScript), Map.empty, Cpg.emptyCpg).unsafeRunSync() shouldBe
        List(List(), List())
    }

    "return a failure if the script can not be found" in withExecutor { executor =>
      val script = Paths.get("/", "tmp", "cake.sc")

      val ex = intercept[RuntimeException] {
        executor.runScript(script, Map.empty, Cpg.emptyCpg).unsafeRunSync()
      }

      ex.getMessage shouldBe "Script file not found: /tmp/cake.sc"
    }

    "propagate any exceptions thrown by a script" in withExecutor { executor =>
      val script = getScriptPath("scripts/general/divide_by_zero.sc")

      intercept[ArithmeticException] {
        executor.runScript(script, Map.empty, Cpg.emptyCpg).unsafeRunSync()
      }
    }

    "run a string query" in withExecutor { executor =>
      val query = "cpg.method.l"

      executor.runQuery(query, Cpg.emptyCpg).unsafeRunSync() shouldBe List()
    }

    "propagate errors if the string query fails" in withExecutor { executor =>
      val query = "cake"

      intercept[RuntimeException] {
        executor.runQuery(query, Cpg.emptyCpg).unsafeRunSync()
      }
    }
  }
} 
Example 148
Source File: CompressionSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.io.it

import java.nio.file.Paths

import com.github.fsanaulla.chronicler.ahc.io.InfluxIO
import com.github.fsanaulla.chronicler.ahc.management.InfluxMng
import com.github.fsanaulla.chronicler.ahc.shared.Uri
import com.github.fsanaulla.chronicler.core.alias.Id
import com.github.fsanaulla.chronicler.core.api.DatabaseApi
import com.github.fsanaulla.chronicler.testing.it.DockerizedInfluxDB
import org.asynchttpclient.Response
import org.scalatest.concurrent.{Eventually, IntegrationPatience, ScalaFutures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CompressionSpec
  extends FlatSpec
  with Matchers
  with DockerizedInfluxDB
  with ScalaFutures
  with Eventually
  with IntegrationPatience {

  override def afterAll(): Unit = {
    mng.close()
    io.close()
    super.afterAll()
  }

  val testDB = "db"

  lazy val mng =
    InfluxMng(host, port, Some(creds), None)

  lazy val io =
    InfluxIO(host, port, Some(creds), compress = true)

  lazy val db: DatabaseApi[Future, Id, Response, Uri, String] =
    io.database(testDB)

  it should "ping database" in {
    eventually {
      io.ping.futureValue.right.get.version shouldEqual version
    }
  }

  it should "write data from file" in {
    mng.createDatabase(testDB).futureValue.right.get shouldEqual 200

    db.writeFromFile(Paths.get(getClass.getResource("/large_batch.txt").getPath))
      .futureValue
      .right
      .get shouldEqual 204

    db.readJson("SELECT * FROM test1").futureValue.right.get.length shouldEqual 10000
  }
} 
Example 149
Source File: CompressionSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.urlhttp

import java.nio.file.Paths

import com.github.fsanaulla.chronicler.testing.it.DockerizedInfluxDB
import com.github.fsanaulla.chronicler.urlhttp.io.{InfluxIO, UrlIOClient}
import com.github.fsanaulla.chronicler.urlhttp.management.{InfluxMng, UrlManagementClient}
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.{FlatSpec, Matchers}

class CompressionSpec
  extends FlatSpec
  with Matchers
  with DockerizedInfluxDB
  with Eventually
  with IntegrationPatience {

  override def afterAll(): Unit = {
    mng.close()
    io.close()
    super.afterAll()
  }

  val testDB = "db"

  lazy val mng: UrlManagementClient =
    InfluxMng(s"http://$host", port, Some(creds))

  lazy val io: UrlIOClient =
    InfluxIO(s"http://$host", port, Some(creds), compress = true)

  lazy val db: io.Database = io.database(testDB)

  it should "ping database" in {
    eventually {
      io.ping.get.right.get.version shouldEqual version
    }
  }

  it should "write data from file" in {
    mng.createDatabase(testDB).get.right.get shouldEqual 200

    db.writeFromFile(Paths.get(getClass.getResource("/large_batch.txt").getPath))
      .get
      .right
      .get shouldEqual 204

    db.readJson("SELECT * FROM test1").get.right.get.length shouldEqual 10000
  }
} 
Example 150
Source File: CompressionSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.io.{AkkaDatabaseApi, InfluxIO}
import com.github.fsanaulla.chronicler.akka.management.InfluxMng
import com.github.fsanaulla.chronicler.akka.shared.InfluxConfig
import com.github.fsanaulla.chronicler.testing.it.DockerizedInfluxDB
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContextExecutor

class CompressionSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with DockerizedInfluxDB
  with ScalaFutures
  with IntegrationPatience {

  override def afterAll(): Unit = {
    mng.close()
    io.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  implicit val ec: ExecutionContextExecutor = system.dispatcher

  val testDB = "db"

  lazy val influxConf =
    InfluxConfig(host, port, credentials = Some(creds), compress = true)

  lazy val mng =
    InfluxMng(host, port, credentials = Some(creds))

  lazy val io =
    InfluxIO(influxConf)

  lazy val db: AkkaDatabaseApi = io.database(testDB)

  it should "write data from file" in {
    mng.createDatabase(testDB).futureValue.right.get shouldEqual 200

    db.writeFromFile(Paths.get(getClass.getResource("/large_batch.txt").getPath))
      .futureValue
      .right
      .get shouldEqual 204

    db.readJson("SELECT * FROM test1").futureValue.right.get.length shouldEqual 10000
  }
} 
Example 151
Source File: Example2.scala    From tfModelServing4s   with MIT License 5 votes vote down vote up
package org.tfModelServing4s
package examples

import scala.util.Try
import tf._
import tf.implicits._
import dsl._
import utils._
import org.tfModelServing4s.utils.show._
import org.tfModelServing4s.utils.show.implicits._
import java.nio.file.{Files, Paths}

object Example2 {

  private def probsToClass(probs: Array[Float]): String = {
    val classes = io.Source.fromInputStream(getClass.getResourceAsStream("/breeds.csv")).getLines().drop(1).toArray
    val top5 = probs.zip(classes).sortBy { case (prob, idx) => prob }.reverse.take(5)

    top5.mkString("\n")
  }

  def main(args: Array[String]): Unit = {

    val imagePath = args(0)
    val serving = new TFModelServing

    val progr = for {
      _ <- use(serving.load(FileModelSource("/tmp/dogs_1"), tag = "serve")) { model =>
        for {
          meta        <- serving.metadata(model)
          _           =  println(s"model metadata: $meta")
          signature   <- Try { meta.signatures("serving_default") }
          _           =  println(s"serving signature: $signature")
          _           =  println(s"serving signature inputs: ${signature.inputs}")
          _           =  println(s"serving signature outputs: ${signature.outputs}")

          inputArray  <- Try { Array.range(0, 6).map(_.toFloat) }
          _           =  println(s"input array = ${shows(inputArray)}")

          img         <- Try {
            Files.readAllBytes(Paths.get(imagePath))
          }
          _           <- use(serving.tensor(img, shape = List(1))) { inputTensor =>
            for {
              inputDef    <- Try { signature.inputs("image_raw") }
              outputDef   <- Try { signature.outputs("probs") }
              outputArray <- serving.eval[Array[Array[Float]]](model, outputDef, Map(inputDef -> inputTensor))
              _           =  println(s"output: ${shows(outputArray)}")
              clazz       <- Try { probsToClass(outputArray.flatten) }
              _           = println(clazz)
            } yield ()
          }

        } yield ()
      }
    } yield ()

    println(s"Program result = $progr")

  }

} 
Example 152
Source File: MultipartFormDataWritable.scala    From play-swagger   with MIT License 5 votes vote down vote up
package de.zalando.play.controllers

import java.nio.file.{Files, Paths}

import play.api.http.{HeaderNames, Writeable}
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.{Codec, MultipartFormData}


object MultipartFormDataWritable {
  import scala.concurrent.ExecutionContext.Implicits.global

  val boundary = "--------ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"

  def formatDataParts(data: Map[String, Seq[String]]): Array[Byte] = {
    val dataParts = data.flatMap { case (key, values) =>
      values.map { value =>
        val name = s""""$key""""
        s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name\r\n\r\n$value\r\n"
      }
    }.mkString("")
    Codec.utf_8.encode(dataParts)
  }

  def filePartHeader(file: FilePart[TemporaryFile]): Array[Byte] = {
    val name = s""""${file.key}""""
    val filename = s""""${file.filename}""""
    val contentType = file.contentType.map { ct =>
      s"${HeaderNames.CONTENT_TYPE}: $ct\r\n"
    }.getOrElse("")
    Codec.utf_8.encode(s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name; filename=$filename\r\n$contentType\r\n")
  }

  val singleton = Writeable[MultipartFormData[TemporaryFile]](
    transform = { form: MultipartFormData[TemporaryFile] =>
      formatDataParts(form.dataParts) ++
        form.files.flatMap { file =>
          val fileBytes = Files.readAllBytes(Paths.get(file.ref.file.getAbsolutePath))
          filePartHeader(file) ++ fileBytes ++ Codec.utf_8.encode("\r\n")
        } ++
        Codec.utf_8.encode(s"--$boundary--")
    },
    contentType = Some(s"multipart/form-data; boundary=$boundary")
  )
} 
Example 153
Source File: DependencyNode.scala    From cuesheet   with Apache License 2.0 5 votes vote down vote up
package com.kakao.cuesheet.deps

import java.io.{BufferedOutputStream, File, FileOutputStream, IOException}
import java.net.{URL, URLDecoder}
import java.nio.file.{Files, Paths}
import java.util.zip.{ZipEntry, ZipOutputStream}

import com.kakao.mango.io.FileSystems
import com.kakao.mango.logging.Logging
import com.kakao.shaded.guava.io.Files.createTempDir

sealed trait DependencyNode {
  def path: String
}

case class ManagedDependency(group: String, artifact: String, classifier: String = "jar")

case class ManagedDependencyNode(
  path: String,
  group: String,
  artifact: String,
  classifier: String,
  version: String,
  children: Seq[ManagedDependency]
) extends DependencyNode {
  def key = ManagedDependency(group, artifact, classifier)
}

case class DirectoryDependencyNode(path: String) extends DependencyNode with Logging {
  lazy val compressed: UnmanagedDependencyNode = {
    val tmpdir = createTempDir()
    val jar = new File(s"${tmpdir.getAbsolutePath}/local-${tmpdir.getName}.jar")
    val root = Paths.get(path)

    val output = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(jar)))
    var count = 0

    FileSystems.entries(root).foreach { path =>
      if (resourceExtensions.exists(path.toString.endsWith)) {
        val entry = new ZipEntry(root.relativize(path).toString)
        output.putNextEntry(entry)
        try {
          Files.copy(path, output)
          count += 1
        } catch {
          case e: IOException => logger.warn(s"skipping $path due to an IOException: ${e.getMessage}")
        }
        output.closeEntry()
      }
    }

    output.close()

    logger.debug(s"Successfully zipped $count files in $path into $jar")

    UnmanagedDependencyNode(jar.getAbsolutePath)
  }
}

case class JavaRuntimeDependencyNode(path: String) extends DependencyNode
case class UnmanagedDependencyNode(path: String) extends DependencyNode

object DependencyNode {

  val resolver = new ChainedArtifactResolver(
    new IvyPathArtifactResolver,
    new IvyOriginalPathArtifactResolver,
    new MavenPathArtifactResolver,
    new GradlePathArtifactResolver,
    new JavaRuntimeResolver,
    new MavenMetadataArtifactResolver,
    new UnmanagedJarResolver
  )

  def resolve(url: URL): DependencyNode = {
    if (url.getProtocol != "file") {
      throw new IllegalArgumentException("non-file dependency is not supported")
    }

    val path = URLDecoder.decode(url.getFile, "UTF-8")
    val file = new File(path)
    if (file.isDirectory) {
      return DirectoryDependencyNode(file.getAbsolutePath)
    }

    if (!file.isFile || !file.canRead) {
      throw new IllegalArgumentException(s"$path is not a file or readable")
    }

    DependencyNode.resolver.resolve(file.getAbsolutePath) match {
      case Some(node) => node
      case None => throw new IllegalArgumentException(s"Could not determine the dependency of $path")
    }
  }
} 
Example 154
Source File: CueSheetVersion.scala    From cuesheet   with Apache License 2.0 5 votes vote down vote up
package com.kakao.cuesheet

import java.nio.file.{Files, Paths}

import com.kakao.cuesheet.deps.{DependencyAnalyzer, ManagedDependencyNode}
import com.kakao.mango.logging.Logging

import scala.collection.JavaConversions._
import scala.io.Source
import scala.util.Try


object CueSheetVersion extends Logging {
  private val versionPattern = """[^"]*"([^"]+)".*""".r

  lazy val version: String = {
    // read from MANIFEST.MF
    getClass.getClassLoader.getResources("META-INF/MANIFEST.MF").toSeq.flatMap { url =>
      val src = Source.fromInputStream(url.openStream())
      try {
        val manifest = src.getLines().map(_.split(":", 2)).collect {
          case Array(key, value) => (key.trim(), value.trim())
        }.toMap
        (manifest.get("Implementation-Vendor"), manifest.get("Implementation-Title")) match {
          case (Some("com.kakao.cuesheet"), Some("cuesheet")) => manifest.get("Implementation-Version")
          case (Some("com.kakao.cuesheet"), Some("cuesheet-assembly")) => manifest.get("Implementation-Version")
          case _ => Nil
        }
      } finally {
        src.close()
      }
    }.headOption.orElse {
      val (_, applicationJars) = DependencyAnalyzer().graph.divide()
      applicationJars.collectFirst {
        case jar: ManagedDependencyNode if jar.artifact.startsWith("cuesheet") => jar.version
      }
    }.orElse {
      Try(Source.fromFile("version.sbt")).map { src =>
        // try to read from version.sbt
        try {
          src.getLines().collectFirst {
            case versionPattern(v) => v
          }.head
        } finally {
          src.close()
        }
      }.toOption
    }.getOrElse("Unknown")
  }

} 
Example 155
Source File: Main.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.joex

import java.nio.file.{Files, Paths}

import cats.effect._
import cats.implicits._

import docspell.common.{Banner, Pools, ThreadFactories}

import org.log4s._

object Main extends IOApp {
  private[this] val logger = getLogger

  val blockingEC =
    ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
  val connectEC =
    ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
  val restserverEC =
    ThreadFactories.workSteal[IO](ThreadFactories.ofNameFJ("docspell-joex-server"))

  def run(args: List[String]) = {
    args match {
      case file :: Nil =>
        val path = Paths.get(file).toAbsolutePath.normalize
        logger.info(s"Using given config file: $path")
        System.setProperty("config.file", file)
      case _ =>
        Option(System.getProperty("config.file")) match {
          case Some(f) if f.nonEmpty =>
            val path = Paths.get(f).toAbsolutePath.normalize
            if (!Files.exists(path)) {
              logger.info(s"Not using config file '$f' because it doesn't exist")
              System.clearProperty("config.file")
            } else
              logger.info(s"Using config file from system properties: $f")
          case _ =>
        }
    }

    val cfg = ConfigFile.loadConfig
    val banner = Banner(
      "JOEX",
      BuildInfo.version,
      BuildInfo.gitHeadCommit,
      cfg.jdbc.url,
      Option(System.getProperty("config.file")),
      cfg.appId,
      cfg.baseUrl
    )
    logger.info(s"\n${banner.render("***>")}")
    val pools = for {
      cec <- connectEC
      bec <- blockingEC
      blocker = Blocker.liftExecutorService(bec)
      rec <- restserverEC
    } yield Pools(cec, bec, blocker, rec)
    pools.use(p =>
      JoexServer
        .stream[IO](cfg, p)
        .compile
        .drain
        .as(ExitCode.Success)
    )
  }
} 
Example 156
Source File: OcrConfig.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.extract.ocr

import java.nio.file.{Path, Paths}

import docspell.common._

case class OcrConfig(
    maxImageSize: Int,
    ghostscript: OcrConfig.Ghostscript,
    pageRange: OcrConfig.PageRange,
    unpaper: OcrConfig.Unpaper,
    tesseract: OcrConfig.Tesseract
) {}

object OcrConfig {

  case class PageRange(begin: Int)

  case class Ghostscript(command: SystemCommand.Config, workingDir: Path)

  case class Tesseract(command: SystemCommand.Config)

  case class Unpaper(command: SystemCommand.Config)

  val default = OcrConfig(
    maxImageSize = 3000 * 3000,
    pageRange = PageRange(10),
    ghostscript = Ghostscript(
      SystemCommand.Config(
        "gs",
        Seq(
          "-dNOPAUSE",
          "-dBATCH",
          "-dSAFER",
          "-sDEVICE=tiffscaled8",
          "-sOutputFile={{outfile}}",
          "{{infile}}"
        ),
        Duration.seconds(30)
      ),
      Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
    ),
    unpaper = Unpaper(
      SystemCommand
        .Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
    ),
    tesseract = Tesseract(
      SystemCommand
        .Config(
          "tesseract",
          Seq("{{file}}", "stdout", "-l", "{{lang}}"),
          Duration.minutes(1)
        )
    )
  )
} 
Example 157
Source File: Main.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.restserver

import java.nio.file.{Files, Paths}

import cats.effect._
import cats.implicits._

import docspell.common.{Banner, Pools, ThreadFactories}

import org.log4s._

object Main extends IOApp {
  private[this] val logger = getLogger

  val blockingEC =
    ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking"))
  val connectEC =
    ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
  val restserverEC =
    ThreadFactories.workSteal[IO](ThreadFactories.ofNameFJ("docspell-restserver"))

  def run(args: List[String]) = {
    args match {
      case file :: Nil =>
        val path = Paths.get(file).toAbsolutePath.normalize
        logger.info(s"Using given config file: $path")
        System.setProperty("config.file", file)
      case _ =>
        Option(System.getProperty("config.file")) match {
          case Some(f) if f.nonEmpty =>
            val path = Paths.get(f).toAbsolutePath.normalize
            if (!Files.exists(path)) {
              logger.info(s"Not using config file '$f' because it doesn't exist")
              System.clearProperty("config.file")
            } else
              logger.info(s"Using config file from system properties: $f")
          case _ =>
        }
    }

    val cfg = ConfigFile.loadConfig
    val banner = Banner(
      "REST Server",
      BuildInfo.version,
      BuildInfo.gitHeadCommit,
      cfg.backend.jdbc.url,
      Option(System.getProperty("config.file")),
      cfg.appId,
      cfg.baseUrl
    )
    val pools = for {
      cec <- connectEC
      bec <- blockingEC
      blocker = Blocker.liftExecutorService(bec)
      rec <- restserverEC
    } yield Pools(cec, bec, blocker, rec)

    logger.info(s"\n${banner.render("***>")}")
    pools.use(p =>
      RestServer
        .stream[IO](cfg, p)
        .compile
        .drain
        .as(ExitCode.Success)
    )
  }
} 
Example 158
Source File: Zip.scala    From docspell   with GNU General Public License v3.0 5 votes vote down vote up
package docspell.files

import java.io.InputStream
import java.nio.file.Paths
import java.util.zip.ZipInputStream

import cats.effect._
import cats.implicits._
import fs2.{Pipe, Stream}

import docspell.common.Binary

object Zip {

  def unzipP[F[_]: ConcurrentEffect: ContextShift](
      chunkSize: Int,
      blocker: Blocker
  ): Pipe[F, Byte, Binary[F]] =
    s => unzip[F](chunkSize, blocker)(s)

  def unzip[F[_]: ConcurrentEffect: ContextShift](chunkSize: Int, blocker: Blocker)(
      data: Stream[F, Byte]
  ): Stream[F, Binary[F]] =
    data.through(fs2.io.toInputStream[F]).flatMap(in => unzipJava(in, chunkSize, blocker))

  def unzipJava[F[_]: Sync: ContextShift](
      in: InputStream,
      chunkSize: Int,
      blocker: Blocker
  ): Stream[F, Binary[F]] = {
    val zin = new ZipInputStream(in)

    val nextEntry = Resource.make(Sync[F].delay(Option(zin.getNextEntry))) {
      case Some(_) => Sync[F].delay(zin.closeEntry())
      case None    => ().pure[F]
    }

    Stream
      .resource(nextEntry)
      .repeat
      .unNoneTerminate
      .map { ze =>
        val name = Paths.get(ze.getName()).getFileName.toString
        val data =
          fs2.io.readInputStream[F]((zin: InputStream).pure[F], chunkSize, blocker, false)
        Binary(name, data)
      }
  }
} 
Example 159
Source File: Calculator.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package example

import java.nio.file.{Files, Path, Paths}

import scala.collection.JavaConverters._

object Calculator {

  def main(args: Array[String]): Unit = {
    args.toList match {
      case Nil => println(s"Usage: Expected either two ints or a file path")
      case List(filePath) =>
        println(apply(Paths.get(filePath)))
      case List(a, b) =>
        println(add(a.toInt, b.toInt))
      case err => println(s"Usage: Expected either two ints or a file path, but got $err")
    }
  }

  def add(x: Int, y: Int): Int = x + y

  def subtract(x: Int, y: Int): Int = x - y

  val PlusR = """(\d+)\s*\+\s*(\d+)""".r
  val MinusR = """(\d+)\s*-\s*(\d+)""".r

  
  def apply(input: Path): Int = {
    Files.readAllLines(input).asScala.headOption match {
      case Some(PlusR(a, b)) => add(a.toInt, b.toInt)
      case Some(MinusR(a, b)) => subtract(a.toInt, b.toInt)
      case _ => sys.error("Whacha talkin' bout, willis?")
    }
  }

} 
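A minimal sketch of driving the Path-based apply above with a temporary input file; it assumes the Calculator object is in scope, and the input line is made up.

import java.nio.charset.StandardCharsets
import java.nio.file.Files

val input = Files.createTempFile("calc", ".txt")
Files.write(input, "20 + 22".getBytes(StandardCharsets.UTF_8))
println(Calculator(input)) // matches PlusR and prints 42
Files.delete(input)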
Example 160
Source File: IntegrationTests.scala    From scala-typed-holes   with Apache License 2.0 5 votes vote down vote up
package holes

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterAll, FunSpec}

import scala.sys.process._

class IntegrationTests extends FunSpec with BeforeAndAfterAll {

  private val pluginJar = sys.props("plugin.jar")
  private val scalacClasspath = sys.props("scalac.classpath")
  private val targetDir = Paths.get("target/integration-tests")

  private def runScalac(args: String*): String = {
    val buf = new StringBuffer
    val logger = new ProcessLogger {
      override def out(s: => String): Unit = { buf.append(s); buf.append('\n') }
      override def err(s: => String): Unit = { buf.append(s); buf.append('\n') }
      override def buffer[T](f: => T): T = f
    }

    Process(
      "java"
        :: "-Dscala.usejavacp=true"
        :: "-cp" :: scalacClasspath
        :: "scala.tools.nsc.Main"
        :: args.toList
    ).!(logger)

    buf.toString
  }

  private def compileFile(path: Path): String =
    runScalac(
      s"-Xplugin:$pluginJar",
      "-P:typed-holes:log-level:info",
      "-d", targetDir.toString,
      path.toString
    )

  override def beforeAll(): Unit = {
    println(runScalac("-version"))

    FileUtils.deleteQuietly(targetDir.toFile)
    Files.createDirectories(targetDir)
  }

  describe("produces the expected output") {
    for (scenario <- Paths.get("src/test/resources").toFile.listFiles().toList.map(_.toPath)) {
      it(scenario.getFileName.toString) {
        val expected =
          new String(Files.readAllBytes(scenario.resolve("expected.txt")), StandardCharsets.UTF_8).trim
        val actual =
          compileFile(scenario.resolve("input.scala")).trim

        if (actual != expected) {
          println("Compiler output:")
          println("=====")
          println(actual)
          println("=====")
        }
        assert(actual === expected)
      }
    }
  }

} 
Example 161
Source File: JniJavah.scala    From sbt-jni   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.jodersky.sbt.jni
package plugins

import java.nio.file.Paths

import collection.JavaConverters._
import util.BytecodeUtil

import sbt._
import sbt.Keys._


object JniJavah extends AutoPlugin {

  override def requires = plugins.JvmPlugin
  override def trigger = allRequirements

  object autoImport {

    val javahClasses = taskKey[Set[String]](
      "Finds fully qualified names of classes containing native declarations."
    )

    val javah = taskKey[File](
      "Generate JNI headers. Returns the directory containing generated headers."
    )

  }
  import autoImport._

  lazy val mainSettings: Seq[Setting[_]] = Seq(

    javahClasses in javah := {
      import xsbti.compile._
      val compiled: CompileAnalysis = (compile in Compile).value
      val classFiles: Set[File] = compiled.readStamps().getAllProductStamps()
        .asScala.keySet.toSet
      val nativeClasses = classFiles flatMap { file =>
        BytecodeUtil.nativeClasses(file)
      }
      nativeClasses
    },

    target in javah := target.value / "native" / "include",

    javah := {
      val out = (target in javah).value

      // fullClasspath can't be used here since it also generates resources. In
      // a project combining JniJavah and JniPackage, we would have a chicken-and-egg
      // problem.
      val jcp: Seq[File] = (dependencyClasspath in Compile).value.map(_.data) ++ {
        (compile in Compile).value; Seq((classDirectory in Compile).value)
      }

      val log = streams.value.log

      val classes = (javahClasses in javah).value
      if (classes.nonEmpty) {
        log.info("Headers will be generated to " + out.getAbsolutePath)
      }

      val task = new ch.jodersky.sbt.jni.javah.JavahTask
      classes.foreach(task.addClass(_))  
      jcp.map(_.toPath).foreach(task.addClassPath(_))
      task.addRuntimeSearchPath()
      task.setOutputDir(Paths.get(out.getAbsolutePath))
      task.run()

      out
    }
  )

  override lazy val projectSettings = mainSettings

} 
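A hedged build.sbt sketch: because the plugin above declares trigger = allRequirements, it enables itself on any JVM project, so a build only needs to run the javah task or, optionally, repoint the header output directory. The path below is an arbitrary example, not a project convention.

// build.sbt
target in javah := baseDirectory.value / "src" / "main" / "native" / "include"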
Example 162
Source File: WordCountTest.scala    From spark-example-project   with Apache License 2.0 5 votes vote down vote up
package me.soulmachine.spark

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Paths, Files}

import org.scalatest._

import scala.io.Source


class WordCountTest extends FlatSpec with Matchers {
  "A WordCount job" should  "count words correctly" in {
    val tempDir = Files.createTempDirectory(null)
    println(tempDir.toAbsolutePath)

    val inputFile = new File(tempDir.toAbsolutePath.toString, "input.txt")
    Files.write(Paths.get(inputFile.getAbsolutePath),
      "hack hack hack and hack".getBytes(StandardCharsets.UTF_8))
    inputFile.deleteOnExit()

    val outputDir = new File(tempDir.toAbsolutePath.toString, "output").getAbsolutePath

    WordCount.execute(
      master = Some("local"),
      input   = inputFile.getAbsolutePath,
      output  = outputDir
    )

    val outputFile = new File(outputDir, "part-00000")
    val actual = Source.fromFile(outputFile, "UTF-8").mkString
    // delete the temporary folder
    new ProcessBuilder("rm","-rf", tempDir.toAbsolutePath.toString).start().waitFor()

    assert(actual === "(hack,4)\n(and,1)\n")
  }
} 
Example 163
Source File: FileChannelSuite.scala    From scalaz-nio   with Apache License 2.0 5 votes vote down vote up
package zio.nio

import java.nio.file.{ Files, Paths, StandardOpenOption }

import zio.{ Chunk, DefaultRuntime }
import testz.{ Harness, assert }
import zio.nio.channels.AsynchronousFileChannel

import scala.io.Source

object FileChannelSuite extends DefaultRuntime {

  def tests[T](harness: Harness[T]): T = {
    import harness._

    section(
      test("asynchronous file read") { () =>
        val path = Paths.get("src/test/resources/async_file_read_test.txt")

        val testProgram = for {
          channel <- AsynchronousFileChannel.open(path, Set(StandardOpenOption.READ))
          buffer  <- Buffer.byte(16)
          _       <- channel.readBuffer(buffer, 0)
          _       <- buffer.flip
          array   <- buffer.array
          text    = array.takeWhile(_ != 10).map(_.toChar).mkString.trim
          _       <- channel.close
        } yield text

        val result = unsafeRun(testProgram)

        assert(result == "Hello World")
      },
      test("asynchronous file write") { () =>
        val path    = Paths.get("src/test/resources/async_file_write_test.txt")
        val options = Set(StandardOpenOption.CREATE, StandardOpenOption.WRITE)

        val testProgram = for {
          channel <- AsynchronousFileChannel.open(path, options)
          buffer  <- Buffer.byte(Chunk.fromArray("Hello World".getBytes))
          _       <- channel.writeBuffer(buffer, 0)
          _       <- channel.close
        } yield ()

        unsafeRun(testProgram)

        val result = Source.fromFile(path.toFile()).getLines.toSeq
        Files.delete(path)

        assert(result.size == 1)
        assert(result.head == "Hello World")
      }
    )
  }

} 
Example 164
Source File: Resources.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek

import ba.sake.hepek.core.RelativePath
import ba.sake.hepek.path.RelativePathAddons
import java.nio.file.Paths

final case class Resource(fileName: String) extends RelativePath with RelativePathAddons {
  override def relPath = Paths.get(fileName)
}

object Resources extends Resources

trait Resources {
  def siteRootPath: String = "site"

  def resource(fileName: String): Resource =
    Resource(s"$siteRootPath/$fileName")

  // css folder
  object styles {
    def css(baseName: String) = resource(s"styles/$baseName.css")
  }

  // js folder
  object scripts {
    def js(baseName: String) = resource(s"scripts/$baseName.js")
  }

  // images folder
  object images {
    def image(fullName: String) = resource(s"images/$fullName")
    def ico(baseName: String)   = image(baseName + ".ico")
    def svg(baseName: String)   = image(baseName + ".svg")
    def jpg(baseName: String)   = image(baseName + ".jpg")
    def jpeg(baseName: String)  = image(baseName + ".jpeg")
    def png(baseName: String)   = image(baseName + ".png")
    def gif(baseName: String)   = image(baseName + ".gif")
  }

  // lib folder
  object lib {
    def js(baseName: String)    = resource(s"lib/$baseName.js")
    def jsMin(baseName: String) = js(baseName + ".min")

    def css(baseName: String)    = resource(s"lib/$baseName.css")
    def cssMin(baseName: String) = css(baseName + ".min")
  }
} 
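For reference, a short sketch of the relative paths the helpers above resolve to, derived from the default siteRootPath of "site".

Resources.styles.css("main")   // Resource("site/styles/main.css")
Resources.images.png("logo")   // Resource("site/images/logo.png")
Resources.lib.jsMin("jquery")  // Resource("site/lib/jquery.min.js")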
Example 165
Source File: PackageRelativePath.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek.path

import java.nio.file.Paths
import ba.sake.hepek.core.RelativePath


trait PackageRelativePath extends RelativePath {

  def fileName: String

  override def relPath = {
    val path = if (this.getClass.getPackage == null) {
      fileName
    } else {
      this.getClass.getPackage.getName.replaceAll("\\.", "/") + "/" + fileName
    }
    Paths.get(path)
  }
} 
Example 166
Source File: PackageRelativePathTest.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek.path

import java.nio.file.Paths

import org.scalatest.{FlatSpec, Matchers}

class PackageRelativePathTest extends FlatSpec with Matchers {
  "PackageRelativePath" should "make path based on package" in {
    val obj = PackageRelativeObject1
    obj.relPath shouldBe Paths.get("ba/sake/hepek/path/abc.txt")
  }
  it should "make path based on inner package" in {
    val obj = testme.PackageRelativeObject2
    obj.relPath shouldBe Paths.get("ba/sake/hepek/path/testme/index.html")
  }
}

object PackageRelativeObject1 extends PackageRelativePath {
  override def fileName = "abc.txt"
}

package object testme {

  object PackageRelativeObject2 extends PackageRelativePath {
    override def fileName = "index.html"
  }
} 
Example 167
Source File: ClassPackageRelativePathTest.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek.path

import java.nio.file.Paths
import org.scalatest.{FlatSpec, Matchers}

class ClassPackageRelativePathTest extends FlatSpec with Matchers {
  "ClassPackageRelativePath" should "make path based on class and package" in {
    val obj = ClassPackageObject1
    obj.relPath shouldBe Paths.get("ba/sake/hepek/path/class-package-object1.html")
  }
  it should "respect custom extension" in {
    val obj = CustomExtensionObject1
    obj.relPath shouldBe Paths.get("ba/sake/hepek/path/custom-extension-object1.xml")
  }
}

object ClassPackageObject1 extends ClassPackageRelativePath

object CustomExtensionObject1 extends ClassPackageRelativePath {
  override def fileExtension = "xml"
} 
Example 168
Source File: MultiPage.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package docs.hepek

import scalatags.Text.all._
import utils.Imports.Bundle._
import utils.Imports._
import utils._

object MultiPage extends HepekDocsPage {

  override def pageSettings =
    super.pageSettings.withTitle("Multi page").withDescription("Headless CMS, blog, generator")

  override def blogSettings = super.blogSettings.withSections(
    multiPageSection
  )

  val multiPageSection = Section(
    "Multi-page",
    frag(
      s"""
        The abstraction of `StaticPage` maps nicely to a file.  
        This is a `1:1` relation.  
        But sometimes we need more power. We'd like to render **multiple files** based on one template.  
        This template is just a list of `Renderable`s.

        Here's a very simple example:
      """.md,
      chl.scala("""
        import java.nio.file.Paths
        import scalatags.Text.all._
        import ba.sake.hepek.path.ScalaMultiRenderable
        import examples.Imports._
        
        object MyPages extends ScalaMultiRenderable {
          override def rends =
            List(1, 2, 3) map MyPage
        }
        
        case class MyPage(num: Int) extends StaticPage {
          override def relPath =
            Paths.get(s"pages/page-$num.html")
        
          override def pageContent =
            div(s"This is page number $num")
        }
      """),
      s"""
        This will render 3 pages with paths "pages/page-1.html", "pages/page-2.html" and "pages/page-3.html"
        and their corresponding contents.

        It's a very powerful and nice abstraction.  
        You can browse through an [example of using Wordpress API](${links.WpExampleUrl})
        for rendering a static site.
        
        It makes Hepek SSG a powerful tool for Headless CMSes. :)  
        Especially since you can use Scala!
      """.md
    )
  )
} 
Example 169
Source File: HepekSeleniumTest.scala    From hepek   with Apache License 2.0 5 votes vote down vote up
package ba.sake.hepek.selenium

import java.nio.file.Paths
import org.scalatest._
import org.scalatest.concurrent.Eventually
import org.scalatest.selenium.HtmlUnit
import ba.sake.hepek.core.Renderable

trait HepekSeleniumTest extends FlatSpec with Matchers with HtmlUnit with Eventually {
  java.util.logging.Logger
    .getLogger("com.gargoylesoftware.htmlunit")
    .setLevel(java.util.logging.Level.OFF) // disable annoying HtmlUnit warnings

  val basePath = "hepek-tests/target/web/public/main/"

  def filePath(renderable: Renderable): String = {
    val pagePath = basePath + renderable.relPath
    val path     = Paths.get(pagePath)
    path.toUri.toString
  }

  def getByCss(selector: String): Option[Element] =
    find(cssSelector(selector))
} 
Example 170
Source File: Release.scala    From ionroller   with MIT License 5 votes vote down vote up
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File}
import java.nio.file.{Files, Paths}

import com.amazonaws.services.s3.model._
import com.amazonaws.services.s3.transfer.Transfer.TransferState
import com.amazonaws.services.s3.transfer.TransferManager
import com.amazonaws.util.IOUtils
import sbt._

import scalaz.concurrent.Task

object Release {

  lazy val releaseCli = taskKey[Unit]("Releases ION-Roller CLI")

  def release(ver: String, zip: File, install: File) = {
    val files = Seq(
      (install.getName, replaceVersionAndReadBytes(ver, install), "text/plain"),
      (zip.getName, readBytes(zip), "application/zip"))
    val tx = new TransferManager
    val tasks = for {
      f <- files
    } yield uploadFile(tx, f._1, f._2, f._3)
    val t = for {
      results <- Task.gatherUnordered(tasks)
      finalResult = if (results.forall(_ == TransferState.Completed)) TransferState.Completed else TransferState.Failed
      printTask <- Task.delay(println(finalResult))
    } yield printTask
    t.run
  }

  def uploadFile(tx: TransferManager, name: String, getBytes: Task[Array[Byte]], contentType: String): Task[TransferState] = {
    for {
      bytes <- getBytes
      meta <- metadata(bytes, contentType)
      transferState <- upload(tx, bytes, name, meta)
    } yield transferState
  }

  def metadata(bytes: Array[Byte], contentType: String): Task[ObjectMetadata] = {
    Task.delay({
      val out = new ByteArrayOutputStream
      out.write(bytes)
      val metadata = new ObjectMetadata
      metadata.setContentType(contentType)
      val contentBytes = IOUtils.toByteArray(new ByteArrayInputStream(out.toByteArray)).length.toLong
      // we need to call new ByteArrayInputStream again, as checking the length reads the stream
      metadata.setContentLength(contentBytes)
      metadata
    })
  }

  def upload(tx: TransferManager, in: Array[Byte], name: String, meta: ObjectMetadata): Task[TransferState] = {
    Task.delay({
      println(s"Uploading $name...")
      val upload = tx.upload(
        new PutObjectRequest("ionroller-cli", name, new ByteArrayInputStream(in), meta)
          .withCannedAcl(CannedAccessControlList.PublicRead)
      )
      while (!upload.isDone) {
        Thread.sleep(2000)
        println(upload.getProgress.getPercentTransferred.toInt + "%")
      }
      upload.getState
    })
  }

  def replaceVersionAndReadBytes(ver: String, file: File): Task[Array[Byte]] = {
    Task.delay({
      scala.io.Source.fromFile(file).getLines()
        .map(in => if (in startsWith "VERSION=") s"VERSION=$ver" else in)
        .mkString("\n")
        .getBytes
        .toSeq
        .toArray
    })
  }

  def readBytes(file: File): Task[Array[Byte]] = Task.delay({
    Files.readAllBytes(Paths.get(file.getAbsolutePath))
  })

} 
Example 171
Source File: TestsDefaults.scala    From renku   with Apache License 2.0 5 votes vote down vote up
package ch.renku.acceptancetests.tooling

final case class TestsDefaults(env:                 Option[String],
                               email:               Option[String] = None,
                               username:            Option[String] = None,
                               password:            Option[String] = None,
                               fullname:            Option[String] = None,
                               renkuVersion:        String,
                               renkuInstallCommand: String)

object TestsDefaults {
  import java.nio.file.Paths

  import pureconfig.ConfigSource
  import pureconfig.generic.auto._

  private val defaultConfigFileName = "./tests-defaults.conf"

  def apply(): TestsDefaults =
    if (Paths.get(defaultConfigFileName).toFile.exists())
      ConfigSource
        .file(defaultConfigFileName)
        .load[TestsDefaults]
        .fold(error => throw new Exception(error.prettyPrint()), identity)
    else
      TestsDefaults(
        env                 = Some("https://dev.renku.ch"),
        renkuVersion        = "0.10.4",
        renkuInstallCommand = "python3 -m pip install 'renku==%s'"
      )
} 
Example 172
Source File: package.scala    From renku   with Apache License 2.0 5 votes vote down vote up
package ch.renku.acceptancetests.tooling

import java.nio.file.Files.createDirectory
import java.nio.file.{Path, Paths}
import java.time.LocalDateTime.now
import java.time.format.DateTimeFormatter

import ch.renku.acceptancetests.model.users.UserCredentials

package object console {

  
  def %%>(command: Command)(implicit workPath: Path, userCredentials: UserCredentials): String =
    new CommandExecutor(command).safeExecute

  val rootWorkDirectory: Path = Paths.get("target")

  def createTempFolder: Path = {
    val timestampPattern = DateTimeFormatter.ofPattern("yyyy_MM_dd_HHmm_ss")
    val folder           = rootWorkDirectory.toUri resolve (now format timestampPattern)
    createDirectory(Paths.get(folder))
  }

  implicit class CommandOps(val context: StringContext) {

    def c(args: Any*): Command = Command {
      context.parts.zipAll(args, "", "").foldLeft("") {
        case (command, (part, arg)) => s"$command$part$arg"
      }
    }
  }
} 
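A small usage sketch for the c"..." interpolator defined above; the repository URL is a hypothetical argument. The interpolator simply concatenates the literal parts with the interpolated arguments and wraps the result in a Command:

val repoUrl = "https://github.com/SwissDataScienceCenter/renku"  // hypothetical value
val cloneCommand: Command = c"git clone $repoUrl"  // Command("git clone https://github.com/SwissDataScienceCenter/renku")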
Example 173
Source File: ScreenCapturingSpec.scala    From renku   with Apache License 2.0 5 votes vote down vote up
package ch.renku.acceptancetests.tooling

import java.io.File
import java.nio.file.Paths
import java.time.LocalDateTime.now
import java.time.format.DateTimeFormatter.ofPattern

import org.scalatest.{Outcome, TestSuite}
import org.scalatestplus.selenium.{Driver, WebBrowser}

trait ScreenCapturingSpec extends ScreenCapturing {
  this: AcceptanceSpec =>

  override def withFixture(test: NoArgTest): Outcome = {
    val outcome = test()

    if (outcome.isExceptional) {
      saveScreenshot
    }
    outcome
  }
} 
Example 174
Source File: scrimage.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
// scrimage-core, scrimage-filters

import com.sksamuel.scrimage._, filter._
import java.io.{File, FileInputStream}
import java.net.URL
import java.nio.file.{Files, Paths}
import scala.util.Try

// Download image to cache
val dest = Paths.get("/tmp/scastie/lanzarote.jpg")
if (!Files.exists(dest)) {
  Files.createDirectories(dest.getParent)
  val url = new URL("https://github.com/sksamuel/scrimage/blob/master/scrimage-core/src/test/resources/lanzarote.jpg?raw=true")
  Try(url.openStream()).foreach(src => Files.copy(src, dest))
}
val image = Image.fromStream(new FileInputStream(new File("/tmp/scastie/lanzarote.jpg")))
val small = image.scaleToWidth(200)


toBase64(small)

toBase64(small.filter(SepiaFilter)) 
Example 175
Source File: AbstractProjectTestRunner.scala    From ScalaClean   with Apache License 2.0 5 votes vote down vote up
package scalaclean.cli

import java.io.File
import java.nio.file.Paths

import scalaclean.model.ProjectModel
import scalaclean.rules.AbstractRule
import scalafix.testkit.DiffAssertions

abstract class AbstractProjectTestRunner(
  val projectNames: List[String], overwriteTargetFiles: Boolean) extends DiffAssertions {

  def taskName: String

  def createModelTaskFn(propsFiles: Seq[File], debug: Boolean): ProjectModel => AbstractRule

  def run(): Boolean = {

    // sbt and intellij have different ideas about the base directory for running tests.
    // so try both options
    val propsFiles = projectNames.map { projectName =>
      val srcDir = Paths.get(s"../testProjects/$projectName/target/scala-2.12/classes/META-INF/ScalaClean/").toAbsolutePath
      val f1 = srcDir.resolve(s"ScalaClean.properties").toFile
      if (f1.exists()) {
        f1
      } else {
        val srcDir = Paths.get(s"testProjects/$projectName/target/scala-2.12/classes/META-INF/ScalaClean/").toAbsolutePath
        val f1 = srcDir.resolve(s"ScalaClean.properties").toFile
        f1
      }

    }

    val options = SCOptions(taskName, debug = true, validate = true, replace = overwriteTargetFiles, propsFiles)
    val main = new ScalaCleanMain(options, createModelTaskFn(propsFiles, options.debug))
    !main.run() || overwriteTargetFiles
  }
} 
Example 176
Source File: Project.scala    From ScalaClean   with Apache License 2.0 5 votes vote down vote up
package scalaclean.model.impl

import scalaclean.model._
import java.io.File
import java.net.{URL, URLClassLoader}
import java.nio.file.{Files, Path, Paths}
import java.util.Properties
import java.util.concurrent.ConcurrentHashMap

import scalafix.v1.SymbolInformation

import scala.meta.internal.symtab.{GlobalSymbolTable, SymbolTable}
import scala.meta.io.{AbsolutePath, Classpath}

object Project {

  import org.scalaclean.analysis.PropertyNames._

  def apply(propsPath: Path, projects: ProjectSet): Project = {
    val props = new Properties()
    println("PropsPath = " + propsPath)
    props.load(Files.newBufferedReader(propsPath))
    val classpathValue = props.getProperty(prop_classpath)
    val outputPath = props.getProperty(prop_outputDir)
    val elementsFilePath = props.getProperty(prop_elementsFile)
    val relationshipsFilePath = props.getProperty(prop_relationshipsFile)
    val extensionsFilePath = props.getProperty(prop_extensionsFile)
    val src = props.getProperty(prop_src)
    val srcBuildBase = props.getProperty(prop_srcBuildBase)
    val srcFiles = props.getProperty(prop_srcFiles, "").split(File.pathSeparatorChar).toSet
    val srcRoots = props.getProperty(prop_srcRoots).split(File.pathSeparatorChar).toList.sortWith((s1, s2) => s1.length > s2.length || s1 < s2).map(AbsolutePath(_))
    println("srcRoots = " + srcRoots)
    assert(classpathValue ne null, props.keys)
    assert(outputPath ne null, props.keys)

    val classPath = Classpath.apply(classpathValue)

    new Project(projects, classPath, outputPath, src, srcRoots, srcBuildBase, elementsFilePath, relationshipsFilePath, extensionsFilePath, srcFiles)
  }
}

class Project private(
                       val projects: ProjectSet, val classPath: Classpath, val outputPath: String, val src: String, val srcRoots: List[AbsolutePath], val srcBuildBase: String,
                       elementsFilePath: String, relationshipsFilePath: String, extensionsFilePath: String,
                       val srcFiles: Set[String]) {
  def symbolTable: SymbolTable = GlobalSymbolTable(classPath, includeJdk = true)

  lazy val classloader: ClassLoader = new URLClassLoader(Array(new URL("file:" + outputPath + "/")), null)

  private val infos = new ConcurrentHashMap[LegacyElementId, SymbolInformation]()

  def symbolInfo(viewedFrom: ElementModelImpl, symbol: LegacyElementId): SymbolInformation = {
    infos.computeIfAbsent(symbol,
      s => //any doc in the project would do though
        viewedFrom.source.doc.info(s.symbol).orNull)
  }

  def read: (Vector[ElementModelImpl], BasicRelationshipInfo) = ModelReader.read(this, elementsFilePath, relationshipsFilePath, extensionsFilePath)

  private val sourcesMap = new ConcurrentHashMap[String, SourceData]()

  def source(name: String): SourceData = {
    sourcesMap.computeIfAbsent(name, p => SourceData(this, Paths.get(p)))
  }

} 
Example 177
Source File: Settings.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.service.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers.{catchReadError, optF}
import pureconfig.{ConfigConvert, ConfigSource}

import scala.annotation.nowarn


@SuppressWarnings(Array("LooksLikeInterpolatedString"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  implicit private val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  @nowarn("cat=unused")
  implicit private val permissionConverter: ConfigConvert[Permission] =
    ConfigConvert.viaString[Permission](optF(Permission(_)), _.toString)

  @nowarn("cat=unused")
  implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
    ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)

  @nowarn("cat=unused")
  implicit private val pathConverter: ConfigConvert[Path] =
    ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)

  @nowarn("cat=unused")
  implicit private val authTokenConverter: ConfigConvert[AccessToken] =
    ConfigConvert.viaString[AccessToken](catchReadError(s => AccessToken(s)), _.value)

  val serviceConfig: ServiceConfig =
    ConfigSource.fromConfig(config).at("app").loadOrThrow[ServiceConfig]
}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 178
Source File: StorageCacheSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.cache

import java.nio.file.Paths
import java.time.Clock

import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.DiskStorage
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class StorageCacheSpec
    extends ActorSystemFixture("StorageCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val clock: Clock             = Clock.systemUTC
  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val time   = clock.instant()
  val lastId = url"http://example.com/lastA"
  // initialInstant.minusSeconds(1L + genInt().toLong)

  val tempStorage = DiskStorage(ref1, genIri, 1L, false, true, "alg", Paths.get("/tmp"), read, write, 1024L)

  val lastStorageProj1 = tempStorage.copy(id = lastId)
  val lastStorageProj2 = tempStorage.copy(ref = ref2, id = lastId)

  val storagesProj1: List[DiskStorage] = List.fill(5)(tempStorage.copy(id = genIri)) :+ lastStorageProj1
  val storagesProj2: List[DiskStorage] = List.fill(5)(tempStorage.copy(ref = ref2, id = genIri)) :+ lastStorageProj2

  private val cache = StorageCache[Task]

  "StorageCache" should {

    "index storages" in {
      forAll((storagesProj1 ++ storagesProj2).zipWithIndex) {
        case (storage, index) =>
          implicit val instant = time.plusSeconds(index.toLong)
          cache.put(storage).runToFuture.futureValue
          cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual Some(storage)
      }
    }

    "get latest default storage" in {
      cache.getDefault(ref1).runToFuture.futureValue shouldEqual Some(lastStorageProj1)
      cache.getDefault(ref2).runToFuture.futureValue shouldEqual Some(lastStorageProj2)
      cache.getDefault(ProjectRef(genUUID)).runToFuture.futureValue shouldEqual None
    }

    "list storages" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs storagesProj2
    }

    "deprecate storage" in {
      val storage          = storagesProj1.head
      implicit val instant = time.plusSeconds(30L)
      cache.put(storage.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(storage.ref, storage.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs storagesProj1.filterNot(_ == storage)
    }
  }
} 
Example 179
Source File: DiskStorageOperationsSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths

import akka.http.scaladsl.model.{ContentTypes, Uri}
import cats.effect.IO
import ch.epfl.bluebrain.nexus.commons.test._
import ch.epfl.bluebrain.nexus.commons.test.io.IOEitherValues
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resources.file.File.FileDescription
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.{KgError, TestHelper}
import ch.epfl.bluebrain.nexus.service.config.Settings
import ch.epfl.bluebrain.nexus.sourcing.RetryStrategyConfig
import org.mockito.IdiomaticMockito
import org.scalatest.{BeforeAndAfter, OptionValues}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class DiskStorageOperationsSpec
    extends ActorSystemFixture("DiskStorageOperationsSpec")
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfter
    with IdiomaticMockito
    with IOEitherValues
    with Resources
    with TestHelper
    with OptionValues {

  implicit private val appConfig = Settings(system).serviceConfig

  implicit private val sc: StorageConfig = appConfig.kg.storage.copy(
    DiskStorageConfig(Paths.get("/tmp"), "SHA-256", read, write, false, 1024L),
    RemoteDiskStorageConfig("http://example.com", "v1", None, "SHA-256", read, write, true, 1024L),
    S3StorageConfig("MD5", read, write, true, 1024L),
    "password",
    "salt",
    RetryStrategyConfig("linear", 300.millis, 5.minutes, 100, 1.second)
  )

  private val project  = ProjectRef(genUUID)
  private val storage  = Storage.DiskStorage.default(project)
  private val resId    = Id(storage.ref, genIri)
  private val fileDesc = FileDescription("my file.txt", ContentTypes.`text/plain(UTF-8)`)

  "DiskStorageOperations" should {

    "verify when the storage exists" in {
      val verify = new DiskStorageOperations.VerifyDiskStorage[IO](storage)
      verify.apply.accepted
    }

    "save and fetch files" in {
      val save   = new DiskStorageOperations.SaveDiskFile[IO](storage)
      val fetch  = new DiskStorageOperations.FetchDiskFile[IO]()
      val source = genSource

      val attr    = save.apply(resId, fileDesc, source).ioValue
      attr.bytes shouldEqual 16L
      attr.filename shouldEqual fileDesc.filename
      attr.mediaType shouldEqual fileDesc.mediaType.value
      attr.location shouldEqual Uri(s"file:///tmp/${mangle(project, attr.uuid, "my%20file.txt")}")
      attr.path shouldEqual attr.location.path.tail.tail.tail
      val fetched = fetch.apply(attr).ioValue

      consume(source) shouldEqual consume(fetched)
    }

    "not link files" in {
      val link = new DiskStorageOperations.LinkDiskFile[IO]()
      link.apply(resId, fileDesc, Uri.Path("/foo")).failed[KgError] shouldEqual KgError.UnsupportedOperation
    }
  }

} 
Example 180
Source File: PackageObjectSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths
import java.util.UUID

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.FileIO
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.file.File.Digest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PackageObjectSpec extends AnyFlatSpecLike with Matchers with ScalaFutures {

  "uriToPath" should "convert an Akka Uri that represents a valid file path to a Java Path" in {
    uriToPath("file:///some/path/my%20file.txt") shouldEqual Some(Paths.get("/some/path/my file.txt"))
    uriToPath("s3://some/path") shouldEqual None
    uriToPath("foo") shouldEqual None
  }

  "pathToUri" should "convert a Java Path to an Akka Uri" in {
    pathToUri(Paths.get("/some/path/my file.txt")) shouldEqual Uri("file:///some/path/my%20file.txt")
  }

  "mangle" should "generate a properly mangled path given a file project and UUID" in {
    val projUuid = UUID.fromString("4947db1e-33d8-462b-9754-3e8ae74fcd4e")
    val fileUuid = UUID.fromString("b1d7cda2-1ec0-40d2-b12e-3baf4895f7d7")
    mangle(ProjectRef(projUuid), fileUuid, "my file.jpg") shouldEqual
      "4947db1e-33d8-462b-9754-3e8ae74fcd4e/b/1/d/7/c/d/a/2/my file.jpg"
  }

  "digest" should "properly compute the hash of a given input" in {
    implicit val as: ActorSystem = ActorSystem()

    val filePath = "/storage/s3.json"
    val path     = Paths.get(getClass.getResource(filePath).toURI)
    val input    = FileIO.fromPath(path)
    val algo     = "SHA-256"

    input.runWith(digestSink(algo)(as.dispatcher)).futureValue shouldEqual Digest(
      algo,
      "5602c497e51680bef1f3120b1d6f65d480555002a3290029f8178932e8f4801a"
    )
  }
} 
Example 181
Source File: KryoSerializerInitSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.serializers

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import io.altoo.akka.serialization.kryo.KryoSerializer
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class KryoSerializerInitSpec
    extends TestKit(ActorSystem("KryoSerializerInitSpec"))
    with AnyWordSpecLike
    with Matchers
    with TryValues
    with TestHelper {
  private val serialization = SerializationExtension(system)

  "A Path Kryo serialization" should {
    "succeed" in {
      val path = Paths.get("resources/application.conf")

      // Find the Serializer for it
      val serializer = serialization.findSerializerFor(path)
      serializer.getClass.equals(classOf[KryoSerializer]) shouldEqual true

      // Check serialization/deserialization
      val serialized = serialization.serialize(path)
      serialized.isSuccess shouldEqual true

      val deserialized = serialization.deserialize(serialized.get, path.getClass)
      deserialized.isSuccess shouldEqual true
      deserialized.success.value shouldEqual path
    }
  }

} 
Example 182
Source File: CliOpts.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli

import java.nio.file.{Path, Paths}

import cats.data.{Validated, ValidatedNel}
import cats.implicits._
import ch.epfl.bluebrain.nexus.cli.sse.{BearerToken, Offset}
import com.monovore.decline.{Argument, Opts}
import org.http4s.Uri

import scala.util.Try


object CliOpts extends OptsInstances {

  val token: Opts[Option[BearerToken]] = Opts
    .option[String](
      long = "token",
      help = "The token to use when interacting with the Nexus API; " +
        "a 'none' string value will remove any preconfigured token."
    )
    .validate("Token must be a non empty string") { !_.isBlank }
    .map {
      case "none" => None
      case value  => Some(BearerToken(value))
    }

  val offset: Opts[Option[Offset]] = Opts
    .option[String](
      long = "offset",
      help = "The offset to use when starting the event replay; " +
        "a 'none' string value will discard any saved offset."
    )
    .map(_.trim)
    .mapValidated {
      case "none" => Validated.validNel(None)
      case value  => Offset(value).toRight("Offset is not valid").map(o => Some(o)).toValidatedNel
    }

  val endpoint: Opts[Uri] = Opts
    .option[Uri](
      long = "endpoint",
      help = "The base address of the Nexus API"
    )

  val envConfig: Opts[Path] = Opts
    .option[Path](
      long = "env",
      help = "The environment configuration file"
    )

  val postgresConfig: Opts[Path] = Opts
    .option[Path](
      long = "config",
      help = "The postgres configuration file"
    )

  val influxConfig: Opts[Path] = Opts
    .option[Path](
      long = "config",
      help = "The influx configuration file"
    )

}

trait OptsInstances {
  implicit protected val uriArgument: Argument[Uri] = new Argument[Uri] {
    override def read(string: String): ValidatedNel[String, Uri] =
      Uri
        .fromString(string)
        .leftMap(_ => s"Invalid Uri: '$string'")
        .ensure(s"Invalid Uri: '$string'")(uri => uri.scheme.isDefined)
        .toValidatedNel
    override val defaultMetavar: String                          = "http://..."
  }

  implicit protected val pathArgument: Argument[Path] = new Argument[Path] {
    override def read(string: String): ValidatedNel[String, Path] =
      Try(Paths.get(string)).toOption.toRight(s"Invalid file path '$string'").toValidatedNel
    override val defaultMetavar: String                           = "../file.conf"
  }
} 
Example 183
Source File: Main.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage

import java.nio.file.Paths
import java.time.Clock

import akka.actor.ActorSystem
import akka.event.{Logging, LoggingAdapter}
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Route
import akka.util.Timeout
import cats.effect.Effect
import ch.epfl.bluebrain.nexus.storage.Storages.DiskStorage
import ch.epfl.bluebrain.nexus.storage.attributes.AttributesCache
import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.storage.config.AppConfig._
import ch.epfl.bluebrain.nexus.storage.routes.Routes
import com.typesafe.config.{Config, ConfigFactory}
import kamon.Kamon
import monix.eval.Task
import monix.execution.Scheduler

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success}

//noinspection TypeAnnotation
// $COVERAGE-OFF$
object Main {

  def loadConfig(): Config = {
    val cfg = sys.env.get("STORAGE_CONFIG_FILE") orElse sys.props.get("storage.config.file") map { str =>
      val file = Paths.get(str).toAbsolutePath.toFile
      ConfigFactory.parseFile(file)
    } getOrElse ConfigFactory.empty()
    (cfg withFallback ConfigFactory.load()).resolve()
  }

  def setupMonitoring(config: Config): Unit = {
    if (sys.env.getOrElse("KAMON_ENABLED", "false").toBoolean) {
      Kamon.reconfigure(config)
      Kamon.loadModules()
    }
  }

  def shutdownMonitoring(): Unit = {
    if (sys.env.getOrElse("KAMON_ENABLED", "false").toBoolean) {
      Await.result(Kamon.stopModules(), 10.seconds)
    }
  }

  @SuppressWarnings(Array("UnusedMethodParameter"))
  def main(args: Array[String]): Unit = {
    val config = loadConfig()
    setupMonitoring(config)

    implicit val appConfig: AppConfig = Settings(config).appConfig

    implicit val as: ActorSystem                          = ActorSystem(appConfig.description.fullName, config)
    implicit val ec: ExecutionContext                     = as.dispatcher
    implicit val eff: Effect[Task]                        = Task.catsEffect(Scheduler.global)
    implicit val iamIdentities: IamIdentitiesClient[Task] = new IamIdentitiesClient[Task](appConfig.iam)
    implicit val timeout                                  = Timeout(1.minute)
    implicit val clock                                    = Clock.systemUTC

    val storages: Storages[Task, AkkaSource] =
      new DiskStorage(appConfig.storage, appConfig.digest, AttributesCache[Task, AkkaSource])

    val logger: LoggingAdapter = Logging(as, getClass)

    logger.info("==== Cluster is Live ====")
    val routes: Route = Routes(storages)

    val httpBinding: Future[Http.ServerBinding] = {
      Http().bindAndHandle(routes, appConfig.http.interface, appConfig.http.port)
    }
    httpBinding onComplete {
      case Success(binding) =>
        logger.info(s"Bound to ${binding.localAddress.getHostString}: ${binding.localAddress.getPort}")
      case Failure(th)      =>
        logger.error(th, "Failed to perform an http binding on {}:{}", appConfig.http.interface, appConfig.http.port)
        Await.result(as.terminate(), 10.seconds)
    }

    as.registerOnTermination {
      shutdownMonitoring()
    }
    // attempt to leave the cluster before shutting down
    val _ = sys.addShutdownHook {
      Await.result(as.terminate().map(_ => ()), 10.seconds)
    }
  }
}
// $COVERAGE-ON$ 
Example 184
Source File: Settings.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import scala.annotation.nowarn
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._


@SuppressWarnings(Array("LooksLikeInterpolatedString", "OptionGet"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  val appConfig: AppConfig = {
    implicit val uriConverter: ConfigConvert[Uri]                 =
      ConfigConvert.viaString[Uri](catchReadError(s => Uri(s)), _.toString)
    implicit val pathConverter: ConfigConvert[Path]               =
      ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)
    implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
      ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)
    ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
  }

}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 185
Source File: AttributesComputationSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage.attributes

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import akka.actor.ActorSystem
import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)`
import akka.testkit.TestKit
import cats.effect.IO
import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes}
import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError
import ch.epfl.bluebrain.nexus.storage.utils.IOValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContextExecutor

class AttributesComputationSpec
    extends TestKit(ActorSystem("AttributesComputationSpec"))
    with AnyWordSpecLike
    with Matchers
    with IOValues {

  implicit private val ec: ExecutionContextExecutor = system.dispatcher

  private trait Ctx {
    val path           = Files.createTempFile("storage-test", ".txt")
    val (text, digest) = "something" -> "3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb"
  }

  "Attributes computation computation" should {
    val computation = AttributesComputation.akkaAttributes[IO]
    val alg         = "SHA-256"

    "succeed" in new Ctx {
      Files.write(path, text.getBytes(StandardCharsets.UTF_8))
      computation(path, alg).ioValue shouldEqual FileAttributes(
        s"file://$path",
        Files.size(path),
        Digest(alg, digest),
        `text/plain(UTF-8)`
      )
      Files.deleteIfExists(path)
    }

    "fail when algorithm is wrong" in new Ctx {
      Files.write(path, text.getBytes(StandardCharsets.UTF_8))
      computation(path, "wrong-alg").failed[InternalError]
    }

    "fail when file does not exists" in new Ctx {
      computation(Paths.get("/tmp/non/existing"), alg).failed[InternalError]
    }
  }
} 
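The expected digest in the Ctx fixture above is simply the hex-encoded SHA-256 of the text "something"; a standalone sketch using only java.security reproduces it:

import java.security.MessageDigest

val hex = MessageDigest.getInstance("SHA-256")
  .digest("something".getBytes("UTF-8"))
  .map("%02x".format(_))
  .mkString
// hex == "3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb"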
Example 186
Source File: TarFlowSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage

import java.io.ByteArrayInputStream
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl.{FileIO, Source}
import akka.testkit.TestKit
import akka.util.ByteString
import ch.epfl.bluebrain.nexus.storage.utils.{EitherValues, IOEitherValues, Randomness}
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
import org.apache.commons.io.FileUtils
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{BeforeAndAfterAll, Inspectors, OptionValues}

import scala.annotation.tailrec

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with IOEitherValues
    with Randomness
    with EitherValues
    with OptionValues
    with Inspectors
    with BeforeAndAfterAll {

  val basePath = Files.createTempDirectory("tarflow")
  val dir1     = basePath.resolve("one")
  val dir2     = basePath.resolve("two")

  override def afterAll(): Unit = {
    super.afterAll()
    FileUtils.cleanDirectory(basePath.toFile)
    ()
  }

  type PathAndContent = (Path, String)

  "A TarFlow" should {

    Files.createDirectories(dir1)
    Files.createDirectories(dir2)

    def relativize(path: Path): String = basePath.getParent().relativize(path).toString

    "generate the byteString for a tar file correctly" in {
      val file1        = dir1.resolve("file1.txt")
      val file1Content = genString()
      val file2        = dir1.resolve("file3.txt")
      val file2Content = genString()
      val file3        = dir2.resolve("file3.txt")
      val file3Content = genString()
      val files        = List(file1 -> file1Content, file2 -> file2Content, file3 -> file3Content)
      forAll(files) {
        case (file, content) => Source.single(ByteString(content)).runWith(FileIO.toPath(file)).futureValue
      }
      val byteString   = Directory.walk(basePath).via(TarFlow.writer(basePath)).runReduce(_ ++ _).futureValue
      val bytes        = new ByteArrayInputStream(byteString.toArray)
      val tar          = new TarArchiveInputStream(bytes)

      @tailrec def readEntries(
          tar: TarArchiveInputStream,
          entries: List[PathAndContent] = Nil
      ): List[PathAndContent] = {
        val entry = tar.getNextTarEntry
        if (entry == null) entries
        else {
          val data = Array.ofDim[Byte](entry.getSize.toInt)
          tar.read(data)
          readEntries(tar, (Paths.get(entry.getName) -> ByteString(data).utf8String) :: entries)
        }
      }
      val directories = List(relativize(basePath) -> "", relativize(dir1) -> "", relativize(dir2) -> "")
      val untarred    = readEntries(tar).map { case (path, content) => path.toString -> content }
      val expected    = files.map { case (path, content) => relativize(path) -> content } ++ directories
      untarred should contain theSameElementsAs expected
    }
  }

} 
Example 187
Source File: inception.scala    From flink-tensorflow   with Apache License 2.0 5 votes vote down vote up
package org.apache.flink.contrib.tensorflow.examples.inception

import java.nio.file.Paths

import org.apache.flink.contrib.tensorflow.examples.inception.InceptionModel._
import org.apache.flink.contrib.tensorflow.streaming._
import org.apache.flink.streaming.api.functions.source.FileProcessingMode.PROCESS_ONCE
import org.apache.flink.streaming.api.scala._
import org.tensorflow.contrib.scala._
import resource._

import scala.concurrent.duration._


object Inception {

  type Image = Array[Byte]

  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    if (args.length < 2) {
      System.out.println("Usage: Inception <model-dir> <images-dir>")
      System.exit(1)
    }
    val modelPath = Paths.get(args(0)).toUri
    val imagesPath = args(1)

    // read each image
    val imageStream = env
      .readFile(new ImageInputFormat, imagesPath, PROCESS_ONCE, (1 second).toMillis)

    // label each image tensor using the inception5h model
    implicit val inceptionModel = new InceptionModel(modelPath)

    val labelStream: DataStream[(String,LabeledImage)] = imageStream
      .mapWithModel(inceptionModel) { (in, model) =>
        val labels =
          managed(in._2.toTensor.taggedAs[ImageTensor])
          .flatMap(x => model.label(x))
          .acquireAndGet(_.toTextLabels())
        (in._1, labels.head)
      }

    labelStream.print()

    // execute program
    env.execute("Inception")
  }
} 
Example 188
Source File: johnny.scala    From flink-tensorflow   with Apache License 2.0 5 votes vote down vote up
package org.apache.flink.contrib.tensorflow.examples.inception

import java.nio.file.Paths

import org.apache.flink.cep.scala.CEP
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.contrib.tensorflow.examples.inception.InceptionModel._
import org.apache.flink.contrib.tensorflow.streaming._
import org.apache.flink.streaming.api.functions.source.FileProcessingMode._
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.tensorflow.contrib.scala._
import resource._

import scala.collection.mutable
import scala.concurrent.duration._


object Johnny {

  type Image = Array[Byte]

  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    if (args.length < 2) {
      System.out.println("Usage: Johnny <model-dir> <image-dir>")
      System.exit(1)
    }
    val modelPath = Paths.get(args.toSeq.head).toUri
    val imagePath = args.toSeq.tail.head

    // 1. read input files as images
    val imageStream = env
      .readFile(new ImageInputFormat, imagePath, PROCESS_CONTINUOUSLY, (1 second).toMillis)

    // 2. label the image using the TensorFlow 'inception' model
    implicit val inceptionModel = new InceptionModel(modelPath)

    val labelStream = imageStream.mapWithModel(inceptionModel) { (in, model) =>
      val labeled = managed(in._2.toTensor.taggedAs[ImageTensor])
        .flatMap(img => model.label(img))
        .acquireAndGet(label => label.toTextLabels())
      println(labeled.head)
      labeled.head
    }

    // 3. detect a certain time-based pattern representing a 'secret access code'
    val detectionPattern = Pattern
      .begin[LabeledImage]("first").where(img => labeled(img, "cheeseburger", .50f))
      .followedBy("second").where(img => labeled(img, "ladybug", .50f))
      .followedBy("third").where(img => labeled(img, "llama", .50f))
      .within(Time.seconds(60))

    val detectionStream = CEP
      .pattern(labelStream, detectionPattern)
      .select(
        (pattern, timestamp) => AccessDenied(pattern))(
        (pattern) => AccessGranted(pattern("first"), pattern("second"), pattern("third")))

    // print the detection events
    detectionStream.print()

    // execute program
    env.execute("Johnny")
  }

  def labeled(image: LabeledImage, label: String, confidence: Float = .90f): Boolean = {
    image.labels.exists(l => l._2.equalsIgnoreCase(label) && l._1 >= confidence)
  }

  case class AccessDenied(pattern: mutable.Map[String, LabeledImage])
  case class AccessGranted(first: LabeledImage, second: LabeledImage, third: LabeledImage) {
    override def toString: String = s"AccessGranted(${(first.labels.head, second.labels.head, third.labels.head)})"
  }

} 
Example 189
Source File: HugeRoundTripBench.scala    From xml-lens   with MIT License 5 votes vote down vote up
package pl.msitko.xml.bench

import java.io.{File, StringWriter}
import java.nio.file.Paths
import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import pl.msitko.xml.parsing.XmlParser
import pl.msitko.xml.printing.XmlPrinter

import scala.xml.XML

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
class HugeRoundTripBench {
  import HugeRoundTripBench._

  @Benchmark def roundTripWithLens = {
    val parsed = XmlParser.parsePath(path).right.get
    XmlPrinter.print(parsed)
  }

  @Benchmark def roundtripWithStd = {
    val xml = XML.loadFile(file)

    val writer = new StringWriter
    XML.write(writer, xml, "UTF-8", true, null)
    writer.toString
  }
}

object HugeRoundTripBench {
  // BEWARE: that file is not included in the repo because of its huge size
  // you can download some big XMLs at https://dumps.wikimedia.org/enwiki/
  val path = Paths.get("src", "main", "resources", "enwiki-20180420-pages-articles26.xml")

  val file = {
    // BEWARE: that file is not included in the repo because of its huge size
    // you can download some big XMLs at https://dumps.wikimedia.org/enwiki/
    val p = List("src", "main", "resources", "enwiki-20180420-pages-articles26.xml").mkString(File.separator)
    new File(p)
  }
} 
Example 190
Source File: RandomizeBamTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.bam

import java.nio.file.Paths

import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.testing.UnitSpec
import org.apache.commons.math3.stat.regression.SimpleRegression

import scala.util.Random


class RandomizeBamTest extends UnitSpec {

  def slurp(p: PathToBam): IndexedSeq[String] = readBamRecs(p).map(_.id)

  "RandomizeBam" should "truly randomize the order of reads in a file in non-query-grouped mode" in {
    val out1 = makeTempFile("random1.", ".bam")
    val out2 = makeTempFile("random1.", ".bam")
    val random = new Random(1)
    Seq(out1, out2).foreach(out => new RandomizeBam(input=bam, output=out, seed=random.nextInt(), queryGroup=false).execute())

    val in = slurp(bam)
    val o1 = slurp(out1)
    val o2 = slurp(out2)

    val inPos = in.zipWithIndex.map(_._2)
    val o1Pos = o1.map(name => in.indexOf(name))
    val o2Pos = o2.map(name => in.indexOf(name))

    Seq((inPos, o1Pos), (inPos, o2Pos), (o1Pos, o2Pos)).foreach { case (is1, is2) => {
      val regression = new SimpleRegression()
      is1.zip(is2).foreach(pair => regression.addData(pair._1, pair._2))
      regression.regress().getRSquared should be < 0.05
    }}
  }

  it should "randomize the order of reads in a file in query-grouped mode" in {
    val out1 = makeTempFile("random1.", ".bam")
    val out2 = makeTempFile("random1.", ".bam")
    val random = new Random(7)
    Seq(out1, out2).foreach(out => new RandomizeBam(input=bam, output=out, seed=random.nextInt(), queryGroup=true).execute())

    val in = slurp(bam)
    val o1 = slurp(out1)
    val o2 = slurp(out2)

    val inPos = in.zipWithIndex.map(_._2)
    val o1Pos = o1.map(name => in.indexOf(name))
    val o2Pos = o2.map(name => in.indexOf(name))

    Seq((inPos, o1Pos), (inPos, o2Pos), (o1Pos, o2Pos)).foreach { case (is1, is2) => {
      val regression = new SimpleRegression()
      is1.zip(is2).foreach(pair => regression.addData(pair._1, pair._2))
      regression.regress().getRSquared should be < 0.05
    }}

    // Additionally validate that the outputs are query-grouped
    Seq(o1, o2).foreach(names => {
      names.grouped(2).foreach { case Seq(r1, r2) => {
        r1.substring(0, r1.length-2) shouldEqual r2.substring(0, r2.length-2)
      }}
    })
  }
} 
Example 191
Source File: SetMateInformationTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.bam

import java.nio.file.Paths

import com.fulcrumgenomics.bam.api.SamSource
import com.fulcrumgenomics.sopt.cmdline.ValidationException
import com.fulcrumgenomics.testing.UnitSpec


class SetMateInformationTest extends UnitSpec {
  val dir = Paths.get("src/test/resources/com/fulcrumgenomics/bam")
  val querySortedSam      = dir.resolve("set_mate_querysorted.sam")
  val queryGroupedSam     = dir.resolve("set_mate_querygrouped.sam")
  val coordinateSortedSam = dir.resolve("set_mate_coordinatesorted.sam")

  "SetMateInformation" should "correctly set mate information in a query sorted file" in {
    val out = makeTempFile("mated.", ".bam")
    val fixer = new SetMateInformation(input=querySortedSam, output=out)
    fixer.execute()
    val in = SamSource(out)
    in.iterator.filter(r => r.mapped && r.mateMapped).foreach(rec => {
      rec.get("MC") shouldBe defined
      rec.get("MQ") shouldBe defined
    })
    in.close()
  }

  it should "correctly set mate information in a query GROUPED file" in {
    val out = makeTempFile("mated.", ".bam")
    val fixer = new SetMateInformation(input=queryGroupedSam, output=out)
    fixer.execute()
    val in = SamSource(out)
    in.iterator.filter(r => r.mapped && r.mateMapped).foreach(rec => {
      rec.get("MC") shouldBe defined
      rec.get("MQ") shouldBe defined
    })
    in.close()
  }

  it should "throw an exception on a coordinate sorted file" in {
    val out = makeTempFile("mated.", ".bam")
    an[ValidationException] should be thrownBy new SetMateInformation(input=coordinateSortedSam, output=out)
  }
} 
Example 192
Source File: EstimatePoolingFractionsTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.bam

import java.nio.file.Paths

import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.{SamRecord, SamSource, SamWriter}
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.util.Metric
import htsjdk.samtools.SAMFileHeader.SortOrder
import htsjdk.samtools.{MergingSamRecordIterator, SamFileHeaderMerger}
import org.scalatest.ParallelTestExecution

import scala.collection.JavaConverters._

class EstimatePoolingFractionsTest extends UnitSpec with ParallelTestExecution {
  private val Samples = Seq("HG01879", "HG01112", "HG01583", "HG01500", "HG03742", "HG03052")
  private val DataDir = Paths.get("src/test/resources/com/fulcrumgenomics/bam/estimate_pooling_fractions")
  private val Bams    = Samples.map(s => DataDir.resolve(s + ".bam"))
  private val Vcf     = DataDir.resolve("variants.vcf.gz")
  private val Regions = DataDir.resolve("regions.interval_list")

  
  def merge(bams: Seq[PathToBam]): PathToBam = {
    val readers = bams.map(bam => SamSource(bam))

    // Mangle the library names in the header so that the merger sees duplicate RGs as different RGs.
    readers.zipWithIndex.foreach { case (reader, index) =>
        reader.header.getReadGroups.foreach(rg => rg.setLibrary(rg.getLibrary + ":" + index))
    }
    val headerMerger = new SamFileHeaderMerger(SortOrder.coordinate, readers.iterator.map(_.header).toJavaList, false)
    val iterator     = new MergingSamRecordIterator(headerMerger, readers.iterator.map(_.toSamReader).toJavaList, true)

    val output = makeTempFile("merged.", ".bam")
    val out    = SamWriter(output, headerMerger.getMergedHeader, compression = 0)
    iterator.map(_.asInstanceOf[SamRecord]).foreach { r =>
       // Add the RG ID to the read name so we don't have identical read names when merging the same BAM 2+ times
      r.name = r.readGroup.getReadGroupId + ":" + r.name
      out += r
    }
    out.close()
    readers.foreach(_.safelyClose())
    output
  }

  "EstimatePoolingFractions" should "estimate approximately 50/50 for two samples mixed 50/50" in {
    val bam = merge(Bams.take(2))
    val out = makeTempFile("pooling_metrics.", ".txt")
    new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=Samples.take(2)).execute()
    val metrics = Metric.read[PoolingFractionMetric](out)
    metrics should have size 2
    metrics.foreach(m => 0.5 should (be >= m.ci99_low and be <= m.ci99_high))
  }

  Range.inclusive(3, Samples.size-1).foreach { n =>
    it should s"accurately estimate a mixof $n samples" in {
        val bam = merge(Bams.take(n))
        val out = makeTempFile("pooling_metrics.", ".txt")
        new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=Samples.take(n)).execute()
        val metrics = Metric.read[PoolingFractionMetric](out)
        metrics should have size n
        metrics.foreach(m => (1/n.toDouble) should (be >= m.ci99_low and be <= m.ci99_high))
    }
  }

  it should "work with an interval list, and also use all samples if no samples are provided" in {
    val bam = merge(Bams)
    val out = makeTempFile("pooling_metrics.", ".txt")
    new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, intervals=Seq(Regions)).execute()
    val metrics = Metric.read[PoolingFractionMetric](out)
    metrics should have size Samples.size
    metrics.foreach(m => (1/Samples.size.toDouble) should (be >= m.ci99_low and be <= m.ci99_high))
  }

  it should "accurately estimate unequal mixes of two samples" in {
    val samples         = Samples.take(2)
    val Seq(bam1, bam2) = Bams.take(2)
    val bam = merge(Seq(bam1, bam1, bam1, bam2))
    val out = makeTempFile("pooling_metrics.", ".txt")
    new EstimatePoolingFractions(vcf=Vcf, bam=bam, output=out, samples=samples).execute()
    val metrics = Metric.read[PoolingFractionMetric](out)
    metrics should have size 2
    metrics.foreach {m =>
      val expected = if (m.sample == samples.head) 0.75 else 0.25
      expected should (be >= m.ci99_low and be <= m.ci99_high)
    }
  }
} 
Example 193
Source File: NcbiRefSeqGffSourceTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.util

import java.nio.file.Paths

import com.fulcrumgenomics.fasta.{SequenceDictionary, SequenceMetadata}
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.util.GeneAnnotations.Exon
import org.scalatest.OptionValues

class NcbiRefSeqGffSourceTest extends UnitSpec with OptionValues {
  // Excerpted from https://ftp.ncbi.nlm.nih.gov/genomes/refseq/vertebrate_mammalian/Homo_sapiens/latest_assembly_versions/GCF_000001405.39_GRCh38.p13/GCF_000001405.39_GRCh38.p13_genomic.gff.gz
  private val GffFile   = Paths.get("src/test/resources/com/fulcrumgenomics/util/human.gff.gz")
  private val Chr1      = SequenceMetadata(name="chr1", length=249250621, aliases=Seq("1", "NC_000001.11"))
  private val AltChr1   = SequenceMetadata(name="1", length=249250621, aliases=Seq("chr1", "NC_000001.11"))
  private val DictEmpty = SequenceDictionary()
  private val DictChr1  = SequenceDictionary(Chr1)
  private val DictAlt1  = SequenceDictionary(AltChr1)

  "NcbiRefSeqSource" should "auto-map the accession to chr1 when given an empty sequence dictionary" in {
    val source = NcbiRefSeqGffSource(GffFile, includeXs=true, dict=DictEmpty)
    source should have size 7

    // Pseudo-gene should not have been included
    source.get("DDX11L1") shouldBe None

    // Check a micro-RNA for details
    val mir = source.get("MIR6859-1").value
    mir.loci should have size 1
    mir.loci.head.chrom shouldBe "chr1"
    mir.loci.head.start shouldBe 17369
    mir.loci.head.end   shouldBe 17436
    mir.loci.head.transcripts should have size 1
    mir.loci.head.transcripts.head.chrom shouldBe "chr1"
    mir.loci.head.transcripts.head.start shouldBe 17369
    mir.loci.head.transcripts.head.end   shouldBe 17436
    mir.loci.head.transcripts.head.cdsStart shouldBe None
    mir.loci.head.transcripts.head.cdsEnd shouldBe None
    mir.loci.head.transcripts.head.negativeStrand shouldBe true
    mir.loci.head.transcripts.head.exons shouldBe Seq(Exon(17369, 17436))

    // Check a lncRNA for specifics
    val lnc = source.get("MIR1302-2HG").value
    lnc.loci should have size 1
    lnc.loci.head.transcripts should have size 1
    lnc.loci.head.transcripts.head.chrom shouldBe "chr1"
    lnc.loci.head.transcripts.head.start shouldBe 29926
    lnc.loci.head.transcripts.head.end   shouldBe 31295
    lnc.loci.head.transcripts.head.cdsStart shouldBe None
    lnc.loci.head.transcripts.head.cdsEnd shouldBe None
    lnc.loci.head.transcripts.head.negativeStrand shouldBe false
    lnc.loci.head.transcripts.head.exons shouldBe Seq(Exon(29926, 30039), Exon(30564, 30667), Exon(30976, 31295))

    // Check a coding gene for details
    val gene = source.get("OR4F5").value
    gene.loci should have size 1
    gene.loci.head.transcripts should have size 1
    gene.loci.head.transcripts.head.name shouldBe "NM_001005484.1"
    gene.loci.head.transcripts.head.chrom shouldBe "chr1"
    gene.loci.head.transcripts.head.start shouldBe 69091
    gene.loci.head.transcripts.head.end   shouldBe 70008
    gene.loci.head.transcripts.head.cdsStart.value shouldBe 69091
    gene.loci.head.transcripts.head.cdsEnd.value   shouldBe 70008
    gene.loci.head.transcripts.head.negativeStrand shouldBe false

    // Check a gene that has multiple transcripts
    val g2 = source.get("LOC100996442").value
    g2.loci should have size 1
    g2.loci.head.transcripts should have size 14
  }

  it should "still load all genes when given a dictionary that has all the used chroms in it" in {
    val source = NcbiRefSeqGffSource(GffFile, includeXs=true, dict=DictChr1)
    source should have size 7
    for (gene <- source; locus <- gene.loci; tx <- locus) {
      locus.chrom shouldBe "chr1"
      tx.chrom shouldBe "chr1"
    }
  }

  it should "map the chromosome name using the dictionary" in {
    val source = NcbiRefSeqGffSource(GffFile, includeXs=true, dict=DictAlt1)
    source should have size 7
    for (gene <- source; locus <- gene.loci; tx <- locus) {
      locus.chrom shouldBe "1"
      tx.chrom shouldBe "1"
    }
  }

  it should "exclude experimental transcripts (and genes with only exp transcripts)" in {
    val source = NcbiRefSeqGffSource(GffFile, includeXs=false, dict=DictChr1)
    source should have size 5

    for (gene <- source; locus <- gene.loci; tx <- locus) {
      tx.name.charAt(0) should not be 'X'
    }
  }
} 
Example 194
Source File: Configuration.scala    From toketi-iothubreact   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package it.helpers

import java.nio.file.{Files, Paths}

import com.microsoft.azure.eventhubs.EventHubClient
import com.typesafe.config.{Config, ConfigFactory}
import org.json4s._
import org.json4s.jackson.JsonMethods._
import scala.reflect.io.File


object Configuration {

  // JSON parser setup, brings in default date formats etc.
  implicit val formats = DefaultFormats

  private[this] val confConnPath      = "iothub-react.connection."
  private[this] val confStreamingPath = "iothub-react.streaming."

  private[this] val conf: Config = ConfigFactory.load()

  // Read-only settings
  val iotHubNamespace : String = conf.getString(confConnPath + "namespace")
  val iotHubName      : String = conf.getString(confConnPath + "name")
  val iotHubPartitions: Int    = conf.getInt(confConnPath + "partitions")
  val accessPolicy    : String = conf.getString(confConnPath + "accessPolicy")
  val accessKey       : String = conf.getString(confConnPath + "accessKey")

  // Tests can override these
  var receiverConsumerGroup: String = EventHubClient.DEFAULT_CONSUMER_GROUP_NAME
  var receiverTimeout      : Long   = conf.getDuration(confStreamingPath + "receiverTimeout").toMillis
  var receiverBatchSize    : Int    = conf.getInt(confStreamingPath + "receiverBatchSize")

  // Read devices configuration from JSON file
  private[this] lazy val devicesJsonFile                       = conf.getString(confConnPath + "devices")
  private[this] lazy val devicesJson: String                   = File(devicesJsonFile).slurp()
  private[this] lazy val devices    : Array[DeviceCredentials] = parse(devicesJson).extract[Array[DeviceCredentials]]

  def deviceCredentials(id: String): DeviceCredentials = {
    val deviceData: Option[DeviceCredentials] = devices.find(x ⇒ x.deviceId == id)
    if (deviceData == None) {
      throw new RuntimeException(s"Device '${id}' credentials not found")
    }
    deviceData.get
  }

  if (!Files.exists(Paths.get(devicesJsonFile))) {
    throw new RuntimeException("Devices credentials not found")
  }
} 
Example 195
Source File: Config.scala    From scalastringcourseday7   with Apache License 2.0 5 votes vote down vote up
package util

import java.io.File
import java.nio.file.{Path, Paths}

import com.typesafe.config.{Config => TypeSafeConfig, ConfigFactory}
import net.ceedubs.ficus.Ficus._


object Config {
  final private[this] var config: TypeSafeConfig = ConfigFactory.load()

  def set(configFile: File): Unit = {
    config = ConfigFactory.load(ConfigFactory.parseFile(configFile))
  }

  private val defaultPath: String = "src/main/resources"

  final lazy val resourcesDir: String = {
    val path: String = config.as[Option[String]]("resourcesDir").getOrElse(defaultPath)
    val dir: File = new File(path)
    if (dir.canRead && dir.isDirectory) {
      dir.toString
    } else {
      defaultPath
    }
  }

  def resourceFile(filename: String*): Path = {
    val file: File = Paths.get(resourcesDir, filename: _*).toAbsolutePath.toFile
    if (file.canRead && file.isFile) {
      file.toPath
    } else {
      Paths.get(defaultPath, filename: _*).toAbsolutePath
    }
  }

  final lazy val nGram: Int = config.as[Option[Int]]("concept.nGram.n") match {
    case Some(n) if 1 <= n =>
      n
    case _ =>
      1
  }

  final lazy val nGramGap: Int = config.as[Option[Int]]("concept.nGram.gap") match {
    case Some(gap) if 0 <= gap =>
      gap
    case Some(gap) if gap < 0 =>
      Int.MaxValue
    case _ =>
      0
  }

  final lazy val tokenizer: String = config.as[Option[String]]("concept.tokenizer").getOrElse("CharacterNGram")
} 
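A brief usage sketch of resourceFile above; the file name is hypothetical. The helper resolves against resourcesDir when the resulting file is readable and otherwise falls back to the default src/main/resources path:

val tokenizerSettings: java.nio.file.Path = Config.resourceFile("tokenizer", "settings.conf")  // hypothetical file name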
Example 196
Source File: package.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package data

import model.Parsers
import model.misc.GithubRepo

import java.nio.file.Paths

package object github extends Parsers {
  private[github] def path(paths: DataPaths, github: GithubRepo) = {
    val GithubRepo(user, repo) = github
    paths.github.resolve(Paths.get(user, repo))
  }
  def githubReadmePath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("README.html"))

  def githubRepoInfoPath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("repo.json"))

  def githubRepoIssuesPath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("issues.json"))

  def githubRepoContributorsPath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("contributors.json"))

  def githubRepoTopicsPath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("topics.json"))

  def githubRepoCommunityProfilePath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("community.json"))

  def githubRepoChatroomPath(paths: DataPaths, github: GithubRepo) =
    path(paths, github).resolve(Paths.get("chatroom.txt"))

  
  def extractLastPage(links: String): Int = {
    val pattern = """page=([0-9]+)>; rel=["]?([a-z]+)["]?""".r
    val pages = pattern
      .findAllIn(links)
      .matchData
      .map(x => x.group(2) -> x.group(1).toInt)
      .toMap
    pages.getOrElse("last", 1)
  }
} 
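extractLastPage above parses the page numbers out of GitHub's Link pagination header; a sketch with a hypothetical header value:

val linkHeader =
  "<https://api.github.com/repositories/1/issues?page=2>; rel=\"next\", " +
  "<https://api.github.com/repositories/1/issues?page=14>; rel=\"last\""
extractLastPage(linkHeader)  // returns 14; falls back to 1 when no rel="last" entry is present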
Example 197
Source File: PidLock.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package data
package util

import java.lang.management.ManagementFactory

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

object PidLock {
  def create(prefix: String): Unit = {
    val pid = ManagementFactory.getRuntimeMXBean().getName().split("@").head
    val pidFile = Paths.get(s"$prefix-PID")
    Files.write(pidFile, pid.getBytes(StandardCharsets.UTF_8))
    sys.addShutdownHook {
      Files.delete(pidFile)
    }

    ()
  }
} 
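Usage is a single call at process start-up; the prefix shown is hypothetical:

PidLock.create("data-indexer")  // writes the JVM's PID to "data-indexer-PID" and deletes the file on shutdown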
Example 198
Source File: PublishData.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index
package server
package routes
package api
package impl

import data.{DataPaths, LocalPomRepository}
import data.github
import ch.epfl.scala.index.model.misc.Sha1
import org.joda.time.DateTime
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import org.slf4j.LoggerFactory

import scala.util.control.NonFatal


  // Excerpt: the enclosing definition from PublishData.scala is omitted here;
  // only this temporary-path helper is shown.
  private def tmpPath(sha1: String): Path = {
    val tmpDir =
      Files.createTempDirectory(Paths.get(Server.config.tempDirPath), sha1)
    Files.createTempFile(tmpDir, "", "")
  }
} 
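
tmpPath above nests an anonymous temp file inside a sha1-prefixed temp directory under Server.config.tempDirPath, which is not shown in this excerpt. A sketch of the same two-step pattern with the base directory passed in explicitly (the method and parameter names are illustrative):

import java.nio.file.{Files, Path, Paths}

// Create a temp directory prefixed with the sha1 under baseDir,
// then an anonymous temp file inside it.
def tmpPathUnder(baseDir: String, sha1: String): Path = {
  val tmpDir = Files.createTempDirectory(Paths.get(baseDir), sha1)
  Files.createTempFile(tmpDir, "", "")
}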
Example 199
Source File: Main.scala    From scaldy   with Apache License 2.0 5 votes vote down vote up
package com.paytrue.scaldy

import java.io._
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }
import scala.collection.JavaConversions._

case class Config(inputPath: String = ".", outputPath: File = new File("scaldy.dot"), groupSelector: String = "") {
  // allows generating only one subgraph by specifying the name of a class in that subgraph; see def subGraph
  val selectGroup = groupSelector != ""

  val selectClasses = groupSelector.split(",")
}

object Main extends App {
  val parser = new scopt.OptionParser[Config]("scaldy") {
    head("scaldy", "0.1.x")
    opt[String]('i', "in") valueName "<directory>" action {
      (x, c) ⇒ c.copy(inputPath = x)
    } text "in is an optional input directory path, by default the current directory"
    opt[File]('o', "out") valueName "<file>" action {
      (x, c) ⇒ c.copy(outputPath = x)
    } text "out is an optional output file, scaldy.dot by default"
    opt[String]('g', "group") valueName "<class name>" action {
      (x, c) ⇒ c.copy(groupSelector = x)
    } text "group is an optional subgraph selector, list classes (comma separated class names) within the group to generate only their subgraph"
  }

  parser.parse(args, Config()) match {
    case Some(config) ⇒
      val output = exportGraph(config)
      val charOutput: OutputStreamWriter = new OutputStreamWriter(
        new FileOutputStream(
          config.outputPath
        ),
        StandardCharsets.UTF_8
      )

      print(output)
      charOutput.write(output)
      charOutput.close()

    case None ⇒ // bad arguments, error already printed
  }

  def exportGraph(c: Config) = {
    val sourceFiles = FileFinder.listFiles(Paths.get(c.inputPath), ".scala")
    val allClasses = sourceFiles.flatMap(FileClassFinder.getClassesFromFile).filterNot(_.name == "Validated")
    val allNames = allClasses.map(_.name)
    val allRelationships = allClasses.flatMap(_.relationships).filter(rel ⇒ allNames.contains(rel.to))
    val allConnectedClasses =
      allClasses.filter(c ⇒ allRelationships.exists(rel ⇒ rel.from == c.name || rel.to == c.name || c.properties.nonEmpty))
        .groupBy(_.sourceFile)
        .zip(GraphColors.cycledColors)
        .map { case ((file, clazz), color) ⇒ (file, clazz, color) }

    def subGraph(sourceFile: Path, classes: Traversable[BeanClass], color: Color) = {
      val classList = classes.toList
      if (!c.selectGroup || classList.map(_.name).exists(c.selectClasses.contains(_))) {
        val (innerRels, outerRels) = classList.flatMap(_.relationships).filter(rel ⇒ allNames.contains(rel.to)).partition(rel ⇒ classList.map(_.name).contains(rel.to))
        s"""subgraph "cluster_${sourceFile.toString}" {
           |style=invis
           |margin=30
           |${classes.map(_.node(color)).mkString("\n")}
           |${innerRels.map(_.edge).mkString("\n")}
           |}
           |${outerRels.map(_.edge).mkString("\n")}
           |""".stripMargin
      } else {
        ""
      }
    }

    s"""digraph "Class diagram" {
       |graph[splines=true dpi=55]
       |node[shape=none width=0 height=0 margin=0 fontname=Verdana fontsize=14]
       |edge[fontname=Verdana fontsize=12 arrowsize=1.5 minlen=2.5]
       |
       |${allConnectedClasses.map((subGraph _).tupled).mkString("\n")}
       |}
       |""".stripMargin
  }
}

object FileFinder {
  def listFiles(root: Path, fileSuffix: String): List[Path] = {
    var files = List.empty[Path]

    Files.newDirectoryStream(root).foreach(path ⇒ {
      if (Files.isDirectory(path)) {
        files = files ++ listFiles(path, fileSuffix)
      } else if (path.toString.endsWith(fileSuffix)) {
        files = files ++ List(path.toAbsolutePath)
      }
    })

    files
  }
} 
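
FileFinder.listFiles above recurses manually with Files.newDirectoryStream. A shorter alternative sketch using Files.walk, assuming Scala 2.13's scala.jdk.CollectionConverters (the original file relies on the older JavaConversions import instead):

import java.nio.file.{Files, Path}
import scala.jdk.CollectionConverters._

// Recursively collect regular files under root whose names end with fileSuffix.
def listFilesWalk(root: Path, fileSuffix: String): List[Path] = {
  val stream = Files.walk(root)
  try {
    stream.iterator().asScala
      .filter(p => Files.isRegularFile(p) && p.toString.endsWith(fileSuffix))
      .map(_.toAbsolutePath)
      .toList
  } finally stream.close()
}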
Example 200
Source File: PathUtils.scala    From pureconfig   with Mozilla Public License 2.0 5 votes vote down vote up
package pureconfig

import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }

object PathUtils {

  def createTempFile(content: String): Path = {
    val path = Files.createTempFile("pureconfig", "conf")
    path.toFile.deleteOnExit()
    val writer = Files.newBufferedWriter(path, StandardCharsets.UTF_8)
    writer.write(content)
    writer.close()
    path
  }

  lazy val nonExistingPath: Path = {
    val path = Files.createTempFile("pureconfig", "conf")
    Files.delete(path)
    path
  }

  def resourceFromName(name: String): Path = {
    Paths.get(getClass.getResource(name).getPath)
  }

  def listResourcesFromNames(names: String*): Seq[Path] = names.map(resourceFromName)
}
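
A quick way to exercise these helpers is to round-trip some content through createTempFile and read it back; this assumes the snippet runs in (or imports) the pureconfig package where PathUtils lives, and the content string is arbitrary:

import java.nio.charset.StandardCharsets
import java.nio.file.Files

object PathUtilsDemo extends App {
  val path = PathUtils.createTempFile("a = 1")
  val readBack = new String(Files.readAllBytes(path), StandardCharsets.UTF_8)
  assert(readBack == "a = 1")
  assert(!Files.exists(PathUtils.nonExistingPath))
}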