java.nio.file.Files Scala Examples

The following examples show how to use java.nio.file.Files. You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example.
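Before the project-specific examples, here is a minimal, self-contained sketch of the core Files operations that recur below (creating a temporary directory, writing, reading, sizing, and deleting files). The object name FilesQuickStart is illustrative only.

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}

object FilesQuickStart extends App {
  // Create a temporary directory and a file path inside it.
  val dir: Path = Files.createTempDirectory("files-quickstart-")
  val file: Path = dir.resolve("hello.txt")

  // Write a small text file, then read it back.
  Files.write(file, "hello, java.nio.file.Files".getBytes(StandardCharsets.UTF_8))
  val contents = new String(Files.readAllBytes(file), StandardCharsets.UTF_8)
  println(s"read back '$contents' (${Files.size(file)} bytes)")

  // Clean up: delete the file first, then the now-empty directory.
  Files.delete(file)
  Files.delete(dir)
}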
Example 1
Source File: ScalajHttpClient.scala    From telegram   with Apache License 2.0   6 votes
package com.bot4s.telegram.clients

import java.net.Proxy
import java.nio.file.Files

import cats.instances.future._
import com.bot4s.telegram.api.RequestHandler
import com.bot4s.telegram.methods.{Request, JsonRequest, MultipartRequest, Response}
import com.bot4s.telegram.models.InputFile
import com.bot4s.telegram.marshalling
import io.circe.parser.parse
import io.circe.{Decoder, Encoder}
import scalaj.http.{Http, MultiPart}
import slogging.StrictLogging

import scala.concurrent.{ExecutionContext, Future, blocking}


class ScalajHttpClient(token: String, proxy: Proxy = Proxy.NO_PROXY, telegramHost: String = "api.telegram.org")
                      (implicit ec: ExecutionContext) extends RequestHandler[Future] with StrictLogging {

  val connectionTimeoutMs = 10000
  val readTimeoutMs = 50000

  private val apiBaseUrl = s"https://$telegramHost/bot$token/"

  def sendRequest[R, T <: Request[_]](request: T)(implicit encT: Encoder[T], decR: Decoder[R]): Future[R] = {
    val url = apiBaseUrl + request.methodName

    val scalajRequest = request match {
      case r: JsonRequest[_] =>
        Http(url)
          .postData(marshalling.toJson(request))
          .header("Content-Type", "application/json")

      case r: MultipartRequest[_] =>

        // InputFile.FileIds are encoded as query params.
        val (fileIds, files) = r.getFiles.partition {
          case (key, _: InputFile.FileId) => true
          case _ => false
        }

        val parts = files.map {
          case (camelKey, inputFile) =>
            val key = marshalling.snakenize(camelKey)
            inputFile match {
              case InputFile.FileId(id) =>
                throw new RuntimeException("InputFile.FileId cannot must be encoded as a query param")

              case InputFile.Contents(filename, contents) =>
                MultiPart(key, filename, "application/octet-stream", contents)

              case InputFile.Path(path) =>
                MultiPart(key, path.getFileName.toString(),
                  "application/octet-stream",
                  Files.newInputStream(path),
                  Files.size(path),
                  _ => ())

              case other =>
                throw new RuntimeException(s"InputFile $other not supported")
            }
        }

        val fields = parse(marshalling.toJson(request)).fold(throw _, _.asObject.map {
          _.toMap.mapValues {
            json =>
              json.asString.getOrElse(marshalling.printer.pretty(json))
          }
        })

        val fileIdsParams = fileIds.map {
          case (key, inputFile: InputFile.FileId) =>
            marshalling.snakenize(key) -> inputFile.fileId
        }

        val params = fields.getOrElse(Map())

        Http(url).params(params ++ fileIdsParams).postMulti(parts: _*)
    }

    import marshalling.responseDecoder

    Future {
      blocking {
        scalajRequest
          .timeout(connectionTimeoutMs, readTimeoutMs)
          .proxy(proxy)
          .asString
      }
    } map {
      x =>
        if (x.isSuccess)
          marshalling.fromJson[Response[R]](x.body)
        else
          throw new RuntimeException(s"Error ${x.code} on request")
    } map (processApiResponse[R])
  }

} 
Example 2
Source File: MainWithEphemeralDirectory.scala    From daml   with Apache License 2.0   6 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.on.sql

import java.nio.file.Files

import akka.stream.Materializer
import com.daml.ledger.participant.state.kvutils.app.{
  Config,
  LedgerFactory,
  ParticipantConfig,
  ReadWriteService,
  Runner
}
import com.daml.lf.engine.Engine
import com.daml.logging.LoggingContext
import com.daml.resources.{ProgramResource, Resource, ResourceOwner}
import scopt.OptionParser

import scala.concurrent.ExecutionContext

object MainWithEphemeralDirectory {
  private val DirectoryPattern = "%DIR"

  def main(args: Array[String]): Unit = {
    new ProgramResource(new Runner("SQL Ledger", TestLedgerFactory).owner(args)).run()
  }

  object TestLedgerFactory extends LedgerFactory[ReadWriteService, ExtraConfig] {
    override val defaultExtraConfig: ExtraConfig = SqlLedgerFactory.defaultExtraConfig

    override def extraConfigParser(parser: OptionParser[Config[ExtraConfig]]): Unit =
      SqlLedgerFactory.extraConfigParser(parser)

    override def manipulateConfig(config: Config[ExtraConfig]): Config[ExtraConfig] =
      SqlLedgerFactory.manipulateConfig(config)

    override def readWriteServiceOwner(
        config: Config[ExtraConfig],
        participantConfig: ParticipantConfig,
        engine: Engine,
    )(
        implicit materializer: Materializer,
        logCtx: LoggingContext
    ): ResourceOwner[ReadWriteService] =
      new Owner(config, participantConfig, engine)

    class Owner(
        config: Config[ExtraConfig],
        participantConfig: ParticipantConfig,
        engine: Engine,
    )(implicit materializer: Materializer, logCtx: LoggingContext)
        extends ResourceOwner[ReadWriteService] {
      override def acquire()(
          implicit executionContext: ExecutionContext
      ): Resource[ReadWriteService] = {
        val directory = Files.createTempDirectory("ledger-on-sql-ephemeral-")
        val jdbcUrl = config.extra.jdbcUrl.map(_.replace(DirectoryPattern, directory.toString))
        SqlLedgerFactory
          .readWriteServiceOwner(
            config.copy(extra = config.extra.copy(jdbcUrl = jdbcUrl)),
            participantConfig,
            engine,
          )
          .acquire()
      }
    }

  }
} 
Example 3
Source File: KeyUtils.scala    From daml   with Apache License 2.0   6 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.jwt

import java.io.{File, FileInputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.security.cert.CertificateFactory
import java.security.interfaces.{ECPublicKey, RSAPrivateKey, RSAPublicKey}
import java.security.spec.PKCS8EncodedKeySpec
import java.security.KeyFactory

import com.daml.lf.data.TryOps.Bracket.bracket
import scalaz.Show
import scalaz.syntax.show._

import scala.util.Try

object KeyUtils {
  final case class Error(what: Symbol, message: String)

  object Error {
    implicit val showInstance: Show[Error] =
      Show.shows(e => s"KeyUtils.Error: ${e.what}, ${e.message}")
  }

  private val mimeCharSet = StandardCharsets.ISO_8859_1

  /** Renders the given RSA public keys as a JWKS (JSON Web Key Set) document, keyed by key ID. */
  def generateJwks(keys: Map[String, RSAPublicKey]): String = {
    def generateKeyEntry(keyId: String, key: RSAPublicKey): String =
      s"""    {
         |      "kid": "$keyId",
         |      "kty": "RSA",
         |      "alg": "RS256",
         |      "use": "sig",
         |      "e": "${java.util.Base64.getUrlEncoder
           .encodeToString(key.getPublicExponent.toByteArray)}",
         |      "n": "${java.util.Base64.getUrlEncoder.encodeToString(key.getModulus.toByteArray)}"
         |    }""".stripMargin

    s"""
       |{
       |  "keys": [
       |${keys.toList.map { case (keyId, key) => generateKeyEntry(keyId, key) }.mkString(",\n")}
       |  ]
       |}
    """.stripMargin
  }
} 
Example 4
Source File: Util.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.helpers

import java.net.URI
import java.nio.file.{Files, Path, Paths}

object Util {

  @annotation.varargs
  def guessRelativeFileLocation(filenames: String*): URI = {
    val uri = guessPath(filenames)
    Paths.get(".").toAbsolutePath.relativize(uri).toUri
  }

  @annotation.varargs
  def guessFileLocation(filenames: String*): URI = guessPath(filenames).toUri

  private def cwd = Paths.get(".").toAbsolutePath

  def guessPath(filenames: Seq[String]): Path = {
    def folders(from: Path): Stream[Path] =
      if (from == null) Stream.empty else from #:: folders(from.getParent)

    def guess(from: Path): Stream[Path] =
      folders(from).flatMap { d =>
        filenames.toStream.map(d.resolve)
      }

    val guesses = guess(cwd)

    guesses
      .find(Files.exists(_))
      .getOrElse(throw new IllegalStateException(s"""Could not find ${filenames
                                                      .mkString(", ")}, having searched:
                                         |${guesses.mkString("\n")}""".stripMargin))
  }

} 
Example 5
Source File: AuthorizationTest.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import java.nio.file.Files

import akka.actor.ActorSystem
import akka.stream.Materializer
import com.daml.auth.TokenHolder
import com.daml.bazeltools.BazelRunfiles.rlocation
import com.daml.grpc.adapter.{AkkaExecutionSequencerPool, ExecutionSequencerFactory}
import com.daml.http.util.TestUtil.requiredFile
import com.daml.ledger.api.auth.{AuthServiceStatic, Claim, ClaimPublic, Claims}
import com.daml.ledger.client.LedgerClient
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}
import org.slf4j.LoggerFactory

import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal

final class AuthorizationTest extends AsyncFlatSpec with BeforeAndAfterAll with Matchers {

  private val dar = requiredFile(rlocation("docs/quickstart-model.dar"))
    .fold(e => throw new IllegalStateException(e), identity)

  private val testId: String = this.getClass.getSimpleName

  implicit val asys: ActorSystem = ActorSystem(testId)
  implicit val mat: Materializer = Materializer(asys)
  implicit val aesf: ExecutionSequencerFactory = new AkkaExecutionSequencerPool(testId)(asys)
  implicit val ec: ExecutionContext = asys.dispatcher

  private val publicToken = "public"
  private val emptyToken = "empty"
  private val mockedAuthService = Option(AuthServiceStatic {
    case `publicToken` => Claims(Seq[Claim](ClaimPublic))
    case `emptyToken` => Claims(Nil)
  })

  private val accessTokenFile = Files.createTempFile("Extractor", "AuthSpec")
  private val tokenHolder = Option(new TokenHolder(accessTokenFile))

  private def setToken(string: String): Unit = {
    val _ = Files.write(accessTokenFile, string.getBytes())
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    try {
      Files.delete(accessTokenFile)
    } catch {
      case NonFatal(e) =>
        LoggerFactory
          .getLogger(classOf[AuthorizationTest])
          .warn("Unable to delete temporary token file", e)
    }
  }

  protected def withLedger[A] =
    HttpServiceTestFixture
      .withLedger[A](List(dar), testId, Option(publicToken), mockedAuthService) _

  private def packageService(client: LedgerClient): PackageService =
    new PackageService(HttpService.loadPackageStoreUpdates(client.packageClient, tokenHolder))

  behavior of "PackageService against an authenticated sandbox"

  it should "fail immediately if the authorization is insufficient" in withLedger { client =>
    setToken(emptyToken)
    packageService(client).reload.failed.map(_ => succeed)
  }

  it should "succeed if the authorization is sufficient" in withLedger { client =>
    setToken(publicToken)
    packageService(client).reload.map(_ => succeed)
  }

} 
Example 6
Source File: HttpServiceIntegrationTest.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import java.io.File
import java.nio.file.Files

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, StatusCodes, Uri}
import com.daml.http.Statement.discard
import com.daml.http.util.TestUtil.writeToFile
import org.scalacheck.Gen
import org.scalatest.{Assertion, BeforeAndAfterAll}

import scala.concurrent.Future

class HttpServiceIntegrationTest extends AbstractHttpServiceIntegrationTest with BeforeAndAfterAll {

  private val staticContent: String = "static"

  private val staticContentDir: File =
    Files.createTempDirectory("integration-test-static-content").toFile

  override def staticContentConfig: Option[StaticContentConfig] =
    Some(StaticContentConfig(prefix = staticContent, directory = staticContentDir))

  override def jdbcConfig: Option[JdbcConfig] = None

  private val expectedDummyContent: String = Gen
    .listOfN(100, Gen.identifier)
    .map(_.mkString(" "))
    .sample
    .getOrElse(throw new IllegalStateException(s"Cannot create dummy text content"))

  private val dummyFile: File =
    writeToFile(new File(staticContentDir, "dummy.txt"), expectedDummyContent).get
  require(dummyFile.exists)

  override protected def afterAll(): Unit = {
    // clean up temp directory
    discard { dummyFile.delete() }
    discard { staticContentDir.delete() }
    super.afterAll()
  }

  "should serve static content from configured directory" in withHttpService { (uri: Uri, _, _) =>
    Http()
      .singleRequest(
        HttpRequest(
          method = HttpMethods.GET,
          uri = uri.withPath(Uri.Path(s"/$staticContent/${dummyFile.getName}"))))
      .flatMap { resp =>
        discard { resp.status shouldBe StatusCodes.OK }
        val bodyF: Future[String] = getResponseDataBytes(resp, debug = false)
        bodyF.flatMap { body =>
          body shouldBe expectedDummyContent
        }
      }: Future[Assertion]
  }

  "Forwarded" - {
    import Endpoints.Forwarded
    "can 'parse' sample" in {
      Forwarded("for=192.168.0.1;proto=http;by=192.168.0.42").proto should ===(Some("http"))
    }

    "can 'parse' quoted sample" in {
      Forwarded("for=192.168.0.1;proto = \"https\" ;by=192.168.0.42").proto should ===(
        Some("https"))
    }
  }
} 
Example 7
Source File: JsonProtocol.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml

import java.nio.file.{Files, Path}

import com.daml.ledger.api.v1.value.Record
import com.daml.ledger.api.validation.ValueValidator
import com.daml.lf.value.json.ApiCodecCompressed
import spray.json._

object JsonProtocol extends DefaultJsonProtocol {

  private def cannotReadDamlLf(): RuntimeException =
    new UnsupportedOperationException("Reading JSON-encoded DAML-LF value is not supported")

  implicit object RecordJsonFormat extends JsonFormat[Record] {
    override def read(json: JsValue): Record =
      throw cannotReadDamlLf()
    override def write(record: Record): JsValue =
      ApiCodecCompressed.apiValueToJsValue(
        ValueValidator.validateRecord(record).right.get.mapContractId(_.coid)
      )
  }

  private implicit class JsObjectWith(val jsObject: JsObject) extends AnyVal {
    def +(pair: (String, JsValue)): JsObject =
      jsObject.copy(fields = jsObject.fields + pair)
  }

  import Application._
  implicit val createdFormat: RootJsonFormat[CreatedResult] =
    jsonFormat4(CreatedResult.apply)
  implicit val archivedFormat: RootJsonFormat[ArchivedResult] =
    jsonFormat3(ArchivedResult.apply)
  implicit val eventFormat: RootJsonFormat[EventResult] =
    new RootJsonFormat[EventResult] {
      override def read(json: JsValue): Application.EventResult =
        throw cannotReadDamlLf()
      override def write(eventResult: EventResult): JsValue =
        eventResult match {
          case create: CreatedResult =>
            createdFormat
              .write(create)
              .asJsObject + ("type" -> JsString("created"))
          case archive: ArchivedResult =>
            archivedFormat
              .write(archive)
              .asJsObject + ("type" -> JsString("archived"))
        }
    }
  implicit val contractFormat: RootJsonFormat[ContractResult] =
    jsonFormat2(Application.ContractResult.apply)
  implicit val transactionFormat: RootJsonFormat[TransactionResult] =
    jsonFormat2(Application.TransactionResult.apply)

  def saveAsJson[A: JsonWriter](outputFile: Path, a: A): Unit = {
    val _ = Files.write(outputFile, a.toJson.prettyPrint.getBytes())
  }

} 
Example 8
Source File: InterfaceReaderMain.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.iface.reader

import java.io.BufferedInputStream
import java.nio.file.Files

import com.daml.daml_lf_dev.DamlLf

object InterfaceReaderMain extends App {

  val lfFile = new java.io.File(args.apply(0))

  val is = Files.newInputStream(lfFile.toPath)
  try {
    val bis = new BufferedInputStream(is)
    val archive = DamlLf.Archive.parser().parseFrom(bis)
    val out = InterfaceReader.readInterface(archive)
    println(s"out: $out")
  } finally {
    is.close()
  }
} 
Example 9
Source File: UtilTest.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import com.daml.lf.data.Ref.{QualifiedName, PackageId}

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}
import com.daml.lf.{iface => I}

import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class UtilTest extends UtilTestHelpers with GeneratorDrivenPropertyChecks {

  val packageInterface =
    I.Interface(packageId = PackageId.assertFromString("abcdef"), typeDecls = Map.empty)
  val scalaPackageParts = Array("com", "digitalasset")
  val scalaPackage: String = scalaPackageParts.mkString(".")
  val util =
    lf.LFUtil(
      scalaPackage,
      I.EnvironmentInterface fromReaderInterfaces packageInterface,
      outputDir.toFile)

  def damlScalaName(damlNameSpace: Array[String], name: String): util.DamlScalaName =
    util.DamlScalaName(damlNameSpace, name)

  behavior of "Util"

  it should "mkDamlScalaName for a Contract named Test" in {
    val result = util.mkDamlScalaNameFromDirsAndName(Array(), "Test")
    result shouldEqual damlScalaName(Array.empty, "Test")
    result.packageName shouldEqual scalaPackage
    result.qualifiedName shouldEqual (scalaPackage + ".Test")
  }

  it should "mkDamlScalaName for a Template names foo.bar.Test" in {
    val result = util.mkDamlScalaName(Util.Template, QualifiedName assertFromString "foo.bar:Test")
    result shouldEqual damlScalaName(Array("foo", "bar"), "Test")
    result.packageName shouldEqual (scalaPackage + ".foo.bar")
    result.qualifiedName shouldEqual (scalaPackage + ".foo.bar.Test")
  }

  "partitionEithers" should "equal scalaz separate in simple cases" in forAll {
    iis: List[Either[Int, Int]] =>
      import scalaz.syntax.monadPlus._, scalaz.std.list._, scalaz.std.either._
      Util.partitionEithers(iis) shouldBe iis.separate
  }

}

abstract class UtilTestHelpers extends FlatSpec with Matchers with BeforeAndAfterAll {

  val outputDir = Files.createTempDirectory("codegenUtilTest")

  override protected def afterAll(): Unit = {
    super.afterAll()
    deleteRecursively(outputDir)
  }

  def deleteRecursively(dir: Path): Unit = {
    Files.walkFileTree(
      dir,
      new SimpleFileVisitor[Path] {
        override def postVisitDirectory(dir: Path, exc: IOException) = {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }

        override def visitFile(file: Path, attrs: BasicFileAttributes) = {
          Files.delete(file)
          FileVisitResult.CONTINUE
        }
      }
    )
    ()
  }
} 
Example 10
Source File: HeadDamlc.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import java.io.File
import java.nio.file.Files

import scala.sys.error
import scala.sys.process.Process


object HeadDamlc {
  private val packageName = "Test"

  def run(damlPath: String): (File, Unit => Unit) = {
    val damlFile = new File(damlPath)

    val tempDirectory = Files.createTempDirectory("navigator-integration-test").toFile
    val darFile = new File(tempDirectory, s"$packageName.dar")

    tempDirectory.mkdirs()
    tempDirectory.deleteOnExit()
    val shutdown: Unit => Unit = _ => { tempDirectory.delete(); () }

    // DAML -> DAR
    val exitCode = Process(
      s"bazel run damlc -- package $damlPath $packageName --output ${darFile.getAbsolutePath}").!
    if (exitCode != 0) {
      shutdown(())
      error(s"Dar packager: error while running damlc package for $damlPath: exit code $exitCode")
    }

    (darFile, shutdown)
  }
} 
Example 11
Source File: NavigatorBackend.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator

import java.nio.file.{Files, Paths}
import java.util.UUID

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.daml.buildinfo.BuildInfo

object NavigatorBackend extends UIBackend {

  private val configFile = "frontend-config.js"
  override def customEndpoints: Set[CustomEndpoint[_]] = Set()
  override def customRoutes: List[Route] = List(frontendConfigRoute)
  override def applicationInfo: ApplicationInfo = ApplicationInfo(
    id = s"Navigator-${UUID.randomUUID().toString}",
    name = "Navigator",
    version = BuildInfo.Version,
  )
  override def banner: Option[String] =
    Some(
      raw"""   _  __          _           __
        |  / |/ /__ __  __(_)__ ____ _/ /____  ____
        | /    / _ `/ |/ / / _ `/ _ `/ __/ _ \/ __/
        |/_/|_/\_,_/|___/_/\_, /\_,_/\__/\___/_/
        |                 /___/
        |Version """.stripMargin + applicationInfo.version
    )

  /** Serves the frontend configuration file, if one exists in the working directory; otherwise responds with 404. */
  private val frontendConfigRoute: Route = {
    path("api" / "config") {
      if (Files.exists(Paths.get(configFile)))
        getFromFile(configFile)
      else
        complete(StatusCodes.NotFound)
    }
  }
} 
Example 12
Source File: PortFiles.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ports

import java.nio.file.{Files, Path}

import scalaz.{Show, \/}

import scala.collection.JavaConverters._

object PortFiles {
  sealed abstract class Error extends Serializable with Product
  final case class FileAlreadyExists(path: Path) extends Error
  final case class CannotWriteToFile(path: Path, reason: String) extends Error

  object Error {
    implicit val showInstance: Show[Error] = Show.shows {
      case FileAlreadyExists(path) =>
        s"Port file already exists: ${path.toAbsolutePath: Path}"
      case CannotWriteToFile(path, reason) =>
        s"Cannot write to port file: ${path.toAbsolutePath: Path}, reason: $reason"
    }
  }

  /** Writes the port number to a new file at the given path, or returns an error if the file already exists or cannot be written. */
  def write(path: Path, port: Port): Error \/ Unit =
    \/.fromTryCatchNonFatal {
      writeUnsafe(path, port)
    }.leftMap {
      case _: java.nio.file.FileAlreadyExistsException => FileAlreadyExists(path)
      case e => CannotWriteToFile(path, e.toString)
    }

  private def writeUnsafe(path: Path, port: Port): Unit = {
    import java.nio.file.StandardOpenOption.CREATE_NEW
    val lines: java.lang.Iterable[String] = List(port.value.toString).asJava
    val created = Files.write(path, lines, CREATE_NEW)
    created.toFile.deleteOnExit()
  }
} 
Example 13
Source File: PortLock.scala    From daml   with Apache License 2.0   5 votes
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.testing.postgresql

import java.io.RandomAccessFile
import java.nio.channels.{
  ClosedChannelException,
  FileChannel,
  FileLock,
  OverlappingFileLockException
}
import java.nio.file.{Files, Path, Paths}

import com.daml.ports.Port

private[postgresql] object PortLock {

  // We can't use `sys.props("java.io.tmpdir")` because Bazel changes this for each test run.
  // For this to be useful, it needs to be shared across concurrent runs.
  private val portLockDirectory: Path = {
    val tempDirectory =
      if (sys.props("os.name").startsWith("Windows")) {
        Paths.get(sys.props("user.home"), "AppData", "Local", "Temp")
      } else {
        Paths.get("/tmp")
      }
    tempDirectory.resolve(Paths.get("daml", "build", "postgresql-testing", "ports"))
  }

  def lock(port: Port): Either[FailedToLock, Locked] = {
    Files.createDirectories(portLockDirectory)
    val portLockFile = portLockDirectory.resolve(port.toString)
    val file = new RandomAccessFile(portLockFile.toFile, "rw")
    val channel = file.getChannel
    try {
      val lock = channel.tryLock()
      val locked = new Locked(port, lock, channel, file)
      if (lock != null) {
        Right(locked)
      } else {
        locked.unlock()
        Left(FailedToLock(port))
      }
    } catch {
      case _: OverlappingFileLockException =>
        channel.close()
        file.close()
        Left(FailedToLock(port))
    }
  }

  final class Locked(val port: Port, lock: FileLock, channel: FileChannel, file: RandomAccessFile) {
    def unlock(): Unit = {
      try {
        lock.release()
      } catch {
        // ignore
        case _: ClosedChannelException =>
      }
      channel.close()
      file.close()
    }
  }

  case class FailedToLock(port: Port) extends RuntimeException(s"Failed to lock port $port.")

} 
Example 14
Source File: LocalImageFiles.scala    From BigDL   with Apache License 2.0   5 votes
package com.intel.analytics.bigdl.dataset.image

import java.awt.color.ColorSpace
import java.nio.file.{Files, Path}

import org.apache.log4j.Logger

object LocalImageFiles {
  Class.forName("javax.imageio.ImageIO")
  Class.forName("java.awt.color.ICC_ColorSpace")
  // Class.forName("sun.java2d.cmm.lcms.LCMS")
  ColorSpace.getInstance(ColorSpace.CS_sRGB).toRGB(Array[Float](0, 0, 0))

  val logger = Logger.getLogger(getClass)

  /** Reads the paths of the images under the given directory, with or without labels. */
  private[bigdl] def readPaths(path: Path, hasLabel: Boolean = true)
  : Array[LocalLabeledImagePath] = {
    if (hasLabel) readPathsWithLabel(path) else readPathsNoLabel(path)
  }
} 
Example 15
Source File: COCOSeqFileGenerator.scala    From BigDL   with Apache License 2.0   5 votes
package com.intel.analytics.bigdl.models.utils

import com.intel.analytics.bigdl.dataset.segmentation.{COCODataset, COCOSerializeContext}
import java.io.File
import java.nio.file.{Files, Paths}
import java.util.concurrent.atomic.AtomicInteger
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.SequenceFile.Writer
import org.apache.hadoop.io.compress.BZip2Codec
import org.apache.hadoop.io.{BytesWritable, SequenceFile}
import scala.collection.parallel.ForkJoinTaskSupport
import scopt.OptionParser

object COCOSeqFileGenerator {

  /** Command-line parameters for converting a COCO dataset into Hadoop sequence files. */
  case class COCOSeqFileGeneratorParams(
    folder: String = ".",
    metaPath: String = "instances_val2014.json",
    output: String = ".",
    parallel: Int = 1,
    blockSize: Int = 12800
  )

  private val parser = new OptionParser[COCOSeqFileGeneratorParams]("BigDL COCO " +
    "Sequence File Generator") {
    head("BigDL COCO Sequence File Generator")
    opt[String]('f', "folder")
      .text("where you put the COCO image files")
      .action((x, c) => c.copy(folder = x))
    opt[String]('o', "output folder")
      .text("where you put the generated seq files")
      .action((x, c) => c.copy(output = x))
    opt[Int]('p', "parallel")
      .text("parallel num")
      .action((x, c) => c.copy(parallel = x))
    opt[Int]('b', "blockSize")
      .text("block size")
      .action((x, c) => c.copy(blockSize = x))
    opt[String]('m', "metaPath")
      .text("metadata json file path")
      .action((x, c) => c.copy(metaPath = x))
  }

  def main(args: Array[String]): Unit = {
    parser.parse(args, COCOSeqFileGeneratorParams()).foreach { param =>
      println("Loading COCO metadata")
      val meta = COCODataset.load(param.metaPath, param.folder)
      println("Metadata loaded")
      val conf: Configuration = new Configuration
      val doneCount = new AtomicInteger(0)
      val tasks = meta.images.filter(img => {
        val path = img.path
        val valid = Files.exists(path) && !Files.isDirectory(path)
        if (!valid) {
          System.err.print(s"[Warning] The image file ${path.getFileName} does not exist.\n")
        }
        valid
      }).grouped(param.blockSize).zipWithIndex.toArray.par
      tasks.tasksupport = new ForkJoinTaskSupport(
        new scala.concurrent.forkjoin.ForkJoinPool(param.parallel))
      tasks.foreach { case (imgs, blkId) =>
        val outFile = new Path(param.output, s"coco-seq-$blkId.seq")
        val key = new BytesWritable
        val value = new BytesWritable
        val writer = SequenceFile.createWriter(conf, Writer.file(outFile), Writer.keyClass(key
          .getClass), Writer.valueClass(value.getClass), Writer.compression(SequenceFile
          .CompressionType.BLOCK, new BZip2Codec))
        val context = new COCOSerializeContext
        imgs.foreach { img =>
          context.clear()
          context.dump(img.fileName)
          img.dumpTo(context)
          context.dump(COCODataset.MAGIC_NUM)
          val keyBytes = context.toByteArray
          key.set(keyBytes, 0, keyBytes.length)
          val bytes = img.data
          value.set(bytes, 0, bytes.length)
          writer.append(key, value)
          val cnt = doneCount.incrementAndGet()
          if (cnt % 500 == 0) {
            System.err.print(s"\r$cnt / ${meta.images.length} = ${cnt.toFloat/meta.images.length}")
          }
        }
        writer.close()
      }
      System.err.print("\n")
    }
  }
} 
Example 16
Source File: Utils.scala    From BigDL   with Apache License 2.0   5 votes
package com.intel.analytics.bigdl.models.autoencoder

import java.nio.ByteBuffer
import java.nio.file.{Files, Path}

import com.intel.analytics.bigdl.dataset.ByteRecord

import scopt.OptionParser

object Utils {
  val trainMean = 0.13066047740239436
  val trainStd = 0.30810779333114624

  case class TrainParams(
    folder: String = "./",
    checkpoint: Option[String] = None,
    modelSnapshot: Option[String] = None,
    stateSnapshot: Option[String] = None,
    batchSize: Int = 150,
    maxEpoch: Int = 10,
    graphModel: Boolean = false,
    optimizerVersion: Option[String] = None
  )

  val trainParser = new OptionParser[TrainParams]("BigDL Autoencoder on MNIST") {
    opt[String]('f', "folder")
      .text("where you put the MNIST data")
      .action((x, c) => c.copy(folder = x))
    opt[String]("model")
      .text("model snapshot location")
      .action((x, c) => c.copy(modelSnapshot = Some(x)))
    opt[String]("state")
      .text("state snapshot location")
      .action((x, c) => c.copy(stateSnapshot = Some(x)))
    opt[String]("checkpoint")
      .text("where to cache the model and state")
      .action((x, c) => c.copy(checkpoint = Some(x)))
    opt[Int]('b', "batchSize")
      .text("batch size")
      .action((x, c) => c.copy(batchSize = x))
    opt[Int]('e', "maxEpoch")
      .text("max epoch")
      .action((x, c) => c.copy(maxEpoch = x))
    opt[Unit]('g', "graphModel")
      .text("use graph model")
      .action((x, c) => c.copy(graphModel = true))
    opt[String]("optimizerVersion")
      .text("state optimizer version")
      .action((x, c) => c.copy(optimizerVersion = Some(x)))
  }

  private[bigdl] def load(featureFile: Path, labelFile: Path): Array[ByteRecord] = {
    val labelBuffer = ByteBuffer.wrap(Files.readAllBytes(labelFile))
    val featureBuffer = ByteBuffer.wrap(Files.readAllBytes(featureFile))
    val labelMagicNumber = labelBuffer.getInt()

    require(labelMagicNumber == 2049)
    val featureMagicNumber = featureBuffer.getInt()
    require(featureMagicNumber == 2051)

    val labelCount = labelBuffer.getInt()
    val featureCount = featureBuffer.getInt()
    require(labelCount == featureCount)

    val rowNum = featureBuffer.getInt()
    val colNum = featureBuffer.getInt()

    val result = new Array[ByteRecord](featureCount)
    var i = 0
    while (i < featureCount) {
      val img = new Array[Byte]((rowNum * colNum))
      var y = 0
      while (y < rowNum) {
        var x = 0
        while (x < colNum) {
          img(x + y * colNum) = featureBuffer.get()
          x += 1
        }
        y += 1
      }
      result(i) = ByteRecord(img, labelBuffer.get().toFloat + 1.0f)
      i += 1
    }

    result
  }
} 
Example 17
Source File: SerializeModelSpec.scala    From BigDL   with Apache License 2.0   5 votes
package com.intel.analytics.bigdl.nn.mkldnn

import java.io.File
import java.nio.file.{Files, Paths}

import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.nn.mkldnn.ResNet.DatasetType.ImageNet
import com.intel.analytics.bigdl.utils.T
import org.scalatest.{FlatSpec, Matchers}

class SerializeModelSpec extends FlatSpec with Matchers {

  "Save a model" should "work correctly" in {
    val identity = System.identityHashCode(this).toString
    val name = "resnet_50." + identity
    val tmpdir = System.getProperty("java.io.tmpdir")
    val path = Paths.get(tmpdir, name).toAbsolutePath

    // do not use vgg16 model, the vgg16 model will set Xavier to average
    // mode, which will influence other test cases because of Xavier is a
    // case object.
    val model = ResNet(32, 1000, T("depth" -> 50, "dataSet" -> ImageNet))
    println(s"generate the model file ${path.toString}")
    model.save(path.toString, true)
    val loaded = Module.load[Float](path.toString)

    val length = Files.size(path) / 1024.0 / 1024.0
    length should be < 300.0

    println(s"delete the model file ${path.toString}")
    Files.deleteIfExists(path)
  }

} 
Example 18
Source File: AsymmetricCipherKeyPairLoaderSpec.scala    From mantis   with Apache License 2.0   5 votes
package io.iohk.ethereum.network

import java.io.File
import java.nio.file.Files

import io.iohk.ethereum.network
import io.iohk.ethereum.nodebuilder.SecureRandomBuilder
import org.scalatest.{FlatSpec, Matchers}
import org.spongycastle.crypto.AsymmetricCipherKeyPair
import org.spongycastle.crypto.params.{ECPrivateKeyParameters, ECPublicKeyParameters}

class AsymmetricCipherKeyPairLoaderSpec extends FlatSpec with Matchers with SecureRandomBuilder {

  def withFilePath(testCode: String => Any): Unit = {
    val path = Files.createTempFile("key-", "").toAbsolutePath.toString
    require(new File(path).delete(), "File deletion before test failed")
    try {
      testCode(path)
    } finally {
      val file = new File(path)
      assert(!file.exists() || file.delete(), "File deletion after test failed")
    }
  }

  def equalKeyPairs(keyPair1: AsymmetricCipherKeyPair, keyPair2: AsymmetricCipherKeyPair): Boolean = {
    //Compare public keys
    val publicKeyParam1 = keyPair1.getPublic.asInstanceOf[ECPublicKeyParameters]
    val publicKeyParam2 = keyPair2.getPublic.asInstanceOf[ECPublicKeyParameters]
    val equalPublicKey =
      publicKeyParam1.getQ == publicKeyParam2.getQ &&
        publicKeyParam1.getParameters == publicKeyParam2.getParameters &&
        publicKeyParam1.isPrivate == publicKeyParam2.isPrivate

    //Compare private keys
    val privateKeyParam1 = keyPair1.getPrivate.asInstanceOf[ECPrivateKeyParameters]
    val privateKeyParam2 = keyPair2.getPrivate.asInstanceOf[ECPrivateKeyParameters]
    val equalPrivateKey =
      privateKeyParam1.getD == privateKeyParam2.getD &&
        privateKeyParam1.getParameters == privateKeyParam2.getParameters &&
        privateKeyParam1.isPrivate == privateKeyParam2.isPrivate

    equalPrivateKey && equalPublicKey
  }

  it should "correctly save the AsymmetricCipherKeyPairLoader" in {
    withFilePath { path =>
      //Create key pair
      val newKeyPair = network.loadAsymmetricCipherKeyPair(path, secureRandom)

      //Read key pair from file
      val obtainedKeyPair = network.loadAsymmetricCipherKeyPair(path, secureRandom)

      assert(equalKeyPairs(newKeyPair, obtainedKeyPair))
    }
  }
} 
Example 19
Source File: PersistentStorage.scala    From mantis   with Apache License 2.0   5 votes
package io.iohk.ethereum.mpt

import java.io.File
import java.nio.file.Files

import io.iohk.ethereum.db.dataSource.{LevelDBDataSource, LevelDbConfig}
import io.iohk.ethereum.db.storage.{ArchiveNodeStorage, NodeStorage}

trait PersistentStorage {
  def withNodeStorage(testCode: NodesKeyValueStorage => Unit): Unit = {
    val dbPath = Files.createTempDirectory("testdb").toAbsolutePath.toString
    val dataSource = LevelDBDataSource(new LevelDbConfig {
      override val verifyChecksums: Boolean = true
      override val paranoidChecks: Boolean = true
      override val createIfMissing: Boolean = true
      override val path: String = dbPath
    })

    try {
      testCode(new ArchiveNodeStorage(new NodeStorage(dataSource)))
    } finally {
      val dir = new File(dbPath)
      !dir.exists() || dir.delete()
    }
  }
} 
Example 20
Source File: S3ParquetPageOutput.scala    From embulk-output-s3_parquet   with MIT License   5 votes
package org.embulk.output.s3_parquet

import java.io.File
import java.nio.file.{Files, Paths}

import com.amazonaws.services.s3.transfer.{TransferManager, Upload}
import com.amazonaws.services.s3.transfer.model.UploadResult
import org.apache.parquet.hadoop.ParquetWriter
import org.embulk.config.TaskReport
import org.embulk.output.s3_parquet.aws.Aws
import org.embulk.spi.{Exec, Page, PageReader, TransactionalPageOutput}

case class S3ParquetPageOutput(
    outputLocalFile: String,
    reader: PageReader,
    writer: ParquetWriter[PageReader],
    aws: Aws,
    destBucket: String,
    destKey: String
) extends TransactionalPageOutput {

  private var isClosed: Boolean = false

  override def add(page: Page): Unit = {
    reader.setPage(page)
    while (reader.nextRecord()) {
      ContextClassLoaderSwapper.usingPluginClass {
        writer.write(reader)
      }
    }
  }

  override def finish(): Unit = {}

  override def close(): Unit = {
    synchronized {
      if (!isClosed) {
        ContextClassLoaderSwapper.usingPluginClass {
          writer.close()
        }
        isClosed = true
      }
    }
  }

  override def abort(): Unit = {
    close()
    cleanup()
  }

  override def commit(): TaskReport = {
    close()
    val result: UploadResult = ContextClassLoaderSwapper.usingPluginClass {
      aws.withTransferManager { xfer: TransferManager =>
        val upload: Upload =
          xfer.upload(destBucket, destKey, new File(outputLocalFile))
        upload.waitForUploadResult()
      }
    }
    cleanup()
    Exec
      .newTaskReport()
      .set("bucket", result.getBucketName)
      .set("key", result.getKey)
      .set("etag", result.getETag)
      .set("version_id", result.getVersionId)
  }

  private def cleanup(): Unit = {
    Files.delete(Paths.get(outputLocalFile))
  }
} 
Example 21
Source File: GithubRepos.scala    From zorechka-bot   with MIT License   5 votes
package com.wix.zorechka.repos

import java.io.File
import java.nio.file.Files

import zio.{RIO, Task, ZIO}

import collection.JavaConverters._

case class GitRepo(owner: String, name: String, url: String)

trait GithubRepos {
  val repos: GithubRepos.Service
}

object GithubRepos {
  trait Service {
    def repos(reposFile: String): Task[List[GitRepo]]
  }

  trait Live extends GithubRepos {
    val repos: GithubRepos.Service = new GithubRepos.Service {
      override def repos(reposFile: String): Task[ List[GitRepo]] = for {
        result <- ZIO.effect {
          Files
            .readAllLines(new File(reposFile).toPath).asScala
            .map(_.trim.split(" ").toList)
            .collect {
              case ownerRepo :: Nil =>
                val (owner :: repo :: Nil) = ownerRepo.split("/").toList
                GitRepo(owner, repo, s"https://github.com/$owner/$repo.git") // https://github.com/wix-private/strategic-products.git
              case ownerRepo :: token :: Nil =>
                val (owner :: repo :: Nil) = ownerRepo.split("/").toList
                GitRepo(owner, repo, s"https://[email protected]/$owner/$repo.git")
            }
            .toList
        }
      } yield result
    }
  }

  // helpers
  def repos(reposFile: String): RIO[GithubRepos, List[GitRepo]] = ZIO.accessM(env => env.repos.repos(reposFile))
} 
Example 22
Source File: ResultNotifier.scala    From zorechka-bot   with MIT License   5 votes
package com.wix.zorechka.service

import java.nio.file.{Files, Path}

import com.wix.zorechka.Dep
import com.wix.zorechka.clients.{BuildozerClient, GithubClient}
import zio.console.Console
import zio.{RIO, ZIO}

import collection.JavaConverters._

trait ResultNotifier {
  val notifier: ResultNotifier.Service
}

object ResultNotifier {

  trait Service {
    def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): RIO[GithubClient with BuildozerClient with Console, Unit]
  }

  trait CreatePullRequest extends ResultNotifier {
    override val notifier: Service = new Service {
      def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): ZIO[GithubClient with BuildozerClient with Console, Throwable, Unit] = {
        val (depsDesc, branch) = branchName(updatedDeps)

        for {
          _ <- GithubClient.createBranch(forkDir, branch)
          _ <- ZIO.effect(applyDepUpdates(forkDir, updatedDeps))
          _ <- applyUnusedDeps(forkDir, unusedDeps)
          _ <- GithubClient.stageAllChanges(forkDir)
          _ <- GithubClient.commit(forkDir, s"zorechka found new versions for deps: $depsDesc #pr")
          _ <- GithubClient.push(forkDir, branch)
        } yield ()
      }
    }

    private def applyUnusedDeps(repoDir: Path, unusedDeps: List[PackageDeps]): RIO[BuildozerClient, List[Unit]] = {
      ZIO.collectAll {
        unusedDeps.flatMap { unusedDep =>
          unusedDep.deps.map { dep =>
            BuildozerClient.deleteDep(repoDir, dep.target, dep.dep)
          }
        }
      }
    }

    private def applyDepUpdates(repoDir: Path, deps: List[Dep]): Unit = {
      val regex = """artifact = "(.+)",""".r
      deps.foreach { dep =>
        val file = repoDir
          .resolve("third_party")
          .resolve(dep.groupId.replaceAll("\\.", "_") + ".bzl")

        if (file.toFile.exists()) {
          println(s"Rewriting deps for ${file.toAbsolutePath} to $dep")

          val lines = Files.readAllLines(file)
          val result = lines.asScala.map { line =>
            regex.findFirstMatchIn(line) match {
              case Some(m) if line.contains(s"${dep.groupId}:${dep.artifactId}:") =>
                line.replace(m.group(1), s"${dep.groupId}:${dep.artifactId}:${dep.version}")
              case _ => line
            }
          }
          Files.write(file, result.asJava)
        }
      }
    }


    private def branchName(deps: List[Dep]) = {
      val depsSample = deps.map(_.branchKey()).take(3).mkString("_")
      val depsDesc = (if (depsSample.length > 90) depsSample.substring(0, 90) else depsSample) + (if (deps.size > 3) s"_and_${deps.size - 3}_more" else "")
      (depsDesc, s"feature/update-deps-$depsDesc")
    }
  }

  trait PrintPullRequestInfo extends ResultNotifier {
    override val notifier: Service = new Service {
      override def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): RIO[GithubClient with BuildozerClient with Console, Unit] = {
        ZIO.accessM[Console](_.console.putStrLn(
          s"""
             |Going to update:
             |${updatedDeps.mkString("\n")}
             |
             |Going to remove:
             |${unusedDeps.mkString("\n")}
             |""".stripMargin))
      }
    }
  }

  def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): ZIO[ResultNotifier with GithubClient with BuildozerClient with Console, Throwable, Unit] =
    ZIO.accessM[ResultNotifier with GithubClient with BuildozerClient  with Console](_.notifier.notify(forkDir, updatedDeps, unusedDeps))

} 
Example 23
Source File: IndexStorable.scala    From spark-lucenerdd   with Apache License 2.0   5 votes
package org.zouzias.spark.lucenerdd.store

import java.nio.file.{Files, Path}

import org.apache.lucene.facet.FacetsConfig
import org.apache.lucene.store._
import org.zouzias.spark.lucenerdd.config.Configurable
import org.zouzias.spark.lucenerdd.logging.Logging

  /** Selects the Lucene Directory implementation (MMapDirectory on disk or RAMDirectory in memory) based on configuration. */
  protected def storageMode(directoryPath: Path): Directory = {
    if (Config.hasPath(IndexStoreKey)) {
      val storageMode = Config.getString(IndexStoreKey)

      storageMode match {
          // TODO: FIX: Currently there is a single lock instance for each directory.
          // TODO: Implement better lock handling here
        case "disk" => {
          logInfo(s"Config parameter ${IndexStoreKey} is set to 'disk'")
          logInfo("Lucene index will be storage in disk")
          logInfo(s"Index disk location ${tmpJavaDir}")
          // directoryPath.toFile.deleteOnExit() // Delete on exit
          new MMapDirectory(directoryPath, new SingleInstanceLockFactory)
        }
        case ow =>
          logInfo(s"Config parameter ${IndexStoreKey} is set to ${ow}")
          logInfo("Lucene index will be storage in memory (default)")
          logInfo(
            """
              Quoting from
              http://lucene.apache.org/core/7_5_0/core/org/apache/
              lucene/store/RAMDirectory.html

              A memory-resident Directory implementation. Locking
              implementation is by default the SingleInstanceLockFactory.
              Warning: This class is not intended to work with huge indexes.
              Everything beyond several hundred megabytes will waste resources
              (GC cycles), because it uses an internal buffer size of 1024 bytes,
              producing millions of byte[1024] arrays.
              This class is optimized for small memory-resident indexes.
              It also has bad concurrency on multithreaded environments.

              It is recommended to materialize large indexes on disk and
              use MMapDirectory, which is a high-performance directory
              implementation working directly on the file system cache of
              the operating system, so copying data to Java heap
              space is not useful.
            """.stripMargin)
          new RAMDirectory()
      }
    }
    else {
      logInfo(s"Config parameter ${IndexStoreKey} is not set")
      logInfo("Lucene index will be storage in disk")
      new MMapDirectory(directoryPath, new SingleInstanceLockFactory)
    }
  }

  override def close(): Unit = {
    IndexDir.close()
    TaxonomyDir.close()
  }
} 
Example 24
Source File: RewriteSwaggerConfigPlugin.scala    From matcher   with MIT License   5 votes
import java.io.{BufferedInputStream, ByteArrayOutputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import Dependencies.Version
import org.apache.commons.compress.archivers.ArchiveStreamFactory
import org.apache.commons.io.IOUtils
import sbt.Keys._
import sbt._

// See https://github.com/swagger-api/swagger-ui/issues/5710
object RewriteSwaggerConfigPlugin extends AutoPlugin {
  override val trigger = PluginTrigger.NoTrigger
  override def projectSettings: Seq[Def.Setting[_]] =
    inConfig(Compile)(
      Seq(
        resourceGenerators += Def.task {
          val jarName       = s"swagger-ui-${Version.swaggerUi}.jar"
          val indexHtmlPath = s"META-INF/resources/webjars/swagger-ui/${Version.swaggerUi}/index.html"
          val outputFile    = resourceManaged.value / indexHtmlPath

          val html = (Compile / dependencyClasspath).value
            .find(_.data.getName == jarName)
            .flatMap(jar => fileContentFromJar(jar.data, indexHtmlPath))
            .map { new String(_, StandardCharsets.UTF_8) }

          val resource = s"$jarName:$indexHtmlPath"
          html match {
            case None => throw new RuntimeException(s"Can't find $resource")
            case Some(html) =>
              val doc = org.jsoup.parser.Parser.parse(html, "127.0.0.1")
              import scala.collection.JavaConverters._
              doc
                .body()
                .children()
                .asScala
                .find { el =>
                  el.tagName() == "script" && el.html().contains("SwaggerUIBundle")
                } match {
                case None => throw new RuntimeException("Can't patch script in index.html")
                case Some(el) =>
                  val update =
                    """
const ui = SwaggerUIBundle({
    url: "/api-docs/swagger.json",
    dom_id: '#swagger-ui',
    deepLinking: true,
    presets: [ SwaggerUIBundle.presets.apis ],
    plugins: [ SwaggerUIBundle.plugins.DownloadUrl ],
    layout: "BaseLayout",
    operationsSorter: "alpha"
});
window.ui = ui;
"""
                  // Careful! ^ will be inserted as one-liner
                  el.text(update)
              }

              Files.createDirectories(outputFile.getParentFile.toPath)
              IO.write(outputFile, doc.outerHtml())
          }

          Seq(outputFile)
        }.taskValue
      ))

  private def fileContentFromJar(jar: File, fileName: String): Option[Array[Byte]] = {
    val fs      = new BufferedInputStream(Files.newInputStream(jar.toPath))
    val factory = new ArchiveStreamFactory()
    val ais     = factory.createArchiveInputStream(fs)

    try Iterator
      .continually(ais.getNextEntry)
      .takeWhile(_ != null)
      .filter(ais.canReadEntryData)
      .find(_.getName == fileName)
      .map { _ =>
        val out = new ByteArrayOutputStream()
        IOUtils.copy(ais, out)
        out.toByteArray
      } finally fs.close()
  }
} 
Example 25
Source File: AccountStorage.scala    From matcher   with MIT License   5 votes
package com.wavesplatform.dex.db

import java.io.{File, FileInputStream, FileOutputStream}
import java.nio.file.Files
import java.util.Base64

import cats.syntax.either._
import com.google.common.primitives.{Bytes, Ints}
import com.wavesplatform.dex.crypto.Enigma
import com.wavesplatform.dex.db.AccountStorage.Settings.EncryptedFile
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.crypto
import net.ceedubs.ficus.readers.ValueReader

import scala.collection.mutable.ArrayBuffer

case class AccountStorage(keyPair: KeyPair)

object AccountStorage {

  sealed trait Settings

  object Settings {

    case class InMem(seed: ByteStr)                        extends Settings
    case class EncryptedFile(path: File, password: String) extends Settings

    implicit val valueReader: ValueReader[Settings] = ValueReader.relative[Settings] { config =>
      config.getString("type") match {
        case "in-mem" => InMem(Base64.getDecoder.decode(config.getString("in-mem.seed-in-base64")))
        case "encrypted-file" =>
          EncryptedFile(
            path = new File(config.getString("encrypted-file.path")),
            password = config.getString("encrypted-file.password")
          )
        case x => throw new IllegalArgumentException(s"The type of account storage '$x' is unknown. Please update your settings.")
      }
    }
  }

  def load(settings: Settings): Either[String, AccountStorage] = settings match {
    case Settings.InMem(seed) => Right(AccountStorage(KeyPair(seed)))
    case Settings.EncryptedFile(file, password) =>
      if (file.isFile) {
        val encryptedSeedBytes = readFile(file)
        val key                = Enigma.prepareDefaultKey(password)
        val decryptedBytes     = Enigma.decrypt(key, encryptedSeedBytes)
        AccountStorage(KeyPair(decryptedBytes)).asRight
      } else s"A file '${file.getAbsolutePath}' doesn't exist".asLeft
  }

  def save(seed: ByteStr, to: EncryptedFile): Unit = {
    Files.createDirectories(to.path.getParentFile.toPath)
    val key                = Enigma.prepareDefaultKey(to.password)
    val encryptedSeedBytes = Enigma.encrypt(key, seed.arr)
    writeFile(to.path, encryptedSeedBytes)
  }

  def getAccountSeed(baseSeed: ByteStr, nonce: Int): ByteStr = ByteStr(crypto.secureHash(Bytes.concat(Ints.toByteArray(nonce), baseSeed)))

  def readFile(file: File): Array[Byte] = {
    val reader = new FileInputStream(file)
    try {
      val buff = new Array[Byte](1024)
      val r    = new ArrayBuffer[Byte]
      while (reader.available() > 0) {
        val read = reader.read(buff)
        if (read > 0) {
          r.appendAll(buff.iterator.take(read))
        }
      }
      r.toArray
    } finally {
      reader.close()
    }
  }

  def writeFile(file: File, bytes: Array[Byte]): Unit = {
    val writer = new FileOutputStream(file, false)
    try writer.write(bytes)
    finally writer.close()
  }
} 
Example 26
Source File: WithDB.scala    From matcher   with MIT License   5 votes
package com.wavesplatform.dex.db

import java.nio.file.Files

import com.wavesplatform.dex.db.leveldb.LevelDBFactory
import com.wavesplatform.dex.domain.account.Address
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.util.Implicits._
import com.wavesplatform.dex.util.TestHelpers
import monix.reactive.subjects.Subject
import org.iq80.leveldb.{DB, Options}
import org.scalatest.{BeforeAndAfterEach, Suite}

trait WithDB extends BeforeAndAfterEach { this: Suite =>

  private val path                  = Files.createTempDirectory("lvl").toAbsolutePath
  private var currentDBInstance: DB = _

  def db: DB = currentDBInstance

  protected val ignoreSpendableBalanceChanged: Subject[(Address, Asset), (Address, Asset)] = Subject.empty

  override def beforeEach(): Unit = {
    currentDBInstance = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true))
    super.beforeEach()
  }

  override def afterEach(): Unit =
    try {
      super.afterEach()
      db.close()
    } finally {
      TestHelpers.deleteRecursively(path)
    }

  protected def tempDb(f: DB => Any): Any = {
    val path = Files.createTempDirectory("lvl-temp").toAbsolutePath
    val db   = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true))
    try {
      f(db)
    } finally {
      db.close()
      TestHelpers.deleteRecursively(path)
    }
  }
} 
Example 27
Source File: TestHelpers.scala    From matcher   with MIT License   5 votes
package com.wavesplatform.dex.util

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

object TestHelpers {
  def deleteRecursively(path: Path): Unit = Files.walkFileTree(
    path,
    new SimpleFileVisitor[Path] {
      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        Option(exc).fold {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }(throw _)
      }

      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }
    }
  )
} 
Example 28
Source File: RequestDeleter.scala    From matcher   with MIT License   5 votes
package com.wavesplatform.dex.load

import java.io.{File, PrintWriter}
import java.nio.file.Files

import scala.io.Source

object RequestDeleter {

  def delRequests(file: File, deletedCount: Int): Unit = {
    if (Files.exists(file.toPath)) {
      val source = Source.fromFile(file)
      val outputFile = s"requests-after-drop-${System.currentTimeMillis}.txt"
      val output = new PrintWriter(outputFile, "utf-8")

      var i = 0
      var j = 0
      var r = 0
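      // Counters: i = lines scanned while fewer than `deletedCount` complete requests have been seen,
      // j = blank or "{"-prefixed marker lines within the current request (three make up one request),
      // r = complete requests counted so far. The first i lines are dropped below.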

      try {
        source
          .getLines()
          .map(line => {
            if (r < deletedCount)
              i = i + 1
            if (line.isEmpty || line.indexOf("{") == 0) {
              j = j + 1
              if (j % 3 == 0) {
                j = 0
                r = r + 1
              }
            }
            line
          })
          .toList // map on an Iterator is lazy, so materialise it here to ensure i is fully computed before drop
          .drop(i)
          .foreach(line => output.print(s"$line\r\n"))
        println(s"$deletedCount of $r requests have been dropped from ${file.getAbsolutePath}, and saved to $outputFile")
      } finally output.close()
    }
  }
} 
Example 29
Source File: ScalapRenderer.scala    From jardiff   with Apache License 2.0 5 votes vote down vote up
package scala.tools.jardiff

import java.nio.file.{Files, Path}

import scala.tools.scalap.scalax.rules.ScalaSigParserError

class ScalapRenderer(privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".scalap"
  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    try {
      val main = new scala.tools.scalap.Main
      main.printPrivates = privates
      val decompiled = main.decompileScala(classBytes, in.getFileName.toString == "package.class")
      if (decompiled != "") {
        Files.createDirectories(out.getParent)
        Files.write(out, decompiled.getBytes("UTF-8"))
      }
    } catch {
      case err: ScalaSigParserError =>
        System.err.println("WARN: unable to invoke scalap on: " + in + ": " + err.getMessage)
    }
  }
} 
Example 30
Source File: ScalapSigRenderer.scala    From jardiff   with Apache License 2.0 5 votes vote down vote up
package scala.tools.jardiff

import java.nio.file.{Files, Path}

import scala.tools.scalap.scalax.rules.ScalaSigParserError
import scala.tools.scalap.scalax.rules.scalasig.{ByteCode, ScalaSigAttributeParsers}

class ScalapSigRenderer(privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".scalap"
  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    try {
      val scalaSig = ScalaSigAttributeParsers.parse(ByteCode(classBytes))
      val main = new scala.tools.scalap.Main
      main.printPrivates = privates
      val decompiled = main.parseScalaSignature(scalaSig, in.getFileName.toString == "package.sig")
      if (decompiled != "") {
        Files.createDirectories(out.getParent)
        Files.write(out, decompiled.getBytes("UTF-8"))
      }
    } catch {
      case err: ScalaSigParserError =>
        System.err.println("WARN: unable to invoke scalap on: " + in + ": " + err.getMessage)
    }
  }
} 
Example 31
Source File: IOUtil.scala    From jardiff   with Apache License 2.0 5 votes vote down vote up
package scala.tools.jardiff

import java.io.IOException
import java.net.URI
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes
import java.util

object IOUtil {
  def rootPath(fileOrZip: Path): Path = {
    if (fileOrZip.getFileName.toString.endsWith(".jar")) {
      val uri = URI.create(s"jar:${fileOrZip.toUri}")
      newFileSystem(uri, new util.HashMap[String, Any]()).getPath("/")
    } else {
      val extSlash = ".jar/"
      val index = fileOrZip.toString.indexOf(extSlash)
      if (index == -1) {
        fileOrZip
      } else {
        val uri = URI.create("jar:" + Paths.get(fileOrZip.toString.substring(0, index + extSlash.length - 1)).toUri.toString)
        val jarEntry = fileOrZip.toString.substring(index + extSlash.length - 1)
        val system = newFileSystem(uri, new util.HashMap[String, Any]())
        system.getPath(jarEntry)
      }
    }
  }

  private def newFileSystem(uri: URI, map: java.util.Map[String, Any]) =
    try FileSystems.newFileSystem(uri, map)
    catch { case _: FileSystemAlreadyExistsException => FileSystems.getFileSystem(uri) }

  def mapRecursive(source: java.nio.file.Path, target: java.nio.file.Path)(f: (Path, Path) => Unit) = {
    Files.walkFileTree(source, util.EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new FileVisitor[Path] {
      def preVisitDirectory(dir: Path, sourceBasic: BasicFileAttributes): FileVisitResult = {
        val relative = source.relativize(dir).toString
        if (!Files.exists(target.resolve(relative)))
          Files.createDirectory(target.resolve(relative))
        FileVisitResult.CONTINUE
      }

      def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        val relative = source.relativize(file).toString
        f(file, target.resolve(relative))
        FileVisitResult.CONTINUE
      }

      def visitFileFailed(file: Path, e: IOException) = throw e

      def postVisitDirectory(dir: Path, e: IOException): FileVisitResult = {
        if (e != null) throw e
        FileVisitResult.CONTINUE
      }
    })
  }

  def deleteRecursive(p: Path): Unit = {
    import java.io.IOException
    import java.nio.file.attribute.BasicFileAttributes
    import java.nio.file.{FileVisitResult, Files, SimpleFileVisitor}
    Files.walkFileTree(p, new SimpleFileVisitor[Path]() {
      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }

      override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = {
        if (dir.getFileName.toString == ".git")
          FileVisitResult.SKIP_SUBTREE
        else super.preVisitDirectory(dir, attrs)
      }
      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        val listing = Files.list(dir)
        try {
          if (!listing.iterator().hasNext)
            Files.delete(dir)
        } finally {
          listing.close()
        }
        FileVisitResult.CONTINUE
      }
    })
  }
} 
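A hypothetical usage sketch of the two helpers above: open a jar as a zip file system and copy every entry into a scratch directory. The jar path is illustrative only.

import java.nio.file.{Files, Paths}
import scala.tools.jardiff.IOUtil

object CopyJarSketch extends App {
  val jarRoot   = IOUtil.rootPath(Paths.get("/tmp/example.jar")) // hypothetical jar path
  val targetDir = Files.createTempDirectory("jar-contents")
  IOUtil.mapRecursive(jarRoot, targetDir) { (src, dst) =>
    Files.copy(src, dst) // copy each regular file; directories are created by mapRecursive
  }
  println(s"unpacked into $targetDir")
}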
Example 32
Source File: AsmTextifyRenderer.scala    From jardiff   with Apache License 2.0 5 votes vote down vote up
package scala.tools.jardiff

import java.io.PrintWriter
import java.nio.file.{Files, Path}

import scala.collection.JavaConverters._
import org.objectweb.asm.{ClassReader, Opcodes}
import org.objectweb.asm.tree.{ClassNode, FieldNode, InnerClassNode, MethodNode}
import org.objectweb.asm.util.TraceClassVisitor

class AsmTextifyRenderer(code: Boolean, raw: Boolean, privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".asm"
  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    val rawNode = classFromBytes(classBytes)
    val node = if (raw) rawNode else zapScalaClassAttrs(sortClassMembers(rawNode))
    if (!code)
      node.methods.forEach(_.instructions.clear())
    if (!privates) {
      node.methods.removeIf((m: MethodNode) => isPrivate(m.access))
      node.fields.removeIf((m: FieldNode) => isPrivate(m.access))
      node.innerClasses.removeIf((m: InnerClassNode) => isPrivate(m.access))
    }
    Files.createDirectories(out.getParent)
    val pw = new PrintWriter(Files.newBufferedWriter(out))
    try {
      val trace = new TraceClassVisitor(pw)
      node.accept(trace)
    } finally {
      pw.close()
    }
  }

  private def isPrivate(access: Int): Boolean = {
    (access & Opcodes.ACC_PRIVATE) != 0
  }

  def sortClassMembers(node: ClassNode): node.type = {
    node.fields.sort(_.name compareTo _.name)
    node.methods.sort(_.name compareTo _.name)
    node
  }

  private def isScalaSigAnnot(desc: String) =
    List("Lscala/reflect/ScalaSignature", "Lscala/reflect/ScalaLongSignature").exists(desc.contains)

  // drop ScalaSig annotation and class attributes
  private def zapScalaClassAttrs(node: ClassNode): node.type = {
    if (node.visibleAnnotations != null)
      node.visibleAnnotations = node.visibleAnnotations.asScala.filterNot(a => a == null || isScalaSigAnnot(a.desc)).asJava

    node.attrs = null
    node
  }

  private def classFromBytes(bytes: Array[Byte]): ClassNode = {
    val node = new ClassNode()
    new ClassReader(bytes).accept(node, if (raw) 0 else ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES)
    node
  }

} 
Example 33
Source File: RocksMapTest.scala    From utils   with Apache License 2.0 5 votes vote down vote up
package com.indix.utils.store

import java.io.Serializable
import java.nio.file.{Paths, Files}

import org.apache.commons.io.FileUtils
import org.scalatest.{Matchers, FlatSpec}


case class TestObject(a: Int, b: String, c: Array[Int], d: Array[String]) extends Serializable {

  def equals(other: TestObject): Boolean = {
    this.a.equals(other.a) && this.b.equals(other.b) && this.c.sameElements(other.c) && this.d.sameElements(other.d)
  }

}

case class ComplexTestObject(a: Int, b: TestObject) extends Serializable {
  def equals(other: ComplexTestObject): Boolean = {
    this.a.equals(other.a) && this.b.equals(other.b)
  }
}

class RocksMapTest extends FlatSpec with Matchers {

  "RocksMap" should "serialize and deserialize the keys and values" in {
    val db = new RocksMap("test")

    val a: Int = 1
    val b: String = "hello"
    val c: Array[Int] = Array(1, 2, 3)

    val d: Array[String] = Array("a", "b", "c")

    val serialized_a = db.serialize(a)
    val serialized_b = db.serialize(b)
    val serialized_c = db.serialize(c)
    val serialized_d = db.serialize(d)
    val serialized_TestObject = db.serialize(TestObject(a, b, c, d))
    val serialized_ComplexObject = db.serialize(ComplexTestObject(a, TestObject(a, b, c, d)))

    db.deserialize[Int](serialized_a) should be(a)
    db.deserialize[String](serialized_b) should be(b)
    db.deserialize[Array[Int]](serialized_c) should be(c)
    db.deserialize[Array[String]](serialized_d) should be(d)
    db.deserialize[TestObject](serialized_TestObject).equals(TestObject(a, b, c, d)) should be(true)
    db.deserialize[ComplexTestObject](serialized_ComplexObject).equals(ComplexTestObject(a, TestObject(a, b, c, d))) should be(true)
    db.drop()
    db.close()
  }

  it should "put and get values" in {
    val db = new RocksMap("test")

    db.put(1, 1.0)
    db.get[Int, Double](1).getOrElse(0) should be(1.0)
    db.clear()
    db.drop()
    db.close()
  }

  it should "remove values" in {
    val db = new RocksMap("test")

    db.put(1, 1L)
    db.get[Int, Long](1).getOrElse(0) should be(1L)
    db.remove(1)
    db.get[Int, Long](1) should be(None)
    db.drop()
    db.close()
  }

  it should "clear all the values" in {
    val db = new RocksMap(name = "test")
    db.put(1, "hello")
    db.put(2, "yello")
    db.get(1) should not be (None)
    db.get(2) should not be (None)
    db.clear()
    db.get(1) should be(None)
    db.get(2) should be(None)
    db.drop()
    db.close()
  }

  it should "clear the data files when drop is called" in {
    val db = new RocksMap(name = "test")
    Files.exists(Paths.get(db.pathString)) should be (true)
    db.drop()
    Files.exists(Paths.get(db.pathString)) should be (false)
    db.close()
  }


} 
Example 34
Source File: StandardizationQueryV1.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package it.almawave.daf.standardization.v1

import com.typesafe.config.Config
import java.nio.file.Paths
import java.nio.file.Files
import it.almawave.linkeddata.kb.catalog.VocabularyBox
import java.io.FileFilter
import java.io.File
import java.nio.file.Path
import org.slf4j.LoggerFactory


  def details(voc_box: VocabularyBox, level: Int, uri: String, lang: String) = {

    val onto_id = detect_ontology(voc_box)

    val query_path: Path = detailsQueryFile(onto_id)
      .map(_.toPath())
      .getOrElse(default_query_details)

    // disabled because it produced too many log lines: logger.debug(s"daf.standardization> try ${voc_box.id} with details query: ${query_path}")

    val query = new String(Files.readAllBytes(query_path))
    query
      .replace("${vocabularyID}", voc_box.id)
      .replace("${level}", level.toString())
      .replace("${uri}", uri)
      .replace("${lang}", lang)

  }

} 
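The snippet above is an excerpt, so here is a self-contained sketch of the underlying pattern it relies on: read a query template with Files.readAllBytes and substitute placeholders. The file name and placeholder values are made up.

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

object TemplateSketch extends App {
  val templatePath = Paths.get("/tmp/details.sparql") // hypothetical template file
  val template     = new String(Files.readAllBytes(templatePath), StandardCharsets.UTF_8)
  val query = template
    .replace("${vocabularyID}", "voc-001")
    .replace("${level}", "1")
    .replace("${lang}", "en")
  println(query)
}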
Example 35
Source File: SemanticRepositorySpecs.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package specs

import org.junit.runner.RunWith

import scala.concurrent.{ Await, Future }
import scala.concurrent.duration.Duration

import play.api.test._
import play.api.http.Status
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.ws.WSResponse
import play.api.libs.ws.ahc.AhcWSClient
import org.specs2.runner.JUnitRunner
import org.specs2.mutable.Specification
import play.api.libs.json.Json
//import it.almawave.linkeddata.kb.utils.ConfigHelper

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.twirl.api.Content
import play.api.test.Helpers._
import play.api.libs.json.JsObject
import java.io.File
import play.api.http.Writeable
import akka.stream.scaladsl.Source
import play.api.mvc.MultipartFormData
import play.api.libs.Files.TemporaryFile
import java.nio.file.Files
import org.asynchttpclient.AsyncHttpClient
import play.api.libs.ws.WS
import akka.util.ByteString
import play.api.mvc.MultipartFormData.DataPart
import play.api.mvc.MultipartFormData.FilePart
import akka.stream.scaladsl.FileIO
import play.api.libs.ws.WSClient

/*
 * TODO: REWRITE
 */
@RunWith(classOf[JUnitRunner])
class SemanticRepositorySpecs extends Specification {

  def application: Application = GuiceApplicationBuilder().build()

  "The semantic repository" should {

    "call kb/v1/contexts to obtain a list of contexts" in {
      new WithServer(app = application, port = 9999) {
        WsTestClient.withClient { implicit client =>

          val response: WSResponse = Await.result[WSResponse](
            client.url(s"http://localhost:${port}/kb/v1/contexts").execute,
            Duration.Inf)

          response.status must be equalTo Status.OK
          response.json.as[Seq[JsObject]].size must be equals 0
          // response.json.as[Seq[JsObject]].size must be greaterThan 0 // if pre-loaded ontologies!

        }
      }
    }

    "call kb/v1/contexts ensuring all contexts have triples" in {
      new WithServer(app = application, port = 9999) {
        WsTestClient.withClient { implicit client =>

          val response: WSResponse = Await.result[WSResponse](
            client.url(s"http://localhost:${port}/kb/v1/contexts").execute,
            Duration.Inf)

          val json_list = response.json.as[Seq[JsObject]]
          forall(json_list)((_) must not beNull)
          forall(json_list)(_.keys must contain("context", "triples"))
          forall(json_list)(item => (item \ "triples").get.as[Int] > 0)

        }
      }
    }

  }

} 
Example 36
Source File: ParquetWriterItSpec.scala    From parquet4s   with MIT License 5 votes vote down vote up
package com.github.mjakubowski84.parquet4s

import java.nio.file.Files

import org.apache.parquet.hadoop.ParquetFileWriter
import org.scalatest.{BeforeAndAfter, FreeSpec, Matchers}

import scala.util.Random

class ParquetWriterItSpec
  extends FreeSpec
    with Matchers
    with BeforeAndAfter {

  case class Record(i: Int, d: Double, s: String)
  object Record {
    def random(n: Int): Seq[Record] =
      (1 to n).map(_ =>
        Record(Random.nextInt(), Random.nextDouble(), Random.nextString(10)))
  }

  private val tempDir = com.google.common.io.Files.createTempDir().toPath.toAbsolutePath
  private val writePath = tempDir.resolve("file.parquet")

  // Generate the records once; each test below performs its own write.
  private val records = Record.random(5000)

  private def readRecords: Seq[Record] = {
    val iter = ParquetReader.read[Record](writePath.toString)
    try iter.toSeq
    finally iter.close()
  }

  after { // Delete written files
    Files.deleteIfExists(writePath)
  }

  "Batch write should result in proper number of records in the file" in {
    ParquetWriter.writeAndClose(writePath.toString, records)
    readRecords should be(records)
  }

  "Multiple incremental writes produce same result as a single batch write" in {
    val w = ParquetWriter.writer[Record](writePath.toString)
    try records.grouped(5).foreach(w.write)
    finally w.close()
    readRecords shouldBe records
  }

  "Writing record by record works as well" in {
    val w = ParquetWriter.writer[Record](writePath.toString)
    try records.foreach(record => w.write(record))
    finally w.close()
    readRecords shouldBe records
  }

  "Incremental writes work with write mode OVERWRITE" in {
    val w = ParquetWriter.writer[Record](
      writePath.toString,
      ParquetWriter.Options(ParquetFileWriter.Mode.OVERWRITE))
    try records.grouped(5).foreach(w.write)
    finally w.close()
    readRecords shouldBe records
  }

  "Writing to closed writer throws an exception" in {
    val w = ParquetWriter.writer[Record](writePath.toString)
    w.close()
    an[IllegalStateException] should be thrownBy records
      .grouped(2)
      .foreach(w.write)
  }

  "Closing writer without writing anything to it throws no exception" in {
    val w = ParquetWriter.writer[Record](writePath.toString)
    noException should be thrownBy w.close()
  }

  "Closing writer twice throws no exception" in {
    val w = ParquetWriter.writer[Record](writePath.toString)
    noException should be thrownBy w.close()
    noException should be thrownBy w.close()
  }

} 
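As an aside, the Guava temp directory above could equally be created with java.nio.file.Files, which the cleanup code already uses; a minimal sketch:

import java.nio.file.Files

object TempDirSketch extends App {
  val tempDir   = Files.createTempDirectory("parquet-writer-it").toAbsolutePath
  val writePath = tempDir.resolve("file.parquet")
  println(writePath)
}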
Example 37
Source File: EventsReportGenerator.scala    From scala-serialization   with MIT License 5 votes vote down vote up
package com.komanov.serialization.converters

import java.io.{ByteArrayOutputStream, File}
import java.nio.file.{Files, StandardOpenOption}
import java.util.zip.GZIPOutputStream

import com.komanov.serialization.converters.IoUtils._


object EventsReportGenerator extends App {

  val flush = false

  val dir = new File(new File(System.getProperty("user.home"), "123"), "events")
  require(!flush || dir.exists() || dir.mkdirs())

  val (raws, gzips, both) = (Seq.newBuilder[(String, Seq[Int])], Seq.newBuilder[(String, Seq[Int])], Seq.newBuilder[(String, Seq[Int])])

  for ((converterName, converter) <- Converters.all if converter ne ScroogeConverter if converter ne ScalaPbConverter) {
    val results = Seq.newBuilder[(Int, Int)]
    for ((name, site, events) <- TestData.all) {
      val bytes = converter.toByteArray(site)
      val gzipLen = getGzipByteLength(bytes)

      val eventsAndBytes = events.map(e => e -> converter.toByteArray(e.event))
      val eventsLen = eventsAndBytes.map(_._2.length).sum
      val eventsGzipLen = eventsAndBytes.map(_._2).map(getGzipByteLength).sum

      results += bytes.length -> gzipLen
      results += eventsLen -> eventsGzipLen

      if (flush) {
        val normalizedConverterName = converterName.toLowerCase().replace(" ", "-")
        Files.write(dir.getParentFile.toPath.resolve(s"site_${name}_$normalizedConverterName.bin"), bytes, StandardOpenOption.CREATE)
        for ((event, eventBytes) <- eventsAndBytes) {
          Files.write(dir.toPath.resolve(s"${name}_${normalizedConverterName}_${event.event.getClass.getSimpleName}.bin"), eventBytes, StandardOpenOption.CREATE)
        }
      }
    }

    raws += converterName -> results.result().map(_._1)
    gzips += converterName -> results.result().map(_._2)
    both += (converterName + " (rw)") -> results.result().map(_._1)
    both += (converterName + " (gz)") -> results.result().map(_._2)
  }

  println("Data Sizes (raw)")
  printHeaders
  printSizes(raws.result())

  println("Data Sizes (gzip)")
  printHeaders
  printSizes(gzips.result())

  println("Data Sizes")
  printHeaders
  printSizes(both.result())

  private def printHeaders: Any = {
    println("Converter," + TestData.sites.flatMap(t => Seq(t._1, "ev " + t._1)).mkString(","))
  }

  private def printSizes(all: Seq[(String, Seq[Int])]): Unit = {
    for ((name, list) <- all) {
      println(name + "," + list.mkString(","))
    }
  }

  private def getGzipByteLength(bytes: Array[Byte]): Int = {
    using(new ByteArrayOutputStream()) { baos =>
      using(new GZIPOutputStream(baos)) { os =>
        os.write(bytes)
      }
      baos.toByteArray.length
    }
  }

} 
Example 38
Source File: ReportGenerator.scala    From scala-serialization   with MIT License 5 votes vote down vote up
package com.komanov.serialization.converters

import java.io.{ByteArrayOutputStream, File}
import java.nio.file.{Files, StandardOpenOption}
import java.util.zip.GZIPOutputStream

import com.komanov.serialization.converters.IoUtils._


object ReportGenerator extends App {

  val flush = true

  val dir = new File(System.getProperty("user.home"), "123")
  require(!flush || dir.exists() || dir.mkdirs())

  val (raws, gzips) = (Seq.newBuilder[(String, Seq[Int])], Seq.newBuilder[(String, Seq[Int])])

  for ((converterName, converter) <- Converters.all if converter ne ScalaPbConverter if converter ne ScroogeConverter) {
    val results = Seq.newBuilder[(Int, Int)]
    for ((name, site) <- TestData.sites) {
      val bytes = converter.toByteArray(site)
      val gzipLen = getGzipByteLength(bytes)

      results += bytes.length -> gzipLen

      if (flush) {
        val normalizedConverterName = converterName.toLowerCase().replace(" ", "-")
        Files.write(dir.toPath.resolve(s"site_${name}_$normalizedConverterName.bin"), bytes, StandardOpenOption.CREATE)
      }
    }

    raws += converterName -> results.result().map(_._1)
    gzips += converterName -> results.result().map(_._2)
  }

  println("Data Sizes (raw)")
  printHeaders
  printSizes(raws.result())

  println("Data Sizes (gzip)")
  printHeaders
  printSizes(gzips.result())

  private def printHeaders: Any = {
    println("Converter," + TestData.sites.map(_._1).mkString(","))
  }

  private def printSizes(all: Seq[(String, Seq[Int])]): Unit = {
    for ((name, list) <- all) {
      println(name + "," + list.mkString(","))
    }
  }

  private def getGzipByteLength(bytes: Array[Byte]): Int = {
    using(new ByteArrayOutputStream()) { baos =>
      using(new GZIPOutputStream(baos)) { os =>
        os.write(bytes)
      }
      baos.toByteArray.length
    }
  }

} 
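One caveat that applies to both report generators above: when Files.write is given an explicit StandardOpenOption.CREATE, the default TRUNCATE_EXISTING is no longer implied, so rerunning with shorter output can leave stale trailing bytes in an existing file. A minimal sketch of the fuller option set (the output path is hypothetical):

import java.nio.file.{Files, Paths, StandardOpenOption}

object WriteReportSketch extends App {
  val target = Paths.get("/tmp/site_example.bin") // hypothetical output path
  val bytes  = Array[Byte](1, 2, 3)
  // CREATE + WRITE + TRUNCATE_EXISTING guarantees a shorter payload fully replaces the old file.
  Files.write(target, bytes,
    StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)
}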
Example 39
Source File: fixtures.scala    From sonar-scala   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.mwz.sonar.scala

import java.io.File
import java.nio.file.{Files, Path}

import cats.effect.IO
import cats.effect.concurrent.Ref
import com.mwz.sonar.scala.util.Logger

trait WithFiles {
  def withFiles(paths: String*)(test: Seq[File] => Any): Unit = {
    val tmpDir: Path = Files.createTempDirectory("")
    val files: Seq[File] = paths.map(path => Files.createFile(tmpDir.resolve(path)).toFile)
    try test(files)
    finally {
      files.foreach(f => Files.deleteIfExists(f.toPath))
      Files.deleteIfExists(tmpDir)
    }
  }
}

trait WithTracing {
  def withTracing(test: Ref[IO, List[String]] => Any): Unit =
    test(Ref.unsafe[IO, List[String]](List.empty))
}

trait WithLogging {
  object LogLevel {
    sealed trait Level
    final case object Debug extends Level
    final case object Info extends Level
    final case object Warn extends Level
    final case object Error extends Level
  }

  def withLogging(test: (Ref[IO, List[(LogLevel.Level, String)]], Logger[IO]) => Any): Unit = {
    val logs = Ref.unsafe[IO, List[(LogLevel.Level, String)]](List.empty)
    val logger: Logger[IO] = new Logger[IO] {
      def debug(s: String): IO[Unit] = logs.update((LogLevel.Debug, s) :: _)
      def info(s: String): IO[Unit] = logs.update((LogLevel.Info, s) :: _)
      def warn(s: String): IO[Unit] = logs.update((LogLevel.Warn, s) :: _)
      def error(s: String): IO[Unit] = logs.update((LogLevel.Error, s) :: _)
      def error(s: String, e: Throwable): IO[Unit] = logs.update((LogLevel.Error, s) :: _)
    }
    test(logs, logger)
  }
} 
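A hypothetical usage sketch of the WithFiles fixture above; the file names are made up, and the fixture deletes the files again once the body returns.

import com.mwz.sonar.scala.WithFiles

object WithFilesSketch extends App with WithFiles {
  withFiles("a.scala", "b.scala") { files =>
    files.foreach(f => println(s"created ${f.getAbsolutePath}")) // removed again by the fixture
  }
}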
Example 40
Source File: TestStreamsConfig.scala    From scalatest-embedded-kafka   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka.streams

import java.nio.file.Files

import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.clients.consumer.{ConsumerConfig, OffsetResetStrategy}
import org.apache.kafka.streams.StreamsConfig


  def streamConfig(streamName: String,
                   extraConfig: Map[String, AnyRef] = Map.empty)(
      implicit kafkaConfig: EmbeddedKafkaConfig): StreamsConfig = {
    import scala.collection.JavaConverters._

    val defaultConfig = Map(
      StreamsConfig.APPLICATION_ID_CONFIG -> streamName,
      StreamsConfig.BOOTSTRAP_SERVERS_CONFIG -> s"localhost:${kafkaConfig.kafkaPort}",
      StreamsConfig.STATE_DIR_CONFIG -> Files
        .createTempDirectory(streamName)
        .toString,
      // force stream consumers to start reading from the beginning so as not to lose messages
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> OffsetResetStrategy.EARLIEST.toString.toLowerCase
    )
    val configOverwrittenByExtra = defaultConfig ++
      extraConfig
    new StreamsConfig(configOverwrittenByExtra.asJava)
  }
} 
Example 41
Source File: IDEPathHelper.scala    From keycloak-benchmark   with Apache License 2.0 5 votes vote down vote up
import java.net.URI
import java.nio.file.attribute.{FileAttribute, BasicFileAttributes}
import java.nio.file.{StandardCopyOption, Paths, Files, Path}

import io.gatling.core.util.PathHelper._

class Directories(
						 val data: Path,
						 val bodies: Path,
						 val binaries: Path,
						 val results: Path
)

object IDEPathHelper {
	private val uri: URI = getClass.getClassLoader.getResource("gatling.conf").toURI

	val directories: Directories = if (uri.getScheme.startsWith("jar")) {
		val testDir = System.getProperty("test.dir");
		val mainDir: Path = if (testDir != null) {
			val dir = Paths.get(testDir);
			if (dir.exists) {
				if (!dir.isDirectory) {
					throw new IllegalArgumentException(testDir + " is not a directory")
				}
				dir
			} else {
				Files.createDirectory(dir)
			}
		} else {
			Files.createTempDirectory("gatling-")
		}
		System.out.println("Using " + mainDir + " as gatling directory")
		// unpack gatling.conf
		Files.copy(getClass.getResourceAsStream("gatling.conf"), mainDir.resolve("gatling.conf"), StandardCopyOption.REPLACE_EXISTING)
		// using createDirectories to ignore existing
		val directories = new Directories(
			Files.createDirectories(mainDir.resolve("data")),
			Files.createDirectories(mainDir.resolve("bodies")),
			Files.createDirectories(mainDir.resolve("binaries")),
			Files.createDirectories(mainDir.resolve("results")))
		val simulationFile: String = Engine.simulationClass.replace('.', '/') + ".class"
		// unpack simulation
		val targetFile: Path = mainDir.resolve("binaries").resolve(simulationFile)
		Files.createDirectories(targetFile.getParent)
		Files.copy(getClass.getResourceAsStream(simulationFile), targetFile, StandardCopyOption.REPLACE_EXISTING)
		directories
	} else {
		val projectRootDir = RichPath(uri).ancestor(3)
		val mavenResourcesDirectory = projectRootDir / "src" / "test" / "resources"
		val mavenTargetDirectory = projectRootDir / "target"

		new Directories(
			mavenResourcesDirectory / "data",
			mavenResourcesDirectory / "bodies",
			mavenTargetDirectory / "test-classes",
			mavenTargetDirectory / "results")
	}
} 
Example 42
Source File: Test.scala    From connectors   with Apache License 2.0 5 votes vote down vote up
package test

import shadedelta.io.delta.tables._

import java.nio.file.Files

import scala.collection.JavaConverters._

import org.apache.spark.sql._
import org.apache.spark.sql.functions._

// scalastyle:off println
object Test {

  def main(args: Array[String]): Unit = {
    // Create a Spark Session
    val spark = SparkSession
      .builder()
      .appName("Quickstart")
      .master("local[*]")
      .config("spark.ui.enabled", "false")
      .getOrCreate()


    // Create a table
    val dir = Files.createTempDirectory("delta-table")
    println(s"Creating a table at $dir")
    val path = dir.toFile.getCanonicalPath
    var data = spark.range(0, 5)
    data.write.format("delta").mode("overwrite").save(path)

    // Read table
    println("Reading the table")
    val df = spark.read.format("delta").load(path)
    df.show()

    spark.stop()

    // Cleanup
    println("Finished the test. Cleaning up the table")
    Files.walk(dir).iterator.asScala.toSeq.reverse.foreach { f =>
      println(s"Deleting $f")
      Files.delete(f)
    }
  }
} 
Example 43
Source File: KafkaServer.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package alpakka.env

import java.io.File
import java.net.InetSocketAddress
import java.nio.file.{Files, Paths}
import java.util.Properties

import kafka.server.{KafkaConfig, KafkaServerStartable}
import org.apache.commons.io.FileUtils
import org.apache.zookeeper.server.quorum.QuorumPeerConfig
import org.apache.zookeeper.server.{ServerConfig, ZooKeeperServerMain}


object KafkaServer extends App {

  val zookeeperPort = 2181

  val kafkaLogs = "/tmp/kafka-logs"
  val kafkaLogsPath = Paths.get(kafkaLogs)

  // See: https://stackoverflow.com/questions/59592518/kafka-broker-doesnt-find-cluster-id-and-creates-new-one-after-docker-restart/60864763#comment108382967_60864763
  def fix25Behaviour() = {
    val fileWithConflictingContent = kafkaLogsPath.resolve("meta.properties").toFile
    if (fileWithConflictingContent.exists())  FileUtils.forceDelete(fileWithConflictingContent)
  }

  def removeKafkaLogs(): Unit = {
    if (kafkaLogsPath.toFile.exists()) FileUtils.forceDelete(kafkaLogsPath.toFile)
  }

  // Keeps the persistent data
  fix25Behaviour()
  // If everything fails
  //removeKafkaLogs()

  val quorumConfiguration = new QuorumPeerConfig {
    // Since we do not run a cluster, we are not interested in zookeeper data
    override def getDataDir: File = Files.createTempDirectory("zookeeper").toFile
    override def getDataLogDir: File = Files.createTempDirectory("zookeeper-logs").toFile
    override def getClientPortAddress: InetSocketAddress = new InetSocketAddress(zookeeperPort)
  }

  class StoppableZooKeeperServerMain extends ZooKeeperServerMain {
    def stop(): Unit = shutdown()
  }

  val zooKeeperServer = new StoppableZooKeeperServerMain()

  val zooKeeperConfig = new ServerConfig()
  zooKeeperConfig.readFrom(quorumConfiguration)

  val zooKeeperThread = new Thread {
    override def run(): Unit = zooKeeperServer.runFromConfig(zooKeeperConfig)
  }

  zooKeeperThread.start()

  val kafkaProperties = new Properties()
  kafkaProperties.put("zookeeper.connect", s"localhost:$zookeeperPort")
  kafkaProperties.put("broker.id", "0")
  kafkaProperties.put("offsets.topic.replication.factor", "1")
  kafkaProperties.put("log.dirs", kafkaLogs)
  kafkaProperties.put("delete.topic.enable", "true")
  kafkaProperties.put("group.initial.rebalance.delay.ms", "0")
  kafkaProperties.put("transaction.state.log.min.isr", "1")
  kafkaProperties.put("transaction.state.log.replication.factor", "1")
  kafkaProperties.put("zookeeper.connection.timeout.ms", "6000")
  kafkaProperties.put("num.partitions", "10")

  val kafkaConfig = KafkaConfig.fromProps(kafkaProperties)

  val kafka = new KafkaServerStartable(kafkaConfig)

  println("About to start...")
  kafka.startup()

  scala.sys.addShutdownHook{
    println("About to shutdown...")
    kafka.shutdown()
    kafka.awaitShutdown()
    zooKeeperServer.stop()
  }

  zooKeeperThread.join()
} 
Example 44
Source File: files.scala    From spatial   with MIT License 5 votes vote down vote up
package utils.io

import java.io._
import java.nio.file._
import java.util.function.Consumer
import java.nio.file.{Files,Paths}

import scala.io.Source

object files {
  def sep: String = java.io.File.separator
  def cwd: String = new java.io.File("").getAbsolutePath
  final val BUFFER_SIZE: Int = 1024 * 4
  final val EOF = -1

  
  def copyResource(src: String, dest: String): Unit = {
    val outFile = new File(dest)
    val outPath = outFile.getParentFile
    outPath.mkdirs()
    val url = getClass.getResource(src)
    val in: InputStream = url.openStream()
    val out: OutputStream = new FileOutputStream(outFile)
    val buffer = new Array[Byte](BUFFER_SIZE)
    var n: Int = 0
    while ({n = in.read(buffer); n != EOF}) {
      out.write(buffer, 0, n)
    }
    out.close()
    in.close()
  }

  def listFiles(dir:String, exts:List[String]=Nil):List[java.io.File] = {
    val d = new java.io.File(dir)
    if (d.exists && d.isDirectory) {
      d.listFiles.filter { file =>
        file.isFile && exts.exists { ext => file.getName.endsWith(ext) }
      }.toList
    } else {
      Nil
    }
  }

  def splitPath(path:String) = {
    val file = new File(path)
    (file.getParent, file.getName)
  }

  def buildPath(parts:String*):String = {
    parts.mkString(sep)
  }

  def dirName(fullPath:String) = fullPath.split(sep).dropRight(1).mkString(sep)

  def createDirectories(dir:String) = {
    val path = Paths.get(dir)
    if (!Files.exists(path)) Files.createDirectories(path)
  }

} 
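A hypothetical usage sketch of the path helpers above: build an output path under the current working directory and make sure its parent directory exists before writing.

object FilesUsageSketch extends App {
  import utils.io.files._
  val out = buildPath(cwd, "gen", "report.txt") // hypothetical output location
  createDirectories(dirName(out))
  println(s"would write to $out")
}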
Example 45
Source File: LatencyAnalyzer.scala    From spatial   with MIT License 5 votes vote down vote up
package spatial.dse

import argon._
import spatial.lang._
import spatial.node._
import spatial.util.spatialConfig
import spatial.util.modeling._
import spatial.traversal._
import spatial.targets._
import java.io.File
import models._
import argon.node._


case class LatencyAnalyzer(IR: State, latencyModel: LatencyModel) extends AccelTraversal {
  var cycleScope: List[Double] = Nil
  var intervalScope: List[Double] = Nil
  var totalCycles: Seq[Long] = Seq()
  val batchSize = 1000

  def getListOfFiles(d: String):List[String] = {
    import java.nio.file.{FileSystems, Files}
    import scala.collection.JavaConverters._
    val dir = FileSystems.getDefault.getPath(d) 
    Files.walk(dir).iterator().asScala.filter(Files.isRegularFile(_)).map(_.toString).toList//.foreach(println)
  }
  
  override def silence(): Unit = {
    super.silence()
  }


  def test(rewriteParams: Seq[Seq[Any]]): Unit = {
    import scala.language.postfixOps
    import java.io.File
    import sys.process._

    val gen_dir = if (config.genDir.startsWith("/")) config.genDir + "/" else config.cwd + s"/${config.genDir}/"
    val modelJar = getListOfFiles(gen_dir + "/model").filter(_.contains("RuntimeModel-assembly")).head
    totalCycles = rewriteParams.grouped(batchSize).flatMap{params => 
      val batchedParams = params.map{rp => "tune " + rp.mkString(" ")}.mkString(" ")
      val cmd = s"""java -jar ${modelJar} ni ${batchedParams}"""
      // println(s"running cmd: $cmd")
      val output = Process(cmd, new File(gen_dir)).!!
      output.split("\n").filter(_.contains("Total Cycles for App")).map{r => 
        "^.*: ".r.replaceAllIn(r,"").trim.toLong
      }.toSeq
    }.toSeq
    // println(s"DSE Model result: $totalCycles")

  }

  override protected def preprocess[A](b: Block[A]): Block[A] = {

    super.preprocess(b)
  }

  override protected def postprocess[A](b: Block[A]): Block[A] = {
    super.postprocess(b)
  }

  override protected def visit[A](lhs: Sym[A], rhs: Op[A]): Unit = {  }



} 
Example 46
Source File: FilePatternTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels

import java.nio.file.Files

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

class FilePatternTest extends WordSpec with Matchers {

  implicit val fs = FileSystem.get(new Configuration())

  "FilePattern" should {
    "detect single hdfs path without name server" ignore {
      FilePattern("hdfs:///mypath").toPaths() shouldBe List(new Path("hdfs:///mypath"))
    }
    "detect single hdfs path with name server" ignore {
      FilePattern("hdfs://nameserver/mypath").toPaths() shouldBe List(new Path("hdfs://nameserver/mypath"))
    }
    "detect absolute local file" in {
      FilePattern("file:///absolute/file").toPaths() shouldBe List(new Path("file:///absolute/file"))
    }
    "detect relative local file" in {
      FilePattern("file:///local/file").toPaths() shouldBe List(new Path("file:///local/file"))
    }
    "detect relative local file expansion" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it =>
        dir.resolve(it)
      }
      val hdfsPaths = files.map { it =>
        new Path(it.toUri)
      }
      files.foreach(file => Files.createFile(file))
      FilePattern(dir.toUri.toString() + "/*").toPaths().toSet shouldBe hdfsPaths.toSet
      files.foreach(Files.deleteIfExists)
      Files.deleteIfExists(dir)
    }

    //not working on windows
    "detect relative local file expansion with schema" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it =>
        dir.resolve(it)
      }
      val hdfsPaths = files.map { it =>
        new Path(it.toUri)
      }
      files.foreach(file => Files.createFile(file))
      FilePattern(dir.toUri.toString() + "/*").toPaths().toSet shouldBe hdfsPaths.toSet
      files.foreach(Files.deleteIfExists)
      Files.deleteIfExists(dir)
    }

    "use filter if supplied" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it => dir.resolve(it) }
      files.foreach { it => Files.createFile(it) }
      val a = FilePattern(dir.toAbsolutePath().toString() + "/*")
        .withFilter(_.toString().endsWith("a"))
        .toPaths.toSet
      a shouldBe Set(new Path("file:///" + dir.resolve("a")))
      files.foreach { it => Files.deleteIfExists(it) }
      Files.deleteIfExists(dir)
    }
  }
} 
Example 47
Source File: package.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
import java.io.OutputStream
import java.nio.file.Files

import cats.effect.{ContextShift, Sync, Blocker}
import fs2.{Pipe, Pull, Stream}
import cats.implicits._

package object blobstore {
  protected[blobstore] def _writeAllToOutputStream1[F[_]](in: Stream[F, Byte], out: OutputStream, blocker: Blocker)(
    implicit F: Sync[F], CS: ContextShift[F]): Pull[F, Nothing, Unit] = {
    in.pull.uncons.flatMap {
      case None => Pull.done
      case Some((hd, tl)) => Pull.eval[F, Unit](blocker.delay(out.write(hd.toArray))) >> _writeAllToOutputStream1(tl, out, blocker)
    }
  }

  protected[blobstore] def bufferToDisk[F[_]](chunkSize: Int, blocker: Blocker)(implicit F: Sync[F], CS: ContextShift[F])
  : Pipe[F, Byte, (Long, Stream[F, Byte])] = {
    in => Stream.bracket(F.delay(Files.createTempFile("bufferToDisk", ".bin")))(
      p => F.delay(p.toFile.delete).void).flatMap { p =>
        in.through(fs2.io.file.writeAll(p, blocker)).drain ++
        Stream.emit((p.toFile.length, fs2.io.file.readAll(p, blocker, chunkSize)))
    }
  }

} 
Example 48
Source File: StoreOpsTest.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore

import java.nio.charset.Charset
import java.nio.file.Files
import java.util.concurrent.Executors

import cats.effect.{Blocker, IO}
import cats.effect.laws.util.TestInstances
import cats.implicits._
import fs2.Pipe
import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import implicits._
import org.scalatest.matchers.must.Matchers

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext


class StoreOpsTest extends AnyFlatSpec with Matchers with TestInstances {

  implicit val cs = IO.contextShift(ExecutionContext.global)
  val blocker = Blocker.liftExecutionContext(ExecutionContext.fromExecutor(Executors.newCachedThreadPool))

  behavior of "PutOps"
  it should "buffer contents and compute size before calling Store.put" in {
    val bytes: Array[Byte] = "AAAAAAAAAA".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    fs2.Stream.emits(bytes).covary[IO].through(store.bufferedPut(Path("path/to/file.txt"), blocker)).compile.drain.unsafeRunSync()
    store.buf.toArray must be(bytes)

  }

  it should "upload a file from a nio Path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    fs2.Stream.bracket(IO(Files.createTempFile("test-file", ".bin"))) { p =>
      IO(p.toFile.delete).void
    }.flatMap { p =>
      fs2.Stream.emits(bytes).covary[IO].through(fs2.io.file.writeAll(p, blocker)).drain ++
        fs2.Stream.eval(store.put(p, Path("path/to/file.txt"), blocker))
    }.compile.drain.unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

}

final case class DummyStore(check: Path => Assertion) extends Store[IO] {
  val buf = new ArrayBuffer[Byte]()
  override def put(path: Path): Pipe[IO, Byte, Unit] = {
    check(path)
    in => {
      buf.appendAll(in.compile.toVector.unsafeRunSync())
      fs2.Stream.emit(())
    }
  }
  override def list(path: Path): fs2.Stream[IO, Path] = ???
  override def get(path: Path, chunkSize: Int): fs2.Stream[IO, Byte] = ???
  override def move(src: Path, dst: Path): IO[Unit] = ???
  override def copy(src: Path, dst: Path): IO[Unit] = ???
  override def remove(path: Path): IO[Unit] = ???
} 
Example 49
Source File: FileUtil.scala    From wookiee   with Apache License 2.0 5 votes vote down vote up
package com.webtrends.harness.utils

import java.io.File
import java.nio.file.{FileSystems, Files, Path}

import scala.io.Source


  def getSymLink(f:File) : File = {
    if (f == null)
      throw new NullPointerException("File must not be null")
    val path = FileSystems.getDefault.getPath(f.getPath)
    if (Files.isSymbolicLink(path)) {
      f.getCanonicalFile
    } else {
      f.getAbsoluteFile
    }
  }
} 
Example 50
Source File: Utils.scala    From tispark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import java.io.{File, PrintWriter}
import java.nio.file.{Files, Paths}
import java.util.Properties

import org.slf4j.Logger

import scala.collection.JavaConversions._

object Utils {

  def writeFile(content: String, path: String): Unit =
    TryResource(new PrintWriter(path))(_.close()) {
      _.print(content)
    }

  def TryResource[T](res: T)(closeOp: T => Unit)(taskOp: T => Unit): Unit =
    try {
      taskOp(res)
    } finally {
      closeOp(res)
    }

  def readFile(path: String): List[String] =
    Files.readAllLines(Paths.get(path)).toList

  def getOrThrow(prop: Properties, key: String): String = {
    val jvmProp = System.getProperty(key)
    if (jvmProp != null) {
      jvmProp
    } else {
      val v = prop.getProperty(key)
      if (v == null) {
        throw new IllegalArgumentException(key + " is null")
      } else {
        v
      }
    }
  }

  def getFlagOrFalse(prop: Properties, key: String): Boolean =
    getFlag(prop, key, "false")

  private def getFlag(prop: Properties, key: String, defValue: String): Boolean =
    getOrElse(prop, key, defValue).equalsIgnoreCase("true")

  def getOrElse(prop: Properties, key: String, defValue: String): String = {
    val jvmProp = System.getProperty(key)
    if (jvmProp != null) {
      jvmProp
    } else {
      Option(prop.getProperty(key)).getOrElse(defValue)
    }
  }

  def getFlagOrTrue(prop: Properties, key: String): Boolean =
    getFlag(prop, key, "true")

  def time[R](block: => R)(logger: Logger): R = {
    val t0 = System.nanoTime()
    val result = block
    val t1 = System.nanoTime()
    logger.info("Elapsed time: " + (t1 - t0) / 1000.0 / 1000.0 / 1000.0 + "s")
    result
  }

  def ensurePath(basePath: String, paths: String*): Boolean =
    new File(joinPath(basePath, paths: _*)).mkdirs()

  def joinPath(basePath: String, paths: String*): String =
    Paths.get(basePath, paths: _*).toAbsolutePath.toString
} 
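A hypothetical round trip through the writeFile and readFile helpers above, using a temporary file:

object UtilsSketch extends App {
  import org.apache.spark.sql.test.Utils
  val path = java.nio.file.Files.createTempFile("utils-demo", ".txt").toString
  Utils.writeFile("line1\nline2", path)
  println(Utils.readFile(path)) // List(line1, line2)
}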
Example 51
Source File: GlobalWatchService.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Path, Paths, WatchEvent}

import akka.actor.{Actor, ActorLogging, ActorRef}
import org.apache.iota.fey.GlobalWatchService.REGISTER_WATCHER_PERFORMER
import org.apache.iota.fey.WatchingDirectories.STOPPED

class GlobalWatchService extends Actor with ActorLogging{

  //WatchService
  var watchThread:Thread = null
  val watchFileTask:GlobalWatchServiceTask = new GlobalWatchServiceTask(self)

  override def preStart(): Unit = {
    startWatcher("PRE-START")
  }

  override def postStop(): Unit = {
    stopWatcher("POST-STOP")
  }

  private def startWatcher(from: String) = {
    log.info(s"Starting Global Watcher from $from")
    watchThread = new Thread(watchFileTask, "FEY_GLOBAL_WATCH_SERVICE_PERFORMERS")
    watchThread.setDaemon(true)
    watchThread.start()
  }

  private def stopWatcher(from: String) = {
    log.info(s"Stopping Global Watcher from $from")
    if(watchThread != null && watchThread.isAlive){
      watchThread.interrupt()
      watchThread = null
    }
  }

  override def receive: Receive = {
    case REGISTER_WATCHER_PERFORMER(path, file_name, actor, events, loadExists) =>
      registerPath(path,file_name,actor,events,loadExists)
    case STOPPED =>
      stopWatcher("STOPPED-THREAD")
      startWatcher("STOPPED-THREAD")
    case x => log.error(s"Unknown message $x")
  }

  private def broadcastMessageIfFileExists(actor: ActorRef, pathWithFile: String) = {
    val filePath = Paths.get(pathWithFile)
    if(Files.exists(filePath)){
      log.info(s"File $pathWithFile exists. Broadcasting message to actor ${actor.path.toString}")
      actor ! GlobalWatchService.ENTRY_CREATED(filePath)
    }
  }

  private def registerPath(dir_path: String, file_name:Option[String], actor: ActorRef, events: Array[WatchEvent.Kind[_]], loadExists: Boolean) = {
    WatchingDirectories.actorsInfo.get((dir_path,file_name)) match {
      case Some(info) =>
        val newInfo:Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => {
          info.get(event) match {
            case Some(actors) => (event, (Array(actor) ++ actors))
            case None => (event, Array(actor))
          }
        }).toMap
        WatchingDirectories.actorsInfo.put((dir_path,file_name), info ++ newInfo)
        watchFileTask.watch(Paths.get(dir_path),actor.path.toString,events)
      case None =>
        val tmpEvents:Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => {(event, Array(actor))}).toMap
        WatchingDirectories.actorsInfo.put((dir_path,file_name), tmpEvents)
        watchFileTask.watch(Paths.get(dir_path),actor.path.toString,events)
    }

    if(file_name.isDefined && loadExists){
      log.info(s"Checking if file $dir_path/${file_name.get} already exist")
      broadcastMessageIfFileExists(actor, s"$dir_path/${file_name.get}")
    }

  }

}

object GlobalWatchService{
  sealed case class ENTRY_CREATED(path:Path)
  sealed case class ENTRY_MODIFIED(path:Path)
  sealed case class ENTRY_DELETED(path:Path)
  sealed case class REGISTER_WATCHER_PERFORMER(dir_path: String, file_name:Option[String],
                                               actor: ActorRef, events: Array[WatchEvent.Kind[_]],
                                               loadIfExists: Boolean)
} 
Example 52
Source File: FeyGenericActorReceiver.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.io.{File, FileOutputStream}
import java.net.URL
import java.nio.file.{Files, Paths}
import com.eclipsesource.schema._
import akka.actor.ActorRef
import com.eclipsesource.schema.SchemaValidator
import org.apache.commons.io.IOUtils
import play.api.libs.json._
import scala.concurrent.duration._
import scala.util.Properties._

abstract class FeyGenericActorReceiver(override val params: Map[String,String] = Map.empty,
                                       override val backoff: FiniteDuration = 1.minutes,
                                       override val connectTo: Map[String,ActorRef] = Map.empty,
                                       override val schedulerTimeInterval: FiniteDuration = 2.seconds,
                                       override val orchestrationName: String = "",
                                       override val orchestrationID: String = "",
                                       override val autoScale: Boolean = false) extends FeyGenericActor{

  private[fey] val feyCore = FEY_CORE_ACTOR.actorRef

  override final def processMessage[T](message: T, sender: ActorRef): Unit = {
    try {
      val jsonString = getJSONString(message)
      if(jsonString != "{}") {
        processJson(jsonString)
      }
      startBackoff()
    }catch{
      case e: Exception => log.error(e, s"Could not process message $message")
    }
  }

  private[fey] def processJson(jsonString: String) = {
    var orchID:String = "None"
    try{
      val orchestrationJSON = Json.parse(jsonString)
      orchID = (orchestrationJSON \ JSON_PATH.GUID).as[String]
      val valid = validJson(orchestrationJSON)
      if(valid && (orchestrationJSON \ JSON_PATH.COMMAND).as[String].toUpperCase != "DELETE"){
        checkForLocation(orchestrationJSON)
      }
      if(valid) {
        feyCore ! FeyCore.ORCHESTRATION_RECEIVED(orchestrationJSON, None)
      }else{
        log.warning(s"Could not forward Orchestration $orchID. Invalid JSON schema")
      }
    } catch {
      case e: Exception =>
        log.error(e, s"Orchestration $orchID could not be forwarded")
    }
  }

  
  def resolveCredentials(credentials: Option[JsObject]):Option[(String, String)] = {
    credentials match {
      case None => None
      case Some(cred) =>
        val user = (cred \ JSON_PATH.JAR_CRED_USER).as[String]
        val password = (cred \ JSON_PATH.JAR_CRED_PASSWORD).as[String]
        Option(envOrElse(user,user), envOrElse(password,password))
    }
  }

} 
Example 53
Source File: JsonReceiver.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.io.FileOutputStream
import java.net.URL
import java.io.File

import com.eclipsesource.schema._
import org.slf4j.LoggerFactory
import play.api.libs.json._
import JSON_PATH._
import java.nio.file.{Files, Paths}

import org.apache.commons.io.IOUtils
import org.apache.commons.codec.binary.Base64
import scala.util.Properties._


  def exceptionOnRun(e: Exception): Unit
}

object HttpBasicAuth {
  val BASIC = "Basic"
  val AUTHORIZATION = "Authorization"

  def encodeCredentials(username: String, password: String): String = {
    new String(Base64.encodeBase64((username + ":" + password).getBytes))
  }

  def getHeader(username: String, password: String): String =
    BASIC + " " + encodeCredentials(username, password)
} 
Example 54
Source File: JsonReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Paths}

import akka.actor.ActorRef
import akka.testkit.{EventFilter, TestProbe}
import ch.qos.logback.classic.Level
import scala.concurrent.duration.{DurationInt, FiniteDuration}

class JsonReceiverSpec extends BaseAkkaSpec with LoggingTest{


  class ReceiverTest(verifyActor: ActorRef) extends JsonReceiver{

    override def execute(): Unit = {
      verifyActor ! "EXECUTED"
      Thread.sleep(500)
    }

    override def exceptionOnRun(e: Exception): Unit = {
      verifyActor ! "INTERRUPTED"
    }

  }

  val verifyTB = TestProbe("RECEIVER-TEST")
  val receiver = new ReceiverTest(verifyTB.ref)

  "Executing validJson in JsonReceiver" should {
    "return false when json schema is not right" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid)) should be(false)
    }
    "log message to Error" in {
      ("Incorrect JSON schema \n/ensembles/0 \n\tErrors: Property command missing") should beLoggedAt(Level.ERROR)
    }
    "return true when Json schema is valid" in {
      receiver.validJson(getJSValueFromString(Utils_JSONTest.create_json_test)) should be(true)
    }
  }

  "Executing checkForLocation in JsonReceiver" should {
    "log message at Debug level" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid))
      "Location not defined in JSON" should beLoggedAt(Level.DEBUG)
    }
    "download jar dynamically from URL" in {
      receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.location_test))
      Files.exists(Paths.get(s"${CONFIG.DYNAMIC_JAR_REPO}/fey-stream.jar")) should be(true)
    }
  }

  var watchThread: Thread = _
  "Start a Thread with the JSON receiver" should {
    "Start Thread" in {
      watchThread = new Thread(receiver, "TESTING-RECEIVERS-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-RECEIVERS-IN-THREAD") should be(true)
    }
    "execute execute() method inside run" in {
      verifyTB.expectMsgAllOf(600.milliseconds,"EXECUTED","EXECUTED")
    }
  }

  "Interrupting the receiver Thread" should {
    "Throw Interrupted exception" in {
      EventFilter[InterruptedException]() intercept {
        watchThread.interrupt()
        watchThread.join()
      }
    }
    "execute exceptionOnRun method" in {
      verifyTB.receiveWhile(1200.milliseconds) {
        case "EXECUTED" =>
      }
      verifyTB.expectMsg("INTERRUPTED")
    }
  }


} 
Example 55
Source File: WatchServiceReceiverSpec.scala    From incubator-retired-iota   with Apache License 2.0 5 votes vote down vote up
package org.apache.iota.fey

import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets

import akka.testkit.{EventFilter, TestProbe}

import scala.concurrent.duration.{DurationInt, FiniteDuration}
import java.io.File

import ch.qos.logback.classic.Level

class WatchServiceReceiverSpec extends BaseAkkaSpec{

  val watcherTB = TestProbe("WATCH-SERVICE")
  var watchFileTask:WatchServiceReceiver = _
  val watchTestDir = s"${CONFIG.JSON_REPOSITORY}/watchtest"

  "Creating WatchServiceReceiver" should {
    "process initial files in the JSON repository" in {
      CONFIG.JSON_EXTENSION = "json.not"
      watchFileTask = new WatchServiceReceiver(watcherTB.ref)
      watcherTB.expectMsgAllClassOf(classOf[JsonReceiverActor.JSON_RECEIVED])
      CONFIG.JSON_EXTENSION = "json.test"
    }
  }

  var watchThread: Thread = _
  "Start a Thread with WatchServiceReceiver" should {
    "Start Thread" in {
      watchThread = new Thread(watchFileTask, "TESTING-WATCHER-IN-THREAD")
      watchThread.setDaemon(true)
      watchThread.start()
      TestProbe().isThreadRunning("TESTING-WATCHER-IN-THREAD") should be(true)
    }
  }

  "Start watching directory" should {
    "Starting receiving CREATED event" taggedAs(SlowTest) in {
      watchFileTask.watch(Paths.get(watchTestDir))
      Files.write(Paths.get(s"$watchTestDir/watched.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
    "Starting receiving UPDATE event" taggedAs(SlowTest) in {
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.delete_json_test.getBytes(StandardCharsets.UTF_8))
      Thread.sleep(200)
      Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8))
      watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED])
    }
  }

  "processJson" should {
    "log to warn level when json has invalid schema" in {
      Files.write(Paths.get(s"$watchTestDir/watched-invalid.json.test"), Utils_JSONTest.test_json_schema_invalid.getBytes(StandardCharsets.UTF_8))
      watchFileTask.processJson(s"$watchTestDir/watched-invalid.json.test",new File(s"$watchTestDir/watched-invalid.json.test"))
      s"File $watchTestDir/watched-invalid.json.test not processed. Incorrect JSON schema" should beLoggedAt(Level.WARN)
    }
  }

  "interrupt watchservice" should{
    "interrupt thread" in {
      watchThread.interrupt()
    }
  }

} 
Example 56
Source File: Tryout.scala    From spark-es   with Apache License 2.0 5 votes vote down vote up
import java.nio.file.Files

import org.apache.commons.io.FileUtils
import org.apache.spark.SparkContext
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.node.NodeBuilder
import org.apache.spark.elasticsearch._

object Tryout {
  def main(args: Array[String]): Unit = {
    val sparkContext = new SparkContext("local[2]", "SparkES")

    val dataDir = Files.createTempDirectory("elasticsearch").toFile

    dataDir.deleteOnExit()

    val settings = Settings.settingsBuilder()
      .put("path.home", dataDir.getAbsolutePath)
      .put("path.logs", s"${dataDir.getAbsolutePath}/logs")
      .put("path.data", s"${dataDir.getAbsolutePath}/data")
      .put("index.store.fs.memory.enabled", true)
      .put("index.number_of_shards", 1)
      .put("index.number_of_replicas", 0)
      .put("cluster.name", "SparkES")
      .build()

    val node = NodeBuilder.nodeBuilder().settings(settings).node()

    val client = node.client()

    sparkContext
      .parallelize(Seq(
      ESDocument(ESMetadata("2", "type1", "index1"), """{"name": "John Smith"}"""),
      ESDocument(ESMetadata("1", "type1", "index1"), """{"name": "Sergey Shumov"}""")
    ), 2)
      .saveToES(Seq("localhost"), "SparkES")
    
    client.admin().cluster().prepareHealth("index1").setWaitForGreenStatus().get()

    val documents = sparkContext.esRDD(
      Seq("localhost"), "SparkES", Seq("index1"), Seq("type1"), "name:sergey")

    println(documents.count())

    documents.foreach(println)

    sparkContext.stop()

    client.close()

    node.close()

    FileUtils.deleteQuietly(dataDir)
  }
} 
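Tryout relies on Files.createTempDirectory for a throwaway Elasticsearch home and removes it with Commons IO at the end. A small sketch of just that lifecycle, assuming commons-io is on the classpath (the object name is mine):

import java.nio.file.Files
import org.apache.commons.io.FileUtils

object TempDirSketch {
  def main(args: Array[String]): Unit = {
    // createTempDirectory returns a Path; toFile bridges to the legacy java.io API
    val dataDir = Files.createTempDirectory("elasticsearch").toFile
    try {
      println(s"scratch dir: ${dataDir.getAbsolutePath}")
    } finally {
      // deleteQuietly swallows IO errors, mirroring the cleanup in Tryout above
      FileUtils.deleteQuietly(dataDir)
    }
  }
}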
Example 57
Source File: LocalElasticSearch.scala    From spark-es   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.elasticsearch

import java.nio.file.Files
import java.util.UUID

import org.apache.commons.io.FileUtils
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.node.{NodeBuilder, Node}

class LocalElasticSearch(val clusterName: String = UUID.randomUUID().toString) {
  lazy val node = buildNode()
  lazy val client = node.client()
  val dataDir = Files.createTempDirectory("elasticsearch").toFile

  private var started = false

  def buildNode(): Node = {
    val settings = Settings.settingsBuilder()
      .put("path.home", dataDir.getAbsolutePath)
      .put("path.logs", s"${dataDir.getAbsolutePath}/logs")
      .put("path.data", s"${dataDir.getAbsolutePath}/data")
      .put("index.store.fs.memory.enabled", true)
      .put("index.number_of_shards", 1)
      .put("index.number_of_replicas", 0)
      .put("cluster.name", clusterName)
      .build()

    val instance = NodeBuilder.nodeBuilder().settings(settings).node()

    started = true

    instance
  }

  def close(): Unit = {
    if (started) {
      client.close()
      node.close()
    }

    try {
      FileUtils.forceDelete(dataDir)
    } catch {
      case e: Exception =>
    }
  }
} 
Example 58
Source File: MavenCoordinatesListReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wixpress.build.maven

import java.nio.file.{Files, Path}

import scala.io.Source

object MavenCoordinatesListReader {
  def coordinatesIn(filePath:Path):Set[Coordinates] = {
    val lines = Source.fromInputStream(Files.newInputStream(filePath)).getLines().toSet
    coordinatesInText(lines)
  }

  def coordinatesInText(content: Set[String]):Set[Coordinates] = {
    content
      .map(_.trim)
      .filterNot(_.isEmpty)
      .filterNot(_.startsWith("#"))
      .map(l=>Coordinates.deserialize(l))
  }
} 
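A possible call site for the reader above, assuming the usual group:artifact:version serialized form accepted by Coordinates.deserialize; the file name and object name are illustrative only.

import java.nio.charset.StandardCharsets
import java.nio.file.Files

import com.wixpress.build.maven.MavenCoordinatesListReader

object CoordinatesListReaderSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical list file; comments and blank lines are filtered out by the reader
    val listFile = Files.createTempFile("maven-deps", ".txt")
    val content = "# managed dependencies\ncom.example:some-artifact:1.0.0\n"
    Files.write(listFile, content.getBytes(StandardCharsets.UTF_8))
    println(MavenCoordinatesListReader.coordinatesIn(listFile))
  }
}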
Example 59
Source File: ZincAnalysisParserTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.build.zinc.analysis

import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.util.UUID

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.Coordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

class ZincAnalysisParserTest extends SpecificationWithJUnit {
  "ZincAnalysisParser" should {
    "parse repo with zinc analysis" in new baseCtx {
      private val parser = new ZincAnalysisParser(repoRoot)
      private val coordinatesToAnalyses: Map[Coordinates, List[ZincModuleAnalysis]] = parser.readModules()
      coordinatesToAnalyses must haveLength(greaterThan(0))
      private val analysisList: List[ZincModuleAnalysis] = coordinatesToAnalyses.head._2
      analysisList must haveLength(greaterThan(0))
    }
  }

  abstract class baseCtx extends Scope {
    val fileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot = fileSystem.getPath("repoRoot")
    Files.createDirectories(repoRoot)
    writeResourceAsFileToPath("/pom.xml", "pom.xml", "java-junit-sample/")
    writeResourceAsFileToPath("/aggregate-pom.xml", "pom.xml", "")
    writeResourceAsFileToPath("/compile.relations", "compile.relations","java-junit-sample/target/analysis/")
    writeResourceAsFileToPath("/test-compile.relations", "test-compile.relations","java-junit-sample/target/analysis/")

    private def writeResourceAsFileToPath(resource: String, fileName: String, path: String) = {
      if (path.nonEmpty)
        Files.createDirectories(repoRoot.resolve(path))
      val stream: InputStream = getClass.getResourceAsStream(s"$resource")
      val compileRelations = scala.io.Source.fromInputStream(stream).mkString
      Files.write(repoRoot.resolve(s"$path$fileName"), compileRelations.getBytes(StandardCharsets.UTF_8))
    }

    def path(withName: String) = repoRoot.resolve(withName)
    def random = UUID.randomUUID().toString
  }
} 
Example 60
Source File: BazelRcWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """#
        |# DO NOT EDIT - this line imports shared managed bazel configuration
        |#
        |import %workspace%/tools/bazelrc/.bazelrc.managed.dev.env
        |
        |#
        |# ADDITIONS ONLY UNDER THIS LINE
        |#
        |
      """.stripMargin
    writeToDisk(contents)
  }

  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve(".bazelrc"), contents.getBytes)
} 
Example 61
Source File: SourcesPackageWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path, StandardOpenOption}

import com.wix.bazel.migrator.model.{Package, Target}

class SourcesPackageWriter(repoRoot: Path, bazelPackages: Set[Package]) {
  def write(): Unit = {
    bazelPackages
      .flatMap(jvmTargetsAndRelativePathFromMonoRepoRoot)
      .flatMap(sourceDirAndRelativePackagePaths)
      .foreach(writeSourcesTarget)
  }

  private def jvmTargetsAndRelativePathFromMonoRepoRoot(bazelPackage: Package): Set[JvmTargetAndRelativePath] = {
    val r = bazelPackage.targets.collect {
      case jvm: Target.Jvm => (jvm, bazelPackage.relativePathFromMonoRepoRoot)
    }

    r.map(JvmTargetAndRelativePathFromMonoRepoRoot(_))
  }

  def sourceDirAndRelativePackagePaths(jvmTargetAndRelativePath: JvmTargetAndRelativePath): Set[SourceDirPathAndRelativePackagePath] = {
    val basePackagePath = repoRoot.resolve(jvmTargetAndRelativePath.relativePath)
    jvmTargetAndRelativePath.jvm.sources.map { source =>
      val sourceDirPath = basePackagePath.resolve(adjustSource(source))
      SourceDirPathAndRelativePackagePath(sourceDirPath, jvmTargetAndRelativePath.relativePath)
    }
  }

  private def writeSourcesTarget(s: SourceDirPathAndRelativePackagePath) =
    Files.write(
      s.sourceDirBuildPath,
      s.sourcesTarget.getBytes,
      StandardOpenOption.CREATE, StandardOpenOption.APPEND
    )


  private def adjustSource(source: String) = {
    if (source.startsWith("/"))
      source.drop(1)
    else
      source
  }

  private[migrator] case class SourcesTargetAndSourceDirPath(sourceDirBuildPath: Path, sourcesTarget: Array[Byte])

  private[migrator] case class JvmTargetAndRelativePath(jvm: Target.Jvm, relativePath: String)

  private[migrator] object JvmTargetAndRelativePathFromMonoRepoRoot {
    def apply(targetAndRelativePath: (Target.Jvm, String)) =
      JvmTargetAndRelativePath(targetAndRelativePath._1, targetAndRelativePath._2)
  }

}

private[migrator] case class SourceDirPathAndRelativePackagePath(sourceDirPath: Path, relativePackagePath: String){
  def sourcesTarget: String = {
    if (sourceDirPath.endsWith(relativePackagePath))
      """
        |sources()
        |""".stripMargin
    else
      s"""
         |sources(
         |    visibility = ["//$relativePackagePath:__pkg__"]
         |)
         |""".stripMargin
  }

  def sourceDirBuildPath: Path = sourceDirPath.resolve("BUILD.bazel")
} 
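The writer above appends to BUILD.bazel with StandardOpenOption.CREATE and APPEND so that several targets in the same package can share one file. A self-contained sketch of that open-option combination (temp path and object name are mine):

import java.nio.file.{Files, StandardOpenOption}

object AppendingWriteSketch {
  def main(args: Array[String]): Unit = {
    val buildFile = Files.createTempDirectory("sources-package").resolve("BUILD.bazel")
    // CREATE makes the file on the first write; APPEND adds to it on every later write
    Files.write(buildFile, "sources()\n".getBytes, StandardOpenOption.CREATE, StandardOpenOption.APPEND)
    Files.write(buildFile, "# second write appends rather than truncates\n".getBytes,
      StandardOpenOption.CREATE, StandardOpenOption.APPEND)
    println(new String(Files.readAllBytes(buildFile)))
  }
}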
Example 62
Source File: DockerImagesWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import com.wix.bazel.migrator.overrides.InternalTargetsOverrides

class DockerImagesWriter(repoRoot: Path, overrides: InternalTargetsOverrides) {

  private val dockerImagesRootPath = repoRoot.resolve("third_party/docker_images")

  def write(): Unit = {
    val images = overrides.targetOverrides.toSeq.flatMap(_.dockerImagesDeps).flatten.map(DockerImage(_)).toSet

    createBzlFile(images)
    createBuildFile(images)
  }

  private def writeToDisk(fileName: String, contents: String): Unit = {
    val filePath = dockerImagesRootPath.resolve(fileName)
    Files.createDirectories(dockerImagesRootPath)
    Files.createFile(filePath)
    Files.write(filePath, contents.getBytes)
  }

  private def createBzlFile(images: Set[DockerImage]): Unit = {
    val header =
      s"""load(
          |  "@io_bazel_rules_docker//container:container.bzl",
          |  "container_pull",
          |  container_repositories = "repositories"
          |)
          |
          |def docker_images():
          |  container_repositories()
          |""".stripMargin

    val contents = images.map(_.toContainerPullRule).mkString("\n\n")
    writeToDisk("docker_images.bzl", header + contents)
  }

  private def createBuildFile(images: Set[DockerImage]): Unit = {
    val header =
      s"""
         |package(default_visibility = ["//visibility:public"])
         |licenses(["reciprocal"])
         |load("@io_bazel_rules_docker//container:container.bzl", "container_image")
         |""".stripMargin

    val contents = images.map(_.toContainerImageRule).mkString("\n\n")

    writeToDisk("BUILD.bazel", header + contents)
  }
} 
Example 63
Source File: BazelRcRemoteWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcRemoteWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """# Copyright 2016 The Bazel Authors. All rights reserved.
       |#
       |# Licensed under the Apache License, Version 2.0 (the "License");
       |# you may not use this file except in compliance with the License.
       |# You may obtain a copy of the License at
       |#
       |#    http://www.apache.org/licenses/LICENSE-2.0
       |#
       |# Unless required by applicable law or agreed to in writing, software
       |# distributed under the License is distributed on an "AS IS" BASIS,
       |# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       |# See the License for the specific language governing permissions and
       |# limitations under the License.
       |
       |# Depending on how many machines are in the remote execution instance, setting
       |# this higher can make builds faster by allowing more jobs to run in parallel.
       |# Setting it too high can result in jobs that timeout, however, while waiting
       |# for a remote machine to execute them.
       |build:remote --jobs=100
       |
       |# (no need to import %workspace%/.bazelrc, bazel does this by default)
       |import %workspace%/tools/bazelrc/.bazelrc.remotesettings
       |
       |# Set various strategies so that all actions execute remotely. Mixing remote
       |# and local execution will lead to errors unless the toolchain and remote
       |# machine exactly match the host machine.
       |build:remote --spawn_strategy=remote
       |build:remote --strategy=Javac=remote
       |build:remote --strategy=Closure=remote
       |build:remote --genrule_strategy=remote
       |build:remote --define=EXECUTOR=remote
       |build:remote --strategy=Scalac=remote
       |test:remote --strategy=Scalac=remote
       |test:remote --test_tmpdir=/tmp
       |
       |# Enable the remote cache so action results can be shared across machines,
       |# developers, and workspaces.
       |build:remote --remote_cache=remotebuildexecution.googleapis.com
       |
       |# Enable remote execution so actions are performed on the remote systems.
       |build:remote --remote_executor=remotebuildexecution.googleapis.com
       |
       |# Set flags for uploading to BES in order to view results in the Bazel Build
       |# Results UI.
       |build:results --bes_backend="buildeventservice.googleapis.com"
       |build:results --bes_timeout=10s
       |
      """.stripMargin
    writeToDisk(contents)
  }

  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve(".bazelrc.remote"), contents.getBytes)


} 
Example 64
Source File: JDepsParserImpl.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.Files

import com.wix.bazel.migrator.model.SourceModule

import scala.collection.JavaConverters._

class JDepsParserImpl(sourceModules: Set[SourceModule]) extends JDepsParser {
  private val jarPattern = "(.*).jar".r("artifactIdAndVersion")
  private val dotFileLinePattern = " *\"([^\"]+)\" +-> +\"([^\"]+)\" *;".r("src", "dep")
  private val dependencyWithSourcePattern = "([^ ]+) \\(([^)]+)\\)".r("className", "resolvedFrom")
  private val providedDependencyPattern = "[^ ]+".r

  case class JdepsEntry(src: JVMClass, dependency: Option[JVMClass])

  private def extractEntry(currentModule: SourceModule, jdepsSource: String, jdepsTarget: String): JdepsEntry = {
    val sourceJvmClass = JVMClass(jdepsSource, currentModule)
    val emptyResult = JdepsEntry(sourceJvmClass, None)
    jdepsTarget match {
      case providedDependencyPattern() => emptyResult
      case dependencyWithSourcePattern(_, resolvedFrom) if resolvedFrom == "not found" => emptyResult
      case dependencyWithSourcePattern(className, resolvedFrom) =>
        val maybeDependency = toSourceModule(resolvedFrom, currentModule)
          .map(fromSourceModule => JVMClass(className, fromSourceModule, resolvedFrom == "test-classes"))
        JdepsEntry(sourceJvmClass, maybeDependency)
      case _ =>
        throw new RuntimeException(s"Could not match jdeps dependency with source '$jdepsTarget'")
    }
  }

  override def convert(deps: ClassDependencies, currentAnalysisModule: SourceModule): Map[JVMClass, Set[JVMClass]] = {
    Files.readAllLines(deps.dotFile).asScala.toList.collect {
      case dotFileLinePattern(src, dep) => extractEntry(currentAnalysisModule, src, dep)
    }.groupBy(_.src)
      .mapValues(_.flatMap(_.dependency).toSet)
  }

  private def toSourceModule(jar: String, currentSourceModule: SourceModule): Option[SourceModule] = {
    jar match {
      case "classes" => Some(currentSourceModule)
      case "test-classes" => Some(currentSourceModule)
      case jarPattern(artifactIdAndVersion) =>
        sourceModules.find(module => artifactIdAndVersion.contains(module.coordinates.artifactId))
      case _ => None
    }
  }
}

case class TargetFQNAndJar(targetFqn: String, jar: String) 
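JDepsParserImpl reads the jdeps dot output line by line with Files.readAllLines and matches each edge with a named-group regex. A reduced sketch of that loop, with a hypothetical dot line instead of real jdeps output:

import java.nio.charset.StandardCharsets
import java.nio.file.Files

import scala.collection.JavaConverters._

object DotFileLinesSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical content; the real input comes from jdeps -dotoutput
    val dotFile = Files.createTempFile("deps", ".dot")
    Files.write(dotFile, "  \"a.B\" -> \"c.D (classes)\";\n".getBytes(StandardCharsets.UTF_8))
    val linePattern = " *\"([^\"]+)\" +-> +\"([^\"]+)\" *;".r("src", "dep")
    Files.readAllLines(dotFile).asScala.collect {
      case linePattern(src, dep) => println(s"$src depends on $dep")
    }
  }
}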
Example 65
Source File: SourceFileTracer.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, FileSystems, Files, Path}

import com.wix.bazel.migrator.model.SourceModule
import MavenRelativeSourceDirPathFromModuleRoot.PossibleLocation
import com.wix.bazel.migrator.analyze.CodePath

trait SourceFileTracer {
  def traceSourceFile(module: SourceModule, fqn: String, pathToJar: String, testClass: Boolean): CodePath
}

class JavaPSourceFileTracer(repoRoot: Path,
                            processRunner: ProcessRunner = new JavaProcessRunner,
                            fileSystem: FileSystem = FileSystems.getDefault) extends SourceFileTracer {
  private val Command = "javap"

  private def parseFileName(stdOut: String) = {
    val firstLine = stdOut.split("\n")(0)
    firstLine.split('"') match {
      case Array(_, fileName) => fileName
      case _ => throw new RuntimeException(s"Unknown stdout format $stdOut")
    }
  }

  private def findLocationIn(relativePathFromMonoRepoRoot: String, possibleLocations: Set[PossibleLocation], filePath: String): Option[String] =
    possibleLocations.find { location => {
      val possiblePath = repoRoot.resolve(relativePathFromMonoRepoRoot).resolve(location).resolve(filePath)
      Files.exists(possiblePath)
    }
    }


  override def traceSourceFile(module: SourceModule, fqn: String, pathToClasses: String, testClass: Boolean): CodePath = {
    val packagePart = fqn.splitAt(fqn.lastIndexOf('.'))._1.replace('.', '/')
    val cmdArgs = List(
      "-cp",
      pathToClasses,
      fqn)
    val runResult = processRunner.run(repoRoot, "javap", cmdArgs)
    if (runResult.exitCode != 0) {
      throw new RuntimeException(s"Problem locating the source file of class $fqn in $pathToClasses")
    }
    val filePath = packagePart + "/" + parseFileName(runResult.stdOut)
    val locations = MavenRelativeSourceDirPathFromModuleRoot.getPossibleLocationFor(testClass)
    findLocationIn(module.relativePathFromMonoRepoRoot, locations, filePath) match {
      case Some(location) =>CodePath(module, location, filePath)
      case None => {
        throw new RuntimeException(s"Could not find location of $filePath in ${module.relativePathFromMonoRepoRoot}")
      }
    }
  }
}



object MavenRelativeSourceDirPathFromModuleRoot {
  type PossibleLocation = String
  private val mainCodePrefixes = Set("src/main")
  private val testCodePrefixes = Set("src/test", "src/it", "src/e2e")
  private val languages = Set("java", "scala")

  private val ProdCodeLocations: Set[PossibleLocation] =
    mainCodePrefixes.flatMap(prefix => languages.map(language => s"$prefix/$language"))

  private val TestCodeLocations: Set[PossibleLocation] =
    testCodePrefixes.flatMap(prefix => languages.map(language => s"$prefix/$language"))

  def getPossibleLocationFor(testCode:Boolean): Set[PossibleLocation] =
    if (testCode) TestCodeLocations else ProdCodeLocations
} 
Example 66
Source File: JDepsCommandImpl.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{Files, Path, Paths}


class JDepsCommandImpl(repoRoot: Path) extends JDepsCommand {

  override def analyzeClassesDependenciesPerJar(jarPath: String, classPath: List[String]): Option[ClassDependencies] = {
    val fileName = Paths.get(jarPath).getFileName.toString
    val dotDirectory = Files.createTempDirectory("dot")
    val classpath = classPath.mkString(":")
    val cmdArgs = List("jdeps",
      "-dotoutput",
      dotDirectory.toString,
      "-v",
      "-cp",
      classpath,
      jarPath)
    val process = (new ProcessBuilder).directory(repoRoot.toFile).command(cmdArgs:_*)
    process.redirectOutput()
    val process1 = process.start()
    val stream = process1.getInputStream
    process1.waitFor()
    val path = dotDirectory.resolve(fileName + ".dot")
    if (Files.exists(path)) Some(ClassDependencies(path)) else None
  }
} 
Example 67
Source File: CachingEagerEvaluatingDependencyAnalyzer.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze

import java.nio.file.{Files, Path, Paths}
import java.util
import java.util.concurrent.atomic.AtomicInteger

import com.fasterxml.jackson.annotation.JsonTypeInfo
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model._
import com.wixpress.build.maven.MavenScope
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.collection.parallel.ParMap

//this is needed since currently the transformer isn't thread safe but the dependency analyzer is
class CachingEagerEvaluatingDependencyAnalyzer(sourceModules: Set[SourceModule], dependencyAnalyzer: DependencyAnalyzer, performSourceAnalysis: Boolean) extends DependencyAnalyzer {
  private val log = LoggerFactory.getLogger(getClass)
  private val cachePath = Files.createDirectories(Paths.get("./cache"))
  private val objectMapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)
    .registerModule(new RelativePathSupportingModule)
    .registerModule(new SourceModuleSupportingModule(sourceModules))
    .addMixIn(classOf[Target], classOf[TypeAddingMixin])
    .addMixIn(classOf[CodePurpose], classOf[TypeAddingMixin])
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .addMixIn(classOf[MavenScope], classOf[TypeAddingMixin])

  private val collectionType = objectMapper.getTypeFactory.constructCollectionType(classOf[util.Collection[Code]], classOf[Code])
  private val clean = performSourceAnalysis

  private def cachePathForSourceModule(m: SourceModule) = {
    cachePath.resolve(m.relativePathFromMonoRepoRoot + ".cache")
  }

  private val size = sourceModules.size
  private val counter = new AtomicInteger()
  private val tenthSize = size / 10

  private def initCachePathForSourceModule(p: Path) = Files.createDirectories(p.getParent)

  private def maybeCodeFromCache(p: Path): Option[List[Code]] = {
    if (clean || !Files.exists(p)) return None
    try {
      val value: util.Collection[Code] = objectMapper.readValue(p.toFile, collectionType)
      val codeList = value.asScala.toList
      Some(codeList)
    } catch {
      case e: Exception =>
        log.warn(s"Error reading $p ,deleting cache file.")
        log.warn(e.getMessage)
        Files.deleteIfExists(p)
        None
    }
  }

  private def retrieveCodeAndCache(m: SourceModule, cachePath: Path): List[Code] = {
    val codeList = dependencyAnalyzer.allCodeForModule(m)
    Files.deleteIfExists(cachePath)
    initCachePathForSourceModule(cachePath)
    Files.createFile(cachePath)
    try {
      objectMapper.writeValue(cachePath.toFile, codeList)
    } catch {
      case e: InterruptedException =>
        log.warn(s"aborting write to file $cachePath")
        Files.deleteIfExists(cachePath)
        throw e
      case e: Exception =>
        log.warn(s"could not write to file $cachePath")
        log.warn(e.getMessage)
    }
    codeList
  }

  private def calculateMapEntryFor(sourceModule: SourceModule) = {
    printProgress()
    val cachePath = cachePathForSourceModule(sourceModule)
    (sourceModule, maybeCodeFromCache(cachePath).getOrElse(retrieveCodeAndCache(sourceModule, cachePath)))
  }

  private def printProgress(): Unit = {
    if (tenthSize > 0) {
      val currentCount = counter.incrementAndGet()
      if (currentCount % tenthSize == 0) {
        log.info(s"DependencyAnalyzer:allCodeForModule:\t ${currentCount / tenthSize * 10}% done")
      }
    }
  }

  private val allCode: ParMap[SourceModule, List[Code]] = sourceModules.par.map(calculateMapEntryFor).toMap

  override def allCodeForModule(module: SourceModule): List[Code] = allCode(module)
}

@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, property = "__class")
trait TypeAddingMixin 
Example 68
Source File: Manifest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.io._
import java.nio.file.{Files, Path}
import java.util.jar

import com.wix.bazel.migrator.Manifest.Attributes

case class Manifest(ImplementationArtifactId: String,
                    ImplementationVersion: String,
                    ImplementationVendorId: String) {

  @throws[IOException]
  def write(dir: Path): Path = {
    val m = new jar.Manifest()
    val attr = m.getMainAttributes
    attr.put(jar.Attributes.Name.MANIFEST_VERSION, "1.0") // mandatory attribute
    pairs foreach (attr.putValue _).tupled
    val file = manifestFileAt(dir)
    val os = Files.newOutputStream(file)
    try {
      m.write(os)
      file
    } finally {
      os.close()
    }
  }

  private def pairs: Seq[(String, String)] = Seq(
    Attributes.ImplementationArtifactId -> ImplementationArtifactId,
    Attributes.ImplementationVersion -> ImplementationVersion,
    Attributes.ImplementationVendorId -> ImplementationVendorId)

  @throws[IOException]
  private def manifestFileAt(dir: Path) = {
    Files.createDirectories(dir)
    dir.resolve("MANIFEST.MF")
  }

}

private object Manifest {

  object Attributes {
    val ImplementationArtifactId = "Implementation-ArtifactId"
    val ImplementationVersion = "Implementation-Version"
    val ImplementationVendorId = "Implementation-Vendor-Id"
  }

} 
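Manifest.write pairs Files.newOutputStream with a try/finally so the stream is always closed. A minimal sketch of the same pattern, writing a bare MANIFEST.MF to a temp directory (names are mine, not from the migrator):

import java.nio.file.Files
import java.util.jar

object ManifestWriteSketch {
  def main(args: Array[String]): Unit = {
    val dir = Files.createTempDirectory("manifest-sketch")
    val manifest = new jar.Manifest()
    manifest.getMainAttributes.put(jar.Attributes.Name.MANIFEST_VERSION, "1.0")
    val file = dir.resolve("MANIFEST.MF")
    // newOutputStream creates the file; closing it in finally mirrors Manifest.write above
    val os = Files.newOutputStream(file)
    try manifest.write(os) finally os.close()
    println(new String(Files.readAllBytes(file)))
  }
}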
Example 69
Source File: GitIgnoreCleaner.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import scala.collection.JavaConverters._

class GitIgnoreCleaner(repoRoot: Path, blackListItems: Set[String] = GitIgnoreCleaner.DefaultBlackListItems) {
  val gitIgnorePath = repoRoot.resolve(".gitignore")

  def clean() = if (Files.isRegularFile(gitIgnorePath)) {
    val lines = Files.readAllLines(gitIgnorePath)
    val modified = removeBlackListItems(lines.asScala)

    if (lines != modified)
      Files.write(gitIgnorePath, modified.asJava)
  }

  private def removeBlackListItems(lines: Seq[String]): Seq[String] = lines.filterNot(blackListItems)
}

object GitIgnoreCleaner {
  val DefaultBlackListItems = Set(
    "maven"
  )
} 
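GitIgnoreCleaner round-trips a file through Files.readAllLines and the line-oriented Files.write overload, converting between Java and Scala collections. A small sketch of that round trip with a hypothetical .gitignore:

import java.nio.file.Files

import scala.collection.JavaConverters._

object GitIgnoreSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical content; "maven" is the default blacklisted entry above
    val gitIgnore = Files.createTempFile("gitignore-sketch", ".gitignore")
    Files.write(gitIgnore, java.util.Arrays.asList("target/", "maven", "*.iml"))
    val kept = Files.readAllLines(gitIgnore).asScala.filterNot(Set("maven"))
    Files.write(gitIgnore, kept.asJava) // write(Path, Iterable[CharSequence]) rewrites the file
    println(Files.readAllLines(gitIgnore))
  }
}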
Example 70
Source File: BazelRcManagedDevEnvWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path, StandardOpenOption}


class BazelRcManagedDevEnvWriter(repoRoot: Path, defaultOptions: List[String]) {

  private val bazelRcManagedDevEnvPath = repoRoot.resolve("tools/bazelrc/.bazelrc.managed.dev.env")

  def resetFileWithDefaultOptions(): Unit = {
    deleteIfExists()
    appendLines(defaultOptions)
  }

  def appendLine(line: String): Unit = appendLines(List(line))

  def appendLines(lines: List[String]): Unit = writeToDisk(lines.mkString("", System.lineSeparator(), System.lineSeparator()))

  private def deleteIfExists(): Unit = Files.deleteIfExists(bazelRcManagedDevEnvPath)

  private def writeToDisk(contents: String): Unit = {
    Files.createDirectories(bazelRcManagedDevEnvPath.getParent)
    Files.write(bazelRcManagedDevEnvPath, contents.getBytes, StandardOpenOption.APPEND, StandardOpenOption.CREATE)
  }

}

object BazelRcManagedDevEnvWriter {
  val defaultExodusOptions: List[String] = List(
    "# fetch",
    "fetch --experimental_multi_threaded_digest=true",
    "",
    "# query",
    "query --experimental_multi_threaded_digest=true",
    "",
    "# test",
    "test --test_tmpdir=/tmp",
    "test --test_output=errors",
    "",
    "# build",
    "build:bazel16uplocal --action_env=PLACE_HOLDER=SO_USING_CONFIG_GROUP_WILL_WORK_BW_CMPTBL",
    "build --strategy=Scalac=worker",
    "build --strict_java_deps=off",
    "build --strict_proto_deps=off",
    "build --experimental_multi_threaded_digest=true",
    "",
    "# this flag makes Bazel keep the analysis cache when test flags such as 'test_arg' (and other 'test_xxx' flags) change",
    "build --trim_test_configuration=true",
    "",
    "# the following flags serve tests but associated with the build command in order to avoid mutual analysis cache",
    "# invalidation between test commands and build commands (see https://github.com/bazelbuild/bazel/issues/7450)",
    "build --test_env=BUILD_TOOL=BAZEL",
    "build --test_env=DISPLAY",
    "build --test_env=LC_ALL=en_US.UTF-8",
  )
} 
Example 71
Source File: SourceModulesOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.build.maven.analysis.SourceModulesOverrides

object SourceModulesOverridesReader {

  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  def from(repoRoot: Path): SourceModulesOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("source_modules.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[SourceModulesOverrides]
      )
    else
      SourceModulesOverrides.empty
  }

} 
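Several of the overrides readers below follow the same shape: check Files.exists, then hand Files.newBufferedReader to a Jackson ObjectMapper, otherwise fall back to an empty model. A generic sketch of that shape with a hypothetical override case class and path:

import java.nio.file.{Files, Path, Paths}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// Hypothetical override model, only for illustration
case class SketchOverrides(modules: List[String] = Nil)

object OverridesReaderSketch {
  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  def from(overridesPath: Path): SketchOverrides =
    if (Files.exists(overridesPath))
      // newBufferedReader streams the file to Jackson instead of loading it all up front
      mapper.readValue(Files.newBufferedReader(overridesPath), classOf[SketchOverrides])
    else
      SketchOverrides()

  def main(args: Array[String]): Unit =
    println(from(Paths.get("bazel_migration/source_modules.overrides")))
}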
Example 72
Source File: GeneratedCodeOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object GeneratedCodeOverridesReader {
  private val mapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)

  def from(repoRoot: Path): GeneratedCodeLinksOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("code_paths.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[GeneratedCodeLinksOverrides]
      )
    else
      GeneratedCodeLinksOverrides.empty
  }
} 
Example 73
Source File: MavenArchiveTargetsOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object MavenArchiveTargetsOverridesReader {
  def from(repoRoot: Path): MavenArchiveTargetsOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("maven_archive_targets.overrides")
    if (Files.exists(overridesPath)) {
      val objectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
      objectMapper.readValue(Files.readAllBytes(overridesPath), classOf[MavenArchiveTargetsOverrides])
    } else {
      MavenArchiveTargetsOverrides(Set.empty)
    }
  }

} 
Example 74
Source File: InternalTargetOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model.TestType
import com.wix.bazel.migrator.utils.TypeAddingMixin

object InternalTargetOverridesReader {
  private val objectMapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

  def from(repoRootPath: Path): InternalTargetsOverrides = {
    val internalTargetsOverrides = repoRootPath.resolve("bazel_migration").resolve("internal_targets.overrides")

    if (Files.isReadable(internalTargetsOverrides)) {
      objectMapper.readValue(Files.newInputStream(internalTargetsOverrides), classOf[InternalTargetsOverrides])
    } else {
      InternalTargetsOverrides()
    }
  }
} 
Example 75
Source File: WorkspaceOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

object WorkspaceOverridesReader {
  def from(repoRoot: Path): WorkspaceOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("workspace.suffix.overrides")
    if (Files.exists(overridesPath))
      WorkspaceOverrides(readPath(overridesPath))
    else
      WorkspaceOverrides("")
  }

  private def readPath(path: Path) = new String(Files.readAllBytes(path))

}

case class WorkspaceOverrides(suffix: String) 
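WorkspaceOverridesReader slurps the whole suffix file with Files.readAllBytes and the String constructor. A standalone sketch of that idiom, making the charset explicit (object name and sample content are mine):

import java.nio.charset.StandardCharsets
import java.nio.file.Files

object ReadWholeFileSketch {
  def main(args: Array[String]): Unit = {
    val suffixFile = Files.createTempFile("workspace", ".suffix")
    Files.write(suffixFile, "# extra workspace rules\n".getBytes(StandardCharsets.UTF_8))
    // readAllBytes plus the String constructor is the shortest way to slurp a small file
    val suffix = new String(Files.readAllBytes(suffixFile), StandardCharsets.UTF_8)
    print(suffix)
  }
}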
Example 76
Source File: AdditionalDepsByMavenDepsOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

import scala.util.{Failure, Success, Try}

object AdditionalDepsByMavenDepsOverridesReader {
  private val mapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)

  def from(filepath: Path): AdditionalDepsByMavenDepsOverrides = {
    if (Files.exists(filepath))
      readContentIn(filepath)
    else
      AdditionalDepsByMavenDepsOverrides.empty
  }

  private def readContentIn(filepath: Path) = {
    Try(mapper.readValue(
      Files.newBufferedReader(filepath),
      classOf[AdditionalDepsByMavenDepsOverrides]
    )) match {
      case Success(overrides) => overrides
      case Failure(e) => throw OverrideParsingException(s"cannot parse $filepath", e)
    }
  }
} 
Example 77
Source File: InternalFileDepsOverridesReader.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object InternalFileDepsOverridesReader {

  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  def from(repoRoot: Path): InternalFileDepsOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("internal_file_deps.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[InternalFileDepsOverrides]
      )
    else
      InternalFileDepsOverrides.empty
  }

} 
Example 78
Source File: Persister.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.io.File
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{Files, Paths}
import java.time.Instant
import java.time.temporal.TemporalUnit
import java.util

import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model.{CodePurpose, Package, Target, TestType}
import com.wix.bazel.migrator.utils.{IgnoringIsArchiveDefMixin, IgnoringIsProtoArtifactDefMixin, IgnoringIsWarDefMixin, TypeAddingMixin}
import com.wix.build.maven.analysis.SourceModules
import com.wixpress.build.maven.{Coordinates, MavenScope, Packaging}

import scala.collection.JavaConverters._

object Persister {

  private val transformedFile = new File("dag.bazel")
  private val mavenCache = Paths.get("classpathModules.cache")
  val objectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
    .addMixIn(classOf[Target], classOf[TypeAddingMixin])
    .addMixIn(classOf[CodePurpose], classOf[TypeAddingMixin])
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .addMixIn(classOf[MavenScope], classOf[TypeAddingMixin])
    .addMixIn(classOf[Packaging], classOf[IgnoringIsArchiveDefMixin])
    .addMixIn(classOf[Packaging], classOf[IgnoringIsWarDefMixin])
    .addMixIn(classOf[Coordinates], classOf[IgnoringIsProtoArtifactDefMixin])
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

  def persistTransformationResults(bazelPackages: Set[Package]): Unit = {
    println("Persisting transformation")
    objectMapper.writeValue(transformedFile, bazelPackages)
  }

  def readTransformationResults(): Set[Package] = {
    val collectionType = objectMapper.getTypeFactory.constructCollectionType(classOf[util.Collection[Package]], classOf[Package])
    val value: util.Collection[Package] = objectMapper.readValue(transformedFile, collectionType)
    val bazelPackages = value.asScala.toSet
    bazelPackages
  }

  def persistMavenClasspathResolution(sourceModules: SourceModules): Unit = {
    println("Persisting maven")
    objectMapper.writeValue(mavenCache.toFile, sourceModules)
  }

  def readTransMavenClasspathResolution(): SourceModules = {
    objectMapper.readValue[SourceModules](mavenCache.toFile, classOf[SourceModules])
  }

  def mavenClasspathResolutionIsUnavailableOrOlderThan(amount: Int, unit: TemporalUnit): Boolean =
    !Files.isReadable(mavenCache) ||
      lastModifiedMavenCache().toInstant.isBefore(Instant.now().minus(amount, unit))

  private def lastModifiedMavenCache() =
    Files.readAttributes(mavenCache, classOf[BasicFileAttributes]).lastModifiedTime()

} 
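Persister decides whether the Maven cache is stale by combining Files.isReadable with the lastModifiedTime from Files.readAttributes. A sketch of just that check, with a hypothetical cache path and threshold:

import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{Files, Path, Paths}
import java.time.Instant
import java.time.temporal.ChronoUnit

object CacheStalenessSketch {
  def isUnavailableOrOlderThan(cache: Path, amount: Int, unit: ChronoUnit): Boolean =
    !Files.isReadable(cache) ||
      Files.readAttributes(cache, classOf[BasicFileAttributes])
        .lastModifiedTime().toInstant
        .isBefore(Instant.now().minus(amount, unit))

  def main(args: Array[String]): Unit =
    println(isUnavailableOrOlderThan(Paths.get("classpathModules.cache"), 12, ChronoUnit.HOURS))
}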
Example 79
Source File: PreludeWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import com.wix.bazel.migrator.PreludeWriter._

class PreludeWriter(repoRoot: Path, preludeContent: Seq[String]) {
  def write(): Unit = {
    val path = repoRoot.resolve("tools/build_rules/")
    Files.createDirectories(path)

    writeEmptyBuildFile(path)
    writePrelude(path)
  }

  private def writePrelude(dest: Path): Unit = {
    writeToDisk(dest, "prelude_bazel", preludeContent.mkString(System.lineSeparator))
  }

  private def writeEmptyBuildFile(dest: Path): Unit =
    writeToDisk(dest, "BUILD.bazel", "")

  private def writeToDisk(dest: Path, filename: String, content: String): Unit =
    Files.write(dest.resolve(filename), content.getBytes)
}

object PreludeWriter {
  val ScalaLibraryImport = """|load(
                              |    "@io_bazel_rules_scala//scala:scala.bzl",
                              |    "scala_binary",
                              |    "scala_library",
                              |    "scala_test",
                              |    "scala_macro_library",
                              |    "scala_specs2_junit_test",
                              |)
                           """.stripMargin
  val ScalaImport = """load("@io_bazel_rules_scala//scala:scala_import.bzl", "scala_import",)"""
  val JavaTestImport = """load("@rules_jvm_test_discovery//:java_test_discovery.bzl", "java_test_discovery")"""
  val TestImport = """load("//:tests.bzl", "specs2_unit_test", "specs2_ite2e_test", "specs2_mixed_test")"""
  val Junit5Import = """load("//:junit5.bzl", "java_junit5_test")"""
  val SourcesImport = """load("//:macros.bzl", "sources")"""
} 
Example 80
Source File: TemplateOfThirdPartyDepsSkylarkFileWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class TemplateOfThirdPartyDepsSkylarkFileWriter(repoRoot: Path, mavenArchiveMacroPath: String) {

  def write(): Unit = {
    val thirdPartyDepsSkylarkFileContents =
      s"""
         |load("$mavenArchiveMacroPath", "maven_archive", "maven_proto")
         |
         |def third_party_dependencies():
      """.stripMargin

    writeToDisk(thirdPartyDepsSkylarkFileContents)
    createBuildFileIfMissing()
  }

  private def writeToDisk(thirdPartyDepsSkylarkFileContents: String): Unit =
    Files.write(repoRoot.resolve("third_party.bzl"), thirdPartyDepsSkylarkFileContents.getBytes)

  private def createBuildFileIfMissing(): Unit = {
    val buildFilePath = repoRoot.resolve("BUILD.bazel")
    if (!Files.exists(buildFilePath))
      Files.createFile(buildFilePath)
  }
} 
Example 81
Source File: BazelRcRemoteSettingsWriter.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcRemoteSettingsWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """# NOTE - THIS FILE IS MANUALLY DUPLICATED INSIDE WAZEL CONTAINER (see BazelRcRemoteSettingsWriter.writeToDisk for explanation)
        |
        |# Remote Build Execution requires a strong hash function, such as SHA256.
        |startup --host_jvm_args=-Dbazel.DigestFunction=SHA256
        |
        |# Set several flags related to specifying the toolchain and java properties.
        |# These flags are duplicated rather than imported from (for example)
        |# %workspace%/configs/debian8_clang/0.2.0/toolchain.bazelrc to make this
        |# bazelrc a standalone file that can be copied more easily.
        |build:rbe_based --host_javabase=@core_server_build_tools//rbe-toolchains/jdk:jdk8
        |build:rbe_based --javabase=@core_server_build_tools//rbe-toolchains/jdk:jdk8
        |build --crosstool_top=@core_server_build_tools//toolchains:crosstool_top
        |build --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
        |build --extra_toolchains=@core_server_build_tools//toolchains:extra_toolchains
        |build --host_platform=@core_server_build_tools//rbe-toolchains/jdk:rbe_ubuntu1604
        |build --platforms=@core_server_build_tools//rbe-toolchains/jdk:rbe_ubuntu1604
        |build:rbe_based --action_env=PLACE_HOLDER=SO_USING_CONFIG_GROUP_WILL_WORK_BW_CMPTBL
        |
        |# Enable encryption.
        |build --tls_enabled=true
        |
        |# Enforce stricter environment rules, which eliminates some non-hermetic
        |# behavior and therefore improves both the remote cache hit rate and the
        |# correctness and repeatability of the build.
        |build --experimental_strict_action_env=true
        |
        |# Set a higher timeout value, just in case.
        |build --remote_timeout=3600
        |
        |# Enable authentication. This will pick up application default credentials by
        |# default. You can use --auth_credentials=some_file.json to use a service
        |# account credential instead.
        |build --auth_enabled=true
        |
        |#The following environment variable is used by bazel integration e2e tests which need to know if we're using the
        |#`remote` configuration and so add custom toolchains which means the tests need to add them as well
        |test --test_env=REMOTE="true"
        |
        |test --test_env=CC
        |
        |build:rbe_based --extra_execution_platforms=@core_server_build_tools//platforms:rbe_small,@core_server_build_tools//platforms:rbe_large,@core_server_build_tools//platforms:rbe_default
        |test:rbe_based --extra_execution_platforms=@core_server_build_tools//platforms:rbe_small,@core_server_build_tools//platforms:rbe_large,@core_server_build_tools//platforms:rbe_default
        |
      """.stripMargin
    writeToDisk(contents)
  }

  // currently this file is duplicated between the global location (generated by the migrator) and between wazel container.
  // This is because docker cannot ADD files if they're not in the build context (symlinks included)
  // The global file is currently used for the jenkins rbe step AND gcb container (which runs rbe)
  // plan for removing this duplication - once we move to building all our images with docker-rules,
  // move .bazelrc.remotesettings to be a resource for both the gcb-bazel-step container AND for wazel container
  // (NOTE - if jenkins is still alive when this happens, it should also be added to the jenkins execution image)
  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve("tools/bazelrc/.bazelrc.remotesettings"), contents.getBytes)
} 
Example 82
Source File: MavenCoordinatesListReaderIT.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.utils

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, NoSuchFileException, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.MavenCoordinatesListReader
import com.wixpress.build.maven.MavenMakers.someCoordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

//noinspection TypeAnnotation
class MavenCoordinatesListReaderIT extends SpecificationWithJUnit {
  "MavenCoordinatesListReader" should {
    "read file with coordinates" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                            |${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA,coordinatesB)
    }

    "ignore empty line" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                           |
                           |${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA,coordinatesB)
    }

    "ignore preceding and trailing spaces" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"    ${coordinatesA.serialized}   "
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "ignore lines that starts with #" in new Ctx{
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s"""${coordinatesA.serialized}
                            |#${coordinatesB.serialized}""".stripMargin
      val filePath:Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "throw exception in case file is missing" in new Ctx{
      MavenCoordinatesListReader.coordinatesIn(fs.getPath("non-existing-file")) must throwA[NoSuchFileException]
    }
  }

  trait Ctx extends Scope{
    val fs = MemoryFileSystemBuilder.newLinux().build()
    def fileWithContent(content:String):Path = {
      val path = Files.createTempFile(fs.getPath("/"),"",".txt")
      Files.write(path, content.getBytes(StandardCharsets.UTF_8))
    }
  }

} 
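The IT above shows a useful testing trick: because Files works against any java.nio.file.FileSystem, the memoryfilesystem library lets the reader run without touching disk. A minimal sketch of that setup (file content is illustrative):

import java.nio.charset.StandardCharsets
import java.nio.file.Files

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder

object InMemoryFsSketch {
  def main(args: Array[String]): Unit = {
    // An in-memory filesystem lets Files-based code run in tests without real IO
    val fs = MemoryFileSystemBuilder.newLinux().build()
    val file = Files.createTempFile(fs.getPath("/"), "coords", ".txt")
    Files.write(file, "com.example:artifact:1.0.0".getBytes(StandardCharsets.UTF_8))
    println(Files.readAllLines(file))
    fs.close()
  }
}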
Example 83
Source File: DockerImagesWriterTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import better.files.File
import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.overrides.{InternalTargetOverride, InternalTargetsOverrides}
import org.specs2.matcher.{Matcher, Scope}
import org.specs2.mutable.SpecificationWithJUnit

class DockerImagesWriterTest extends SpecificationWithJUnit {
  abstract class ctx extends Scope{

    def containExactlyOnce(substr: String): Matcher[String] = {
      {a:String => a.indexOf(substr) must not be_== -1} and {a:String => a.indexOf(substr) must beEqualTo(a.lastIndexOf(substr))}
    }

    val rootfs: Path = MemoryFileSystemBuilder.newLinux().build().getPath("repo-root")
    val overrideWithDockerImages = InternalTargetOverride("some-label", dockerImagesDeps = Option(List("mysql:5.7", "docker-repo.wixpress.com/com.wixpress.whatever/whatever:1.234.5")))
    def overrides: Set[InternalTargetOverride]
    def writer = new DockerImagesWriter(rootfs, InternalTargetsOverrides(overrides))
  }

  "DockerImagesWriter" should {
    "create docker_images.bzl in third_party/docker_images" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set.empty
      writer.write()
      Files.exists(rootfs.resolve("third_party/docker_images/docker_images.bzl")) should beTrue
    }

    "create BUILD.bazel file in third_party/docker_images" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set.empty
      writer.write()
      Files.exists(rootfs.resolve("third_party/docker_images/BUILD.bazel")) should beTrue
    }

    "fill default values in container_pull for short-form image" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()
      val expected: String =
        s"""|  container_pull(
            |    name = "mysql_5.7",
            |    registry = "index.docker.io",
            |    repository = "library/mysql",
            |    tag = "5.7"
            |  )""".stripMargin
      File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString must contain(expected)
    }

    "write values as-is in container_pull for full form image" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()
      val expected: String =
        s"""|  container_pull(
            |    name = "com.wixpress.whatever_whatever_1.234.5",
            |    registry = "docker-repo.wixpress.com",
            |    repository = "com.wixpress.whatever/whatever",
            |    tag = "1.234.5"
            |  )""".stripMargin
      File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString must contain(expected)
    }

    "write container_image in BUILD file" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()
      val expected: String =
        s"""container_image(name="com.wixpress.whatever_whatever_1.234.5", base="@com.wixpress.whatever_whatever_1.234.5//image")""".stripMargin

      File(rootfs.resolve("third_party/docker_images/BUILD.bazel")).contentAsString must contain(expected)
    }

    "deduplicate images in BUILD file" in new ctx {
      def overrides = Set(overrideWithDockerImages, overrideWithDockerImages.copy(label = "duplicate"))
      writer.write()
      private val fileContent: String = File(rootfs.resolve("third_party/docker_images/BUILD.bazel")).contentAsString
      fileContent must containExactlyOnce("container_image(name=\"mysql_5.7\",")
    }

    "deduplicate images in docker_images.bzl file" in new ctx {

      def overrides = Set(overrideWithDockerImages, overrideWithDockerImages.copy(label = "duplicate"))
      writer.write()
      private val fileContent: String = File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString
      fileContent must containExactlyOnce("name = \"mysql_5.7\",")
    }
  }
} 
Example 84
Source File: JDepsParserTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.Files

import com.wix.bazel.migrator.model.{ModuleDependencies, SourceModule}
import com.wixpress.build.maven.Coordinates
import org.specs2.mutable.SpecificationWithJUnit


class JDepsParserTest extends SpecificationWithJUnit {
  "JDepsParser" should {
    "remove 3rd party deps" in {
      val jdepsOutput = Files.createTempFile("jdeps-output", ".txt")

      Files.write(jdepsOutput, jdepsOutputContent.getBytes)

      val coreCommon =
        SourceModule("commons/core-common", Coordinates("exodus-demo.commons", "core-common", "0.0.1-SNAPSHOT"), Set.empty, ModuleDependencies())
      val coreBusinessSomeService = SourceModule("products/some-service/core-business-some-service", Coordinates("exodus-demo.products", "core-business-some-service", "0.0.1-SNAPSHOT"), Set.empty, ModuleDependencies())
      val coreRepositorySomeRepo = SourceModule("repositories/some-repository/core-repository-some-repo", Coordinates("exodus-demo.repositories", "core-repository-some-repo", "0.0.1-SNAPSHOT"), Set.empty, ModuleDependencies())
      val parser = new JDepsParserImpl(Set(
              coreCommon,
              coreBusinessSomeService,
              coreRepositorySomeRepo,
            ))
      parser.convert(ClassDependencies(jdepsOutput), coreBusinessSomeService) mustEqual Map(
        JVMClass("exodus.demo.core.business.some.service.SomeServiceCoreConfiguration",coreBusinessSomeService) -> Set.empty,
        JVMClass("exodus.demo.core.business.some.service.api.SomeService",coreBusinessSomeService) -> Set.empty,
        JVMClass("exodus.demo.core.business.some.service.impl.DefaultSomeService", coreBusinessSomeService) -> Set(
          JVMClass("exodus.demo.commons.core.something.SomeCommonBusinessUtil", coreCommon),
          JVMClass("exodus.demo.core.business.some.service.api.SomeService", coreBusinessSomeService),
          JVMClass("exodus.demo.core.repositories.some.repo.SomeRepository", coreRepositorySomeRepo),
        )
      )
    }
  }

  val jdepsOutputContent: String = """digraph "core-business-some-service-0.0.1-SNAPSHOT.jar" {
                             |    // Path: products/some-service/core-business-some-service/target/core-business-some-service-0.0.1-SNAPSHOT.jar
                             |   "exodus.demo.core.business.some.service.SomeServiceCoreConfiguration"     -> "java.lang.Object";
                             |   "exodus.demo.core.business.some.service.SomeServiceCoreConfiguration" ->    "org.springframework.context.annotation.ComponentScan (not found)";
                             |   "exodus.demo.core.business.some.service.SomeServiceCoreConfiguration"   ->    "org.springframework.context.annotation.Configuration (not found)";
                             |   "exodus.demo.core.business.some.service.SomeServiceCoreConfiguration" -> "org.springframework.context.annotation.Import (not found)";
                             |   "exodus.demo.core.business.some.service.api.SomeService" -> "java.lang.Object";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" -> "exodus.demo.commons.core.something.SomeCommonBusinessUtil (core-common-0.0.1-SNAPSHOT.jar)";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService"   -> "exodus.demo.core.business.some.service.api.SomeService (classes)";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" ->    "exodus.demo.core.repositories.some.repo.SomeRepository (core-repository-some-repo-0.0.1-SNAPSHOT.jar)";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" -> "java.lang.IllegalStateException";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" -> "java.lang.Object";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" -> "java.lang.String";
                             |   "exodus.demo.core.business.some.service.impl.DefaultSomeService" -> "org.springframework.stereotype.Service (not found)";
                             |}
                             |
                             |""".stripMargin
} 
Example 85
Source File: JavaPSourceFileTracerTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, Files, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.model.makers.ModuleMaker._
import org.specs2.matcher.Scope
import org.specs2.mock.Mockito
import org.specs2.mutable.SpecificationWithJUnit
import com.wix.bazel.migrator.analyze.CodePath

class JavaPSourceFileTracerTest extends SpecificationWithJUnit with Mockito {
  "JavaPSourceFileTracerTest" should {
    "return the location of source file given it exists on filesystem" in new ctx{
      override def relativeSourcePath: String = "src/main/java"

      private val file: Path = fullPathToSourceFile
      Files.createDirectories(file.getParent)
      Files.createFile(file)

      processRunner.run(repoRoot,"javap",List("-cp",pathToClasses,fqn)) returns RunResult(
        exitCode = 0,
        stdOut = s"""Compiled from "${className}.$fileType"
                   |dontcare
                   |dontcare
                   |""".stripMargin,
        stdErr = ""
      )
      val res = tracer.traceSourceFile(module,fqn = fqn,pathToClasses = pathToClasses, testClass = false)

      res mustEqual CodePath(module,relativeSourcePath,filePath)
    }
  }

  trait ctx extends Scope{
    val fileSystem: FileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot: Path = fileSystem.getPath("/")
    val moduleName = "foo"
    val module: SourceModule = aModule(moduleName)
    def relativeSourcePath:String
    val javaPackage = "com.wix.example"
    val className = "Example"
    val fileType = "java"
    val filePath = javaPackage.replace('.','/') + s"/$className.$fileType"
    def fullPathToSourceFile: Path = repoRoot.resolve(module.relativePathFromMonoRepoRoot).resolve(relativeSourcePath).resolve(filePath)
    val processRunner: ProcessRunner = mock[ProcessRunner]
    val tracer = new JavaPSourceFileTracer(repoRoot,processRunner,fileSystem)
    val pathToClasses: String = moduleName + "target/classes"
    val fqn = s"$javaPackage.$className"

  }

} 
Example 86
Source File: MavenStandardModulePathsResolverTest.scala    From exodus   with MIT License 5 votes vote down vote up
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, Files, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.model.makers.ModuleMaker.aModule
import org.specs2.matcher.Scope
import org.specs2.mutable.SpecificationWithJUnit

class MavenStandardModulePathsResolverTest extends SpecificationWithJUnit {
  "MavenStandardModulePathsResolver" >> {
    "in case asked for classes modules should" in {
      "return relative path to <relative-module-dir>/target/classes in case it exists" in new ctx {
        val standardPathToClasses: Path = pathToModuleTargetDir.resolve("classes")
        Files.createDirectories(standardPathToClasses)

        pathsResolver.resolveClassesPath(interestingModule) must beSome(interestingModule.relativePathFromMonoRepoRoot + "/target/classes")
      }

      "return None in case <relative-module-dir>/target/classes does not exist" in new ctx {
        pathsResolver.resolveClassesPath(interestingModule) must beNone
      }
    }
    "in case asked for test-classes for modules should" in {
      "return relative path to <relative-module-dir>/target/test-classes in case it exists" in new ctx {
        val standardPathToTestClasses: Path = pathToModuleTargetDir.resolve("test-classes")
        Files.createDirectories(standardPathToTestClasses)

        pathsResolver.resolveTestClassesPath(interestingModule) must beSome(interestingModule.relativePathFromMonoRepoRoot + "/target/test-classes")
      }

      "return None in case <relative-module-dir>/target/test-classes does not exist" in new ctx {
        pathsResolver.resolveTestClassesPath(interestingModule) must beNone
      }
    }
    "in case asked for jar-path for modules should" in {
      "return relative path to <relative-module-dir>/target/<artifactId>-<version>.jar in case it exists" in new ctx {
        val jarName = s"${interestingModule.coordinates.artifactId}-${interestingModule.coordinates.version}.jar"
        val standardPathToClasses: Path = pathToModuleTargetDir.resolve(jarName)
        Files.createDirectories(standardPathToClasses)

        pathsResolver.resolveJarPath(interestingModule) must beSome(interestingModule.relativePathFromMonoRepoRoot + s"/target/$jarName")
      }

      "return None in case <relative-module-dir>/target/classes does not exist" in new ctx {
        pathsResolver.resolveJarPath(interestingModule) must beNone
      }
    }
  }

  trait ctx extends Scope {
    val fileSystem: FileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot: Path = fileSystem.getPath("/")
    val moduleName = "interesting-module"
    val interestingModule: SourceModule = aModule(moduleName).copy(relativePathFromMonoRepoRoot = moduleName)
    val pathToModule: Path = Files.createDirectories(repoRoot.resolve(interestingModule.relativePathFromMonoRepoRoot))

    val pathsResolver: ModulePathsResolver = new MavenStandardModulesPathsResolver(repoRoot,fileSystem)
    val pathToModuleTargetDir: Path = pathToModule.resolve("target")

  }

} 
Example 87
Source File: Chapter10.scala    From Learning-Spark-SQL   with MIT License 5 votes vote down vote up
//Code for Chapter 10, to be executed in the Spark shell. For all other code from the BigDL library, follow the instructions and commands in the book.
//Note that the code in this chapter uses Spark 2.1 due to some bugs.

//Execute the following on the command prompt to start the Spark shell
source /Users/aurobindosarkar/Downloads/BigDL-master/scripts/bigdl.sh
bin/spark-shell --properties-file /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/conf/spark-bigdl.conf --jars /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/lib/bigdl-0.2.0-SNAPSHOT-jar-with-dependencies.jar

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToBatch, GreyImgToSample}
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Module}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T, Table}
import com.intel.analytics.bigdl.nn._
import java.nio.ByteBuffer
import java.nio.file.{Files, Path, Paths}
import com.intel.analytics.bigdl.dataset.ByteRecord
import com.intel.analytics.bigdl.utils.File

val trainData = "/Users/aurobindosarkar/Downloads/mnist/train-images-idx3-ubyte"
val trainLabel = "/Users/aurobindosarkar/Downloads/mnist/train-labels-idx1-ubyte"
val validationData = "/Users/aurobindosarkar/Downloads/mnist/t10k-images-idx3-ubyte"
val validationLabel = "/Users/aurobindosarkar/Downloads/mnist/t10k-labels-idx1-ubyte"
val nodeNumber = 1
val coreNumber = 2
Engine.init
val model = Sequential[Float]()
val classNum = 10
val batchSize = 12
model.add(Reshape(Array(1, 28, 28))).add(SpatialConvolution(1, 6, 5, 5)).add(Tanh()).add(SpatialMaxPooling(2, 2, 2, 2)).add(Tanh()).add(SpatialConvolution(6, 12, 5, 5)).add(SpatialMaxPooling(2, 2, 2, 2)).add(Reshape(Array(12 * 4 * 4))).add(Linear(12 * 4 * 4, 100)).add(Tanh()).add(Linear(100, classNum)).add(LogSoftMax())

def load(featureFile: String, labelFile: String): Array[ByteRecord] = {
    val featureBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(featureFile)))
    val labelBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(labelFile)))
    val labelMagicNumber = labelBuffer.getInt()
    require(labelMagicNumber == 2049)
    val featureMagicNumber = featureBuffer.getInt()
    require(featureMagicNumber == 2051)
    val labelCount = labelBuffer.getInt()
    val featureCount = featureBuffer.getInt()
    require(labelCount == featureCount)
    val rowNum = featureBuffer.getInt()
    val colNum = featureBuffer.getInt()
    val result = new Array[ByteRecord](featureCount)
    var i = 0
    while (i < featureCount) {
      val img = new Array[Byte](rowNum * colNum)
      var y = 0
      while (y < rowNum) {
        var x = 0
        while (x < colNum) {
          img(x + y * colNum) = featureBuffer.get()
          x += 1
        }
        y += 1
      }
      result(i) = ByteRecord(img, labelBuffer.get().toFloat + 1.0f)
      i += 1
    }
    result
  }
val trainMean = 0.13066047740239506
val trainStd = 0.3081078
val trainSet = DataSet.array(load(trainData, trainLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) -> GreyImgToBatch(batchSize)
val optimizer = Optimizer(model = model, dataset = trainSet, criterion = ClassNLLCriterion[Float]())   
val testMean = 0.13251460696903547
val testStd = 0.31048024
val maxEpoch = 2
val validationSet = DataSet.array(load(validationData, validationLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToBatch(batchSize)
optimizer.setEndWhen(Trigger.maxEpoch(2))
optimizer.setState(T("learningRate" -> 0.05, "learningRateDecay" -> 0.0))
optimizer.setCheckpoint("/Users/aurobindosarkar/Downloads/mnist/checkpoint", Trigger.severalIteration(500))
optimizer.setValidation(trigger = Trigger.everyEpoch, dataset = validationSet, vMethods = Array(new Top1Accuracy, new Top5Accuracy[Float], new Loss[Float]))
optimizer.optimize()
model.save("/Users/aurobindosarkar/Downloads/mnist/model")
val model = Module.load[Float]("/Users/aurobindosarkar/Downloads/mnist/model")
val partitionNum = 2
val rddData = sc.parallelize(load(validationData, validationLabel), partitionNum)
val transformer = BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToSample()
val evaluationSet = transformer(rddData)
val result = model.evaluate(evaluationSet, Array(new Top1Accuracy[Float]), Some(batchSize))
result.foreach(r => println(s"${r._2} is ${r._1}")) 
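The load helper above pulls each MNIST file into memory in one call with Files.readAllBytes and then walks the bytes with a ByteBuffer. A minimal sketch of just that reading step follows; the file path and object name are placeholders.

import java.nio.ByteBuffer
import java.nio.file.{Files, Paths}

object ReadAllBytesSketch extends App {
  // Read the whole file into memory; fine for small binary files like the MNIST archives.
  val bytes: Array[Byte] = Files.readAllBytes(Paths.get("/tmp/train-images-idx3-ubyte"))
  val buffer: ByteBuffer = ByteBuffer.wrap(bytes)
  val magicNumber = buffer.getInt() // big-endian by default, as the idx format expects
  println(s"magic number: $magicNumber")
}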
Example 88
Source File: gihyo_6_3_CountByValueAndWindowSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}
import scala.collection.mutable
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper
import java.nio.file.Files

class gihyo_6_3_CountByValueAndWindowSuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[String]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    gihyo_6_3_countByValueAndWindow.run(ds, 2, 1)
    ssc.start()
    (1 to 3).foreach { case i =>
      lines += sc.makeRDD(Seq("key1", "key2", "key3")) // test data
      clock.advance(1000)
      Thread.sleep(1000)
    }
  }
} 
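These streaming suites all create a fresh checkpoint directory with Files.createTempDirectory so each run is isolated. A minimal sketch of that usage (the prefix is arbitrary and the directory is not cleaned up automatically):

import java.nio.file.{Files, Path}

object TempCheckpointDirSketch extends App {
  // Creates a new, uniquely named directory under the default temp location.
  val checkpointDir: Path = Files.createTempDirectory("StreamingUnitTest")
  println(checkpointDir.toString) // e.g. /tmp/StreamingUnitTest1234567890
  // The directory is not deleted automatically; remove it when the test finishes.
}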
Example 89
Source File: gihyo_6_3_TwitterStreamSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import java.nio.file.Files

import scala.collection.mutable
import scala.io.Source

import twitter4j.{Status, TwitterObjectFactory}

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}


class gihyo_6_3_TwitterStreamSuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[Status]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    gihyo_6_3_TwitterStream.run(
      sc,
      ds,
      Files.createTempDirectory("TwitterTag").toString,
      Files.createTempDirectory("TwitterWords").toString)
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    ssc.start()

    (1 to 2).foreach { case i =>
      // test data
      lines += sc.makeRDD(Seq(
        MockTweetGenerator.createMockStatusFromJson(),
        MockTweetGenerator.createMockStatusFromJson(),
        MockTweetGenerator.createMockStatusFromJson(),
        MockTweetGenerator.createMockStatusFromJson()))
      clock.advance(1000)
      Thread.sleep(1000)
    }
  }
}

object MockTweetGenerator {
  // Creates a tweet status from a JSON file
  def createMockStatusFromJson(): Status = {
    val jsonFile = getClass.getResource("/streaming/test-tweet.json").getPath
    TwitterObjectFactory.createStatus(Source.fromFile(jsonFile).getLines().mkString)
  }
} 
Example 90
Source File: gihyo_6_3_CountByWindowSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import java.nio.file.Files

import scala.collection.mutable

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper

class gihyo_6_3_CountByWindowSuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[String]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    gihyo_6_3_countByWindow.run(ds, 2, 1)
    ssc.start()
    (1 to 3).foreach { case i =>
      lines += sc.makeRDD(Seq("key1", "key2", "key3")) // test data
      clock.advance(1000)
      Thread.sleep(1000)
    }
  }
} 
Example 91
Source File: gihyo_6_3_UpdateStateByKeySuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import java.nio.file.Files

import scala.collection.mutable

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper

class gihyo_6_3_UpdateStateByKeySuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[String]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    gihyo_6_3_updateStateByKey.run(ds)
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    ssc.start()
    lines += sc.makeRDD(Seq("key1", "key2", "key3")) // test data
    clock.advance(1000)
    Thread.sleep(1000)
  }
} 
Example 92
Source File: gihyo_6_3_ReduceByKeyAndWindowEfficientSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import java.nio.file.Files

import scala.collection.mutable

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper

class gihyo_6_3_ReduceByKeyAndWindowEfficientSuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[String]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    gihyo_6_3_reduceByKeyAndWindow_efficient.run(ds, 2, 1)
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    ssc.start()
    (1 to 2).foreach { case i =>
      lines += sc.makeRDD(Seq("key1", "key2", "key3")) // test data
      clock.advance(1000)
      Thread.sleep(1000)
    }
  }
} 
Example 93
Source File: gihyo_6_3_KafkaStreamSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch06

import scala.collection.mutable
import java.nio.file.Files

import jp.gihyo.spark.{SparkFunSuite, TestStreamingContext}

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContextWrapper

class gihyo_6_3_KafkaStreamSuite extends SparkFunSuite with TestStreamingContext {

  test("run") {
    val lines = mutable.Queue[RDD[(String, String)]]()
    val ds = ssc.queueStream(lines)
    val clock = new StreamingContextWrapper(ssc).manualClock
    gihyo_6_3_KafkaStream.run(ds, Files.createTempDirectory("KafkaStreamSuite").toString, 2, 1)
    val checkpointDir = Files.createTempDirectory("StreamingUnitTest").toString
    ssc.checkpoint(checkpointDir)
    ssc.start()
    (1 to 2).foreach { case i =>
      lines += sc.makeRDD(Seq(("", "userid:userid001,action:view,pageid:value1"),
        ("", "userid:userid002,action:click,pageid:value2"),
        ("", "userid:userid003,action:view,pageid:value3"),
        ("", "userid:userid001,action:view,pageid:value4"))) // test data
      clock.advance(1000)
      Thread.sleep(1000)
    }
  }
} 
Example 94
Source File: Dhall.scala    From http4s-jdk-http-client   with Apache License 2.0 5 votes vote down vote up
import cats.effect._
import java.nio.file.{Files, Paths}
import org.dhallj.core.Expr
import org.dhallj.core.converters.JsonConverter
import org.dhallj.imports.syntax._
import org.dhallj.parser.DhallParser
import org.dhallj.yaml.YamlConverter
import org.http4s.client.Client
import org.http4s.client.jdkhttpclient.JdkHttpClient
import sbt.{IO => _, _}
import scala.concurrent.ExecutionContext
import upickle.default.{ReadWriter, macroRW}

object Dhall {

  lazy val convertDhall = taskKey[Unit]("Generate YAML/JSON from Dhall.")

  private lazy val http = {
    implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
    JdkHttpClient.simple[IO].unsafeRunSync()
  }

  private def loadDhall(expr: String): Expr = {
    implicit val c: Client[IO] = http
    DhallParser
      .parse(expr)
      .normalize()
      .resolveImports[IO]
      .unsafeRunSync()
      .normalize()
  }

  val convertDhallTask = convertDhall := {
    val baseDir = (Keys.baseDirectory in LocalRootProject).value.absolutePath
    def convertYaml(from: String, to: String): Unit = {
      val dhall = loadDhall(s"$baseDir/dhall/$from.dhall")
      val yaml = YamlConverter.toYamlString(dhall)
      Files.writeString(Paths.get(s"$baseDir/$to"), yaml)
    }
    List("ci", "release", "dhall").foreach { file =>
      convertYaml(file, s".github/workflows/$file.yml")
    }
    convertYaml("mergify", s".mergify.yml")
  }

  case class ScalaVersions(default: String, all: List[String])
  object ScalaVersions { implicit val rw: ReadWriter[ScalaVersions] = macroRW }

  val scalaVersions = settingKey[ScalaVersions]("Read the Scala versions via Dhall")

  val scalaVersionsImpl = scalaVersions := {
    val baseDir = (Keys.baseDirectory in LocalRootProject).value.absolutePath
    val dhall = loadDhall(s"$baseDir/dhall/scalaVersions.dhall")
    val json = JsonConverter.toCompactString(dhall)
    upickle.default.read[ScalaVersions](json)
  }

} 
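Files.writeString, used above to emit the generated YAML, is only available on Java 11 or newer. The sketch below shows that call next to the Files.write equivalent for older JVMs; paths and content are placeholders.

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

object WriteStringSketch extends App {
  val yaml = "name: ci\non: [push]\n"

  // Java 11+: write a CharSequence directly (UTF-8 by default).
  Files.writeString(Paths.get("/tmp/ci.yml"), yaml)

  // Pre-Java 11 equivalent: encode to bytes explicitly.
  Files.write(Paths.get("/tmp/ci-compat.yml"), yaml.getBytes(StandardCharsets.UTF_8))
}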
Example 95
Source File: NeuralNetwork.scala    From Scala-Machine-Learning-Projects   with MIT License 5 votes vote down vote up
package Yelp.Trainer

import org.deeplearning4j.nn.conf.MultiLayerConfiguration
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork
import org.nd4j.linalg.factory.Nd4j
import java.io.File
import org.apache.commons.io.FileUtils
import java.io.{DataInputStream, DataOutputStream, FileInputStream}
import java.nio.file.{Files, Paths}

object NeuralNetwork {  
  def loadNN(NNconfig: String, NNparams: String) = {
    // get neural network config
    val confFromJson: MultiLayerConfiguration = MultiLayerConfiguration.fromJson(FileUtils.readFileToString(new File(NNconfig)))    
     // get neural network parameters 
    val dis: DataInputStream = new DataInputStream(new FileInputStream(NNparams))
    val newParams = Nd4j.read(dis)    
     // creating network object
    val savedNetwork: MultiLayerNetwork = new MultiLayerNetwork(confFromJson)
    savedNetwork.init()
    savedNetwork.setParameters(newParams)    
    savedNetwork
  }
  
  def saveNN(model: MultiLayerNetwork, NNconfig: String, NNparams: String) = {
    // save neural network config
    FileUtils.write(new File(NNconfig), model.getLayerWiseConfigurations().toJson())     
    // save neural network parms
    val dos: DataOutputStream = new DataOutputStream(Files.newOutputStream(Paths.get(NNparams)))
    Nd4j.write(model.params(), dos)
  }  
} 
Example 96
Source File: zip.scala    From watr-works   with Apache License 2.0 5 votes vote down vote up
package edu.umass.cs.iesl.watr
package corpora
package filesys

import java.nio.file.{ DirectoryStream, Files, Path }
import fs2._
import cats.effect._
import cats.implicits._


object zip {

  def dirEntries[F[_]](dir: Path, include: Path => Boolean = _ => true)(implicit F: Effect[F]): fs2.Stream[F, Path] = {
    def useDirStream(dirStream: DirectoryStream[Path]): fs2.Stream[F, Path] = {
      Stream.unfold(dirStream.iterator) { iter =>
        if (iter.hasNext()) Some((iter.next(), iter)) else None
      }
    }

    val closeDirStream = (dirStream: DirectoryStream[Path]) => F.delay(dirStream.close)
    val acquire = F.delay(Files.newDirectoryStream(dir))
    val release = closeDirStream(_)

    Stream.bracket(acquire)(release)
      .flatMap(ds => useDirStream(ds))
      .filter(include)
  }


  def dirEntriesRecursive[F[_]](dir: Path, include: Path => Boolean = _ => true)(implicit F: Effect[F]): Stream[F, Path] =
    dirEntries[F](dir).flatMap { p =>
      val r = if (include(p)) Stream.emit(p) else Stream.empty
      if (Files.isDirectory(p)) r ++ dirEntriesRecursive(p, include)
      else r
    }

} 
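The fs2 wrapper above brackets Files.newDirectoryStream so the directory handle is always closed. The same safety can be had without fs2 using try/finally; a minimal sketch, with the directory path as a placeholder:

import java.nio.file.{DirectoryStream, Files, Path, Paths}

import scala.collection.mutable.ListBuffer

object DirEntriesSketch extends App {
  def listEntries(dir: Path): List[Path] = {
    val stream: DirectoryStream[Path] = Files.newDirectoryStream(dir)
    try {
      val entries = ListBuffer.empty[Path]
      val iter = stream.iterator()
      while (iter.hasNext) entries += iter.next()
      entries.toList
    } finally stream.close() // always release the OS directory handle
  }

  listEntries(Paths.get("/tmp")).foreach(println)
}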
Example 97
Source File: HBaseConnectorSuite.scala    From darwin   with Apache License 2.0 5 votes vote down vote up
package it.agilelab.darwin.connector.hbase

import java.nio.file.Files

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
import it.agilelab.darwin.common.Connector
import org.apache.avro.reflect.ReflectData
import org.apache.avro.{Schema, SchemaNormalization}
import org.apache.hadoop.hbase.HBaseTestingUtility
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class HBaseConnectorSuite extends AnyFlatSpec with Matchers with BeforeAndAfterAll {

  var connector: Connector = _

  "HBaseConnector" should "load all existing schemas" in {
    connector.fullLoad()
  }

  it should "insert and retrieve" in {
    val schemas = Seq(ReflectData.get().getSchema(classOf[HBaseMock]), ReflectData.get().getSchema(classOf[HBase2Mock]))
      .map(s => SchemaNormalization.parsingFingerprint64(s) -> s)
    connector.insert(schemas)
    val loaded: Seq[(Long, Schema)] = connector.fullLoad()
    assert(loaded.size == schemas.size)
    assert(loaded.forall(schemas.contains))
    val schema = connector.findSchema(loaded.head._1)
    assert(schema.isDefined)
    assert(schema.get == loaded.head._2)
    val noSchema = connector.findSchema(-1L)
    assert(noSchema.isEmpty)
  }

  "connector.tableCreationHint" should "print the correct hint for table creation" in {
    connector.tableCreationHint() should be(
      """To create namespace and table from an HBase shell issue:
        |  create_namespace 'AVRO'
        |  create 'AVRO:SCHEMA_REPOSITORY', '0'""".stripMargin)
  }

  "connector.tableExists" should "return true with existent table" in {
    connector.tableExists() should be(true)
  }

  override def beforeAll(): Unit = {

    connector = new HBaseConnectorCreator().create(HBaseConnectorSuite.config)

    connector.createTable()
  }


}

object HBaseConnectorSuite {
  private lazy val config = {
    val util = new HBaseTestingUtility()
    val minicluster = util.startMiniCluster()

    //Hbase connector can only load configurations from a file path so we need to render the hadoop conf
    val confFile = Files.createTempFile("prefix", "suffix")
    val stream = Files.newOutputStream(confFile)
    minicluster.getConfiguration.writeXml(stream)
    stream.flush()
    stream.close()
    val hbaseConfigPath = ConfigValueFactory.fromAnyRef(confFile.toAbsolutePath.toString)

    //HbaseConnector will only load conf if hbase-site and core-site are given,
    //we give the same file to each.
    sys.addShutdownHook(minicluster.shutdown())
    ConfigFactory.load()
      .withValue(ConfigurationKeys.HBASE_SITE, hbaseConfigPath)
      .withValue(ConfigurationKeys.CORE_SITE, hbaseConfigPath)
  }

} 
Example 98
Source File: MemoryFootprint.scala    From collection-strawman   with Apache License 2.0 5 votes vote down vote up
package bench

import strawman.collection.immutable.{LazyList, List, Range, NumericRange, Vector}
import strawman.collection.mutable.{ArrayBuffer, ListBuffer}

import scala.{Any, AnyRef, App, Int, Long, Seq, StringContext}
import scala.Predef.{ArrowAssoc, println, intWrapper}
import scala.compat.Platform
import java.lang.Runtime
import java.nio.file.{Files, Paths}


object MemoryFootprint extends App {

  val reportPath = Paths.get(args(0))

  val sizes = scala.List(8, 64, 512, 4096, 32768, 262144, 2097152)

  val runtime = Runtime.getRuntime
  val obj: AnyRef = null
  var placeholder: Any = _

  def benchmark[A](gen: Int => A): scala.List[(Int, Long)] = (
    // We run 5 iterations and pick the last result only
    for (_ <- scala.Range(0, 5)) yield {
      for (size <- sizes) yield {
        placeholder = null
        Platform.collectGarbage()
        val memBefore = runtime.totalMemory() - runtime.freeMemory()
        placeholder = gen(size)
        Platform.collectGarbage()
        val memAfter = runtime.totalMemory() - runtime.freeMemory()
        size -> (memAfter - memBefore)
      }
    }
  ).last

  val memories =
    scala.Predef.Map(
      "scala.List"    -> benchmark(scala.List.fill(_)(obj)),
      "List"          -> benchmark(List.fill(_)(obj)),
      "LazyList"      -> benchmark(LazyList.fill(_)(obj)),
      "scala.Vector"  -> benchmark(scala.Vector.fill(_)(obj)),
      "Vector"        -> benchmark(Vector.fill(_)(obj)),
      "scala.HashSet" -> benchmark(n => scala.collection.immutable.HashSet((1 to n).map(_.toString): _*)),
      "HashSet"       -> benchmark(n => strawman.collection.immutable.HashSet((1 to n).map(_.toString): _*)),
      "scala.TreeSet" -> benchmark(n => scala.collection.immutable.TreeSet((1 to n).map(_.toString): _*)),
      "TreeSet"       -> benchmark(n => strawman.collection.immutable.TreeSet((1 to n).map(_.toString): _*)),
      "ArrayBuffer"   -> benchmark(ArrayBuffer.fill(_)(obj)),
      "ListBuffer"    -> benchmark(ListBuffer.fill(_)(obj)),
      "ImmutableArray" -> benchmark(strawman.collection.immutable.ImmutableArray.fill(_)(obj)),
      "ImmutableArray (primitive)" -> benchmark(strawman.collection.immutable.ImmutableArray.fill(_)(123)),
      "Range"         -> benchmark(Range(0, _)),
      "NumericRange"  -> benchmark(NumericRange(0, _, 1))
    )

  // We use a format similar to the one used by JMH so that
  // our charts can be generated in the same way
  import jawn.ast._
  val report =
    JArray.fromSeq(
      memories.flatMap { case (name, values) =>
        values.map { case (size, value) =>
          JObject.fromSeq(Seq(
            "benchmark" -> JString(s"$name.memory-footprint"),
            "params" -> JObject.fromSeq(Seq(
              "size" -> JString(size.toString)
            )),
            "primaryMetric" -> JObject.fromSeq(Seq(
              "score" -> JNum(value),
              "scoreConfidence" -> JArray.fromSeq(Seq(JNum(value), JNum(value)))
            ))
          ))
        }
      }.to[Seq]
    )
  Files.write(reportPath, FastRenderer.render(report).getBytes)

} 
Example 99
Source File: RowCSVWriter.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.report


  def close() {
    csvWriter.close()
  }

}

trait RowCSVWriterProvider {
  def newRowCSVWriter: RowCSVWriter
}

case class FileRowCSVWriterProvider(file: File) extends RowCSVWriterProvider {
  def newRowCSVWriter: RowCSVWriter = {
    if(file.exists() && file.length() > 0) {
      Files.write(file.toPath, Array[Byte](), StandardOpenOption.TRUNCATE_EXISTING) // Clear file
    }
    val fos = new FileOutputStream(file.getAbsoluteFile, true)
    val writerTry = safeCloseable(fos)(new OutputStreamWriter(_, StandardCharsets.UTF_8))
      .flatMap(safeCloseable(_)(new BufferedWriter(_)))
      .flatMap(safeCloseable(_)(new RowCSVWriter(_, RowCSVWriter.DEFAULT_SEPARATOR)))
    require(writerTry.isSuccess, s"Failed to create RowCSVWriter safely : $writerTry")
    writerTry.get
  }
} 
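FileRowCSVWriterProvider above truncates an existing, non-empty file by writing an empty byte array with StandardOpenOption.TRUNCATE_EXISTING before appending. A minimal sketch of that truncation idiom on its own; the file path is a placeholder.

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths, StandardOpenOption}

object TruncateFileSketch extends App {
  val path = Paths.get("/tmp/report.csv")
  Files.write(path, "old contents\n".getBytes(StandardCharsets.UTF_8)) // seed the file

  // Writing zero bytes with TRUNCATE_EXISTING clears the file without deleting it.
  Files.write(path, Array.emptyByteArray, StandardOpenOption.TRUNCATE_EXISTING)

  println(Files.size(path)) // 0
}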
Example 100
Source File: RocksDBStorageTest.scala    From JustinDB   with Apache License 2.0 5 votes vote down vote up
package justin.db.storage

import java.nio.file.Files
import java.util.UUID

import justin.db.storage.PluggableStorageProtocol.{Ack, DataOriginality, StorageGetData}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

class RocksDBStorageTest extends FlatSpec with Matchers  with ScalaFutures {

  behavior of "RocksDBStorage"

  it should "save 3 payloads and read them" in {
    val journal = Files.createTempDirectory("rocksdb")
    val rocksdb = new RocksDBStorage(journal.toFile)
    val data1 = JustinData(
      id        = UUID.randomUUID,
      value     = "1",
      vclock    = "vclock-value",
      timestamp = 1234124L
    )
    val data2 = JustinData(
      id        = UUID.randomUUID,
      value     = "1",
      vclock    = "vclock-value",
      timestamp = 1234124L
    )
    val data3 = JustinData(
      id        = UUID.randomUUID,
      value     = "3",
      vclock    = "vclock-value",
      timestamp = 1234124L
    )
    val dataOriginality = DataOriginality.Primary(ringPartitionId = 1)

    // PUT
    rocksdb.put(data1)(_ => dataOriginality).futureValue shouldBe Ack
    rocksdb.put(data2)(_ => dataOriginality).futureValue shouldBe Ack
    rocksdb.put(data3)(_ => dataOriginality).futureValue shouldBe Ack

    // GET
    rocksdb.get(data3.id)(_ => dataOriginality).futureValue shouldBe StorageGetData.Single(data3)
    rocksdb.get(data2.id)(_ => dataOriginality).futureValue shouldBe StorageGetData.Single(data2)
    rocksdb.get(data1.id)(_ => dataOriginality).futureValue shouldBe StorageGetData.Single(data1)
  }

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(10.seconds, 50.millis)
} 
Example 101
Source File: ManagedPath.scala    From zio-rocksdb   with Apache License 2.0 5 votes vote down vote up
package zio.rocksdb.internal

package internal

import java.io.IOException
import java.nio.file.{ Files, Path }

import zio.{ Task, UIO, ZIO, ZManaged }

import scala.reflect.io.Directory

object ManagedPath {
  private def createTempDirectory: Task[Path] = Task {
    Files.createTempDirectory("zio-rocksdb")
  }

  private def deleteDirectory(path: Path): UIO[Boolean] = UIO {
    new Directory(path.toFile).deleteRecursively()
  }

  private def deleteDirectoryE(path: Path): UIO[Unit] =
    deleteDirectory(path) >>= {
      case true  => ZIO.unit
      case false => ZIO.die(new IOException("Could not delete path recursively"))
    }

  def apply(): ZManaged[Any, Throwable, Path] = createTempDirectory.toManaged(deleteDirectoryE)
} 
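ManagedPath pairs Files.createTempDirectory with a recursive delete so the RocksDB journal directory never leaks. Outside ZIO, the same cleanup can be expressed with Files.walk; a minimal sketch, assuming Scala 2.12+ so the lambda converts to the java.util.function.Consumer expected by forEach:

import java.nio.file.{Files, Path}
import java.util.Comparator

object TempDirCleanupSketch extends App {
  val dir: Path = Files.createTempDirectory("zio-rocksdb")
  Files.createFile(dir.resolve("journal.log"))

  // Delete children before parents by sorting paths in reverse order.
  val walked = Files.walk(dir)
  try walked.sorted(Comparator.reverseOrder[Path]()).forEach(p => Files.delete(p))
  finally walked.close()

  println(Files.exists(dir)) // false
}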
Example 102
Source File: FileDownloadServlet.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.rpc.utils

import java.io.File
import java.nio.file.Files
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}


  protected def resolveFileMimeType(file: File): String =
    Option(getServletContext.getMimeType(file.getAbsolutePath)).getOrElse("application/octet-stream")

  override def doGet(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    val file = resolveFile(request)

    if (!file.exists()) response.sendError(404, "File not found!")
    else {
      // MIME type
      response.setContentType(resolveFileMimeType(file))
      // content length
      response.setContentLengthLong(file.length)
      // file name
      response.setHeader("Content-Disposition", s"""attachment; filename="${presentedFileName(file.getName)}"""")

      val outStream = response.getOutputStream
      Files.copy(file.toPath, outStream)
      outStream.close()
    }
  }
} 
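The servlet above streams the file to the client with Files.copy(Path, OutputStream). A minimal standalone sketch of that copy direction, writing a file into any OutputStream; the path and object name are placeholders.

import java.io.ByteArrayOutputStream
import java.nio.file.{Files, Paths}

object CopyToStreamSketch extends App {
  val source = Paths.get("/tmp/download.bin")
  Files.write(source, Array[Byte](1, 2, 3))

  // Files.copy streams the file contents into the given OutputStream.
  val out = new ByteArrayOutputStream()
  Files.copy(source, out)
  println(out.toByteArray.length) // 3
}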
Example 103
Source File: FileUtils.scala    From skeuomorph   with Apache License 2.0 5 votes vote down vote up
package higherkindness.skeuomorph

import java.io.{File, FileOutputStream, InputStream}
import java.nio.file.{Files, Paths, StandardOpenOption}

import cats.effect.{Resource, Sync}

object FileUtils {
  def fileHandle[F[_]: Sync](name: String): Resource[F, File] =
    Resource.make(
      Sync[F].delay(new File(name))
    )(file => Sync[F].delay(file.deleteOnExit()))

  def fileOutputStream[F[_]: Sync](file: File): Resource[F, FileOutputStream] =
    Resource.make(
      Sync[F].delay(new FileOutputStream(file))
    )(fos => Sync[F].delay(fos.close()))

  def fileInputStream[F[_]: Sync](name: String): Resource[F, InputStream] =
    Resource.make(
      Sync[F].delay(Files.newInputStream(Paths.get(name), StandardOpenOption.DELETE_ON_CLOSE))
    )(is => Sync[F].delay(is.close()))
} 
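fileInputStream above opens the file with StandardOpenOption.DELETE_ON_CLOSE, so the file is removed as soon as the stream is closed. A minimal sketch mirroring that call, using a temp file created just for the demonstration:

import java.nio.file.{Files, StandardOpenOption}

object DeleteOnCloseSketch extends App {
  val path = Files.createTempFile("skeuomorph-demo", ".tmp")

  val in = Files.newInputStream(path, StandardOpenOption.DELETE_ON_CLOSE)
  try in.read() finally in.close() // closing the stream also deletes the file

  println(Files.exists(path)) // false
}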
Example 104
Source File: StandaloneKCFTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.standalone

import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.Files

import common.WskProps
import org.apache.commons.io.FileUtils
import org.apache.openwhisk.core.containerpool.kubernetes.test.KubeClientSupport
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import system.basic.WskRestBasicTests

@RunWith(classOf[JUnitRunner])
class StandaloneKCFTests
    extends WskRestBasicTests
    with StandaloneServerFixture
    with StandaloneSanityTestSupport
    with KubeClientSupport {
  override implicit val wskprops = WskProps().copy(apihost = serverUrl)

  //Turn on to debug locally easily
  override protected val dumpLogsAlways = false

  override protected val dumpStartupLogs = false

  override protected def useMockServer = false

  override protected def supportedTests = Set("Wsk Action REST should invoke a blocking action and get only the result")

  override protected def extraArgs: Seq[String] = Seq("--dev-mode", "--dev-kcf")

  private val podTemplate = """---
                              |apiVersion: "v1"
                              |kind: "Pod"
                              |metadata:
                              |  annotations:
                              |    allow-outbound : "true"
                              |  labels:
                              |     launcher: standalone""".stripMargin

  private val podTemplateFile = Files.createTempFile("whisk", null).toFile

  override val customConfig = {
    FileUtils.write(podTemplateFile, podTemplate, UTF_8)
    Some(s"""include classpath("standalone-kcf.conf")
         |
         |whisk {
         |  kubernetes {
         |    pod-template = "${podTemplateFile.toURI}"
         |  }
         |}""".stripMargin)
  }

  override def afterAll(): Unit = {
    checkPodState()
    super.afterAll()
    podTemplateFile.delete()
  }

  def checkPodState(): Unit = {
    val podList = kubeClient.pods().withLabel("launcher").list()
    podList.getItems.isEmpty shouldBe false
  }
} 
Example 105
Source File: ConfigMapValueTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.common

import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.Files

import com.typesafe.config.ConfigFactory
import org.apache.commons.io.FileUtils
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import pureconfig._
import pureconfig.generic.auto._

@RunWith(classOf[JUnitRunner])
class ConfigMapValueTests extends FlatSpec with Matchers {
  behavior of "ConfigMapValue"

  case class ValueTest(template: ConfigMapValue, count: Int)

  it should "read from string" in {
    val config = ConfigFactory.parseString("""
       |whisk {
       |  value-test {
       |    template = "test string"
       |    count = 42
       |  }
       |}""".stripMargin)

    val valueTest = readValueTest(config)
    valueTest.template.value shouldBe "test string"
  }

  it should "read from file reference" in {
    val file = Files.createTempFile("whisk", null).toFile
    FileUtils.write(file, "test string", UTF_8)

    val config = ConfigFactory.parseString(s"""
       |whisk {
       |  value-test {
       |    template = "${file.toURI}"
       |    count = 42
       |  }
       |}""".stripMargin)

    val valueTest = readValueTest(config)
    valueTest.template.value shouldBe "test string"

    file.delete()
  }

  private def readValueTest(config: com.typesafe.config.Config) = {
    loadConfigOrThrow[ValueTest](config.getConfig("whisk.value-test"))
  }
} 
Example 106
Source File: MleapSupportSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.runtime

import java.net.URI
import java.nio.file.{Files, Paths}

import ml.combust.mleap.core.feature.StringIndexerModel
import ml.combust.mleap.core.types.NodeShape
import ml.combust.mleap.runtime.transformer.feature.StringIndexer
import MleapSupport._

import org.scalatest.FunSpec

class MleapSupportSpec extends FunSpec {
  private val testDir = Files.createTempDirectory("MleapSupportSpec")

  private val stringIndexer = StringIndexer(shape = NodeShape().
    withStandardInput("feature").
    withStandardOutput("feature_index"),
    model = StringIndexerModel(Seq("label1", "label2")))

  describe("URIBundleFileOps") {
    it("can save/load a bundle using a URI") {
      val testFile = Paths.get(testDir.toString, "URIBundleFileOps.zip")
      testFile.toFile.deleteOnExit()

      val uri = new URI(s"jar:file://$testFile")
      stringIndexer.writeBundle.save(uri)

      val loadedStringIndexer = uri.loadMleapBundle().get.root

      assert(stringIndexer == loadedStringIndexer)
    }
  }
} 
Example 107
Source File: XGBoostClassificationOp.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.xgboost.runtime.bundle.ops

import java.nio.file.Files

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Model, Value}
import ml.combust.bundle.op.OpModel
import ml.combust.mleap.bundle.ops.MleapOp
import ml.combust.mleap.runtime.MleapContext
import ml.combust.mleap.xgboost.runtime.{XGBoostBinaryClassificationModel, XGBoostClassification, XGBoostClassificationModel, XGBoostMultinomialClassificationModel, XGBoostPredictorBinaryClassificationModel, XGBoostPredictorClassification, XGBoostPredictorClassificationModel}
import ml.dmlc.xgboost4j.scala.{Booster, XGBoost}



class XGBoostClassificationOp extends MleapOp[XGBoostClassification, XGBoostClassificationModel] {
  override val Model: OpModel[MleapContext, XGBoostClassificationModel] = new OpModel[MleapContext, XGBoostClassificationModel] {
    override val klazz: Class[XGBoostClassificationModel] = classOf[XGBoostClassificationModel]

    override def opName: String = "xgboost.classifier"

    override def store(model: Model, obj: XGBoostClassificationModel)
                      (implicit context: BundleContext[MleapContext]): Model = {
      val out = Files.newOutputStream(context.file("xgboost.model"))
      obj.booster.saveModel(out)
      model
        .withValue("num_features", Value.int(obj.numFeatures))
        .withValue("num_classes", Value.int(obj.numClasses))
        .withValue("tree_limit", Value.int(obj.treeLimit))
    }

    override def load(model: Model)
            (implicit context: BundleContext[MleapContext]): XGBoostClassificationModel = {

      val booster: Booster = XGBoost.loadModel(Files.newInputStream(context.file("xgboost.model")))

      val numClasses = model.value("num_classes").getInt
      val numFeatures = model.value("num_features").getInt
      val treeLimit = model.value("tree_limit").getInt

      val impl = if(numClasses == 2) {
        XGBoostBinaryClassificationModel(booster, numFeatures, treeLimit)
      } else {
        XGBoostMultinomialClassificationModel(booster, numClasses, numFeatures, treeLimit)
      }

      XGBoostClassificationModel(impl)
    }
  }

  override def model(node: XGBoostClassification): XGBoostClassificationModel = node.model
} 
Example 108
Source File: XGBoostRegressionOp.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.xgboost.runtime.bundle.ops

import java.io.ByteArrayInputStream
import java.nio.file.Files

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Model, Value}
import ml.combust.bundle.op.OpModel
import ml.combust.mleap.bundle.ops.MleapOp
import ml.combust.mleap.runtime.MleapContext
import ml.combust.mleap.xgboost.runtime.{XGBoostRegression, XGBoostRegressionModel}
import ml.dmlc.xgboost4j.scala.XGBoost
import resource._


class XGBoostRegressionOp extends MleapOp[XGBoostRegression, XGBoostRegressionModel] {
  override val Model: OpModel[MleapContext, XGBoostRegressionModel] = new OpModel[MleapContext, XGBoostRegressionModel] {
    override val klazz: Class[XGBoostRegressionModel] = classOf[XGBoostRegressionModel]

    override def opName: String = "xgboost.regression"

    override def store(model: Model, obj: XGBoostRegressionModel)
                      (implicit context: BundleContext[MleapContext]): Model = {
      val out = Files.newOutputStream(context.file("xgboost.model"))
      obj.booster.saveModel(out)

      model
        .withValue("num_features", Value.int(obj.numFeatures))
        .withValue("tree_limit", Value.int(obj.treeLimit))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[MleapContext]): XGBoostRegressionModel = {
      val bytes = Files.readAllBytes(context.file("xgboost.model"))
      val booster = XGBoost.loadModel(new ByteArrayInputStream(bytes))
      val treeLimit = model.value("tree_limit").getInt

      XGBoostRegressionModel(booster,
        numFeatures = model.value("num_features").getInt,
        treeLimit = treeLimit)
    }
  }

  override def model(node: XGBoostRegression): XGBoostRegressionModel = node.model
} 
Example 109
Source File: HttpRepository.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.executor.repository

import java.net.URI
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.concurrent.Executors

import akka.actor.ActorSystem
import com.typesafe.config.{Config, ConfigFactory}

import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.TimeUnit

object HttpRepositoryConfig {
  val defaults: Config = ConfigFactory.load().getConfig("ml.combust.mleap.executor.repository-defaults.http")
}

class HttpRepositoryConfig(_config: Config) {
  val config: Config = _config.withFallback(HttpRepositoryConfig.defaults)

  val threads: Int = config.getInt("threads")
}

class HttpRepository(config: HttpRepositoryConfig) extends Repository {
  private val threadPool = Executors.newFixedThreadPool(config.threads)
  implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool)

  override def downloadBundle(uri: URI): Future[Path] = Future {
    val tmpFile = Files.createTempFile("mleap", ".bundle.zip")
    Files.copy(uri.toURL.openStream(), tmpFile, StandardCopyOption.REPLACE_EXISTING)
    tmpFile
  }

  override def canHandle(uri: URI): Boolean = uri.getScheme == "http" || uri.getScheme == "https"

  override def shutdown(): Unit = threadPool.shutdown()

  override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit)
}

object HttpRepositoryProvider extends RepositoryProvider {
  override def create(config: Config)
                     (implicit system: ActorSystem): HttpRepository = {
    new HttpRepository(new HttpRepositoryConfig(config))
  }
} 
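HttpRepository downloads a bundle by copying the URL's stream into a temp file; because Files.createTempFile already creates the file, Files.copy(InputStream, Path) must be given StandardCopyOption.REPLACE_EXISTING or it fails with FileAlreadyExistsException. A minimal sketch of that download pattern, with a placeholder URL:

import java.net.URI
import java.nio.file.{Files, Path, StandardCopyOption}

object DownloadToTempFileSketch extends App {
  val uri = new URI("https://example.com/model.bundle.zip")

  val tmpFile: Path = Files.createTempFile("mleap", ".bundle.zip")
  val in = uri.toURL.openStream()
  try Files.copy(in, tmpFile, StandardCopyOption.REPLACE_EXISTING) // overwrite the empty temp file
  finally in.close()

  println(Files.size(tmpFile))
}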
Example 110
Source File: FileRepository.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.executor.repository

import java.io.File
import java.net.URI
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.concurrent.Executors

import akka.actor.ActorSystem
import com.typesafe.config.{Config, ConfigFactory}
import ml.combust.mleap.executor.error.BundleException

import scala.concurrent.duration.TimeUnit
import scala.concurrent.{ExecutionContext, Future}

object FileRepositoryConfig {
  val defaults: Config = ConfigFactory.load().getConfig("ml.combust.mleap.executor.repository-defaults.file")
}

class FileRepositoryConfig(_config: Config) {
  val config: Config = _config.withFallback(FileRepositoryConfig.defaults)

  val move: Boolean = config.getBoolean("move")
  val threads: Int = config.getInt("threads")
}

class FileRepository(config: FileRepositoryConfig) extends Repository {
  private val threadPool = Executors.newFixedThreadPool(config.threads)
  implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool)

  def this() = this(new FileRepositoryConfig(FileRepositoryConfig.defaults))

  override def downloadBundle(uri: URI): Future[Path] = Future {

    if (uri.getPath.isEmpty) {
      throw new BundleException("file path cannot be empty")
    }

    val local = new File(uri.getPath).toPath
    if (!Files.exists(local)) {
      throw new BundleException(s"file does not exist $local")
    }

    if (config.move) {
      val tmpFile = Files.createTempFile("mleap", ".bundle.zip")
      Files.copy(local, tmpFile, StandardCopyOption.REPLACE_EXISTING)
      tmpFile.toFile.deleteOnExit()
      tmpFile
    } else {
      local
    }
  }

  override def canHandle(uri: URI): Boolean = uri.getScheme == "file" || uri.getScheme == "jar:file"

  override def shutdown(): Unit = threadPool.shutdown()

  override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit)
}

object FileRepositoryProvider extends RepositoryProvider {
  override def create(tConfig: Config)
                     (implicit system: ActorSystem): Repository = {
    val config = new FileRepositoryConfig(tConfig)

    new FileRepository(config)
  }
} 
Example 111
Source File: TensorflowTransformerOp.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.tensorflow

import java.nio.file.Files

import ml.bundle.{BasicType, DataShape}
import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl._
import ml.combust.bundle.op.OpModel
import ml.combust.mleap.bundle.ops.MleapOp
import ml.combust.mleap.core
import ml.combust.mleap.core.types.TensorType
import ml.combust.mleap.runtime.MleapContext
import ml.combust.mleap.runtime.types.BundleTypeConverters._


class TensorflowTransformerOp extends MleapOp[TensorflowTransformer, TensorflowModel] {
  override val Model: OpModel[MleapContext, TensorflowModel] = new OpModel[MleapContext, TensorflowModel] {
    override val klazz: Class[TensorflowModel] = classOf[TensorflowModel]

    override def opName: String = Bundle.BuiltinOps.tensorflow

    override def store(model: Model, obj: TensorflowModel)
                      (implicit context: BundleContext[MleapContext]): Model = {
      val graph = obj.graph.getOrElse({
        val graph = new org.tensorflow.Graph()
        graph.importGraphDef(obj.graphBytes)
        graph
      })

      Files.write(context.file("graph.pb"), graph.toGraphDef)
      val (inputNames, inputMleapDataTypes) = obj.inputs.unzip
      val (inputBasicTypes, inputShapes) = inputMleapDataTypes.map {
        dt => (dt.base: BasicType, dt.shape: DataShape)
      }.unzip

      val (outputNames, outputMleapDataTypes) = obj.outputs.unzip
      val (outputBasicTypes, outputShapes) = outputMleapDataTypes.map {
        dt => (dt.base: BasicType, dt.shape: DataShape)
      }.unzip

      model.withValue("input_names", Value.stringList(inputNames)).
        withValue("input_types", Value.basicTypeList(inputBasicTypes)).
        withValue("input_shapes", Value.dataShapeList(inputShapes)).
        withValue("output_names", Value.stringList(outputNames)).
        withValue("output_types", Value.basicTypeList(outputBasicTypes)).
        withValue("output_shapes", Value.dataShapeList(outputShapes)).
        withValue("nodes", obj.nodes.map(Value.stringList))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[MleapContext]): TensorflowModel = {
      val graphBytes = Files.readAllBytes(context.file("graph.pb"))

      val inputNames = model.value("input_names").getStringList
      val inputTypes = model.value("input_types").getBasicTypeList.map(v => v: core.types.BasicType)
      val inputShapes = model.value("input_shapes").getDataShapeList.map(v => v: core.types.DataShape)

      val outputNames = model.value("output_names").getStringList
      val outputTypes = model.value("output_types").getBasicTypeList.map(v => v: core.types.BasicType)
      val outputShapes = model.value("output_shapes").getDataShapeList.map(v => v: core.types.DataShape)

      val nodes = model.getValue("nodes").map(_.getStringList)

      val inputs = inputNames.zip(inputTypes.zip(inputShapes).map {
        case (b, s) => core.types.DataType(b, s).asInstanceOf[TensorType]
      })
      val outputs = outputNames.zip(outputTypes.zip(outputShapes).map {
        case (b, s) => core.types.DataType(b, s).asInstanceOf[TensorType]
      })

      val graph = new org.tensorflow.Graph()
      graph.importGraphDef(graphBytes)
      TensorflowModel(graph = Some(graph),
        inputs = inputs,
        outputs = outputs,
        nodes = nodes,
        graphBytes = graphBytes)
    }
  }

  override def model(node: TensorflowTransformer): TensorflowModel = node.model
} 
Example 112
Source File: TestTensorflow.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.Files

import ml.combust.bundle.BundleFile
import ml.combust.mleap.core.types.{NodeShape, TensorType}
import ml.combust.mleap.tensorflow.{TensorflowModel, TensorflowTransformer}
import org.apache.spark.sql.SparkSession
import ml.combust.mleap.runtime.MleapSupport._

class TestTensorflow(session: SparkSession) extends Runnable {
  override def run(): Unit = {
    val model = TensorflowModel(TensorFlowTestUtil.createAddGraph(),
      inputs = Seq(("InputA", TensorType.Float()), ("InputB", TensorType.Float())),
      outputs = Seq(("MyResult", TensorType.Float())))
    val shape = NodeShape().withInput("InputA", "input_a").
      withInput("InputB", "input_b").
      withOutput("MyResult", "my_result")
    val transformer = TensorflowTransformer(uid = "tensorflow_ab",
      shape = shape,
      model = model)

    val modelPath = Files.createTempFile("mleap-databricks-runtime-testkit", ".zip")
    Files.delete(modelPath)

    {
      println("Writing model to...", modelPath)
      val bf = BundleFile(new File(modelPath.toString))
      transformer.writeBundle.save(bf).get
      bf.close()
    }

    {
      val bf = BundleFile(new File(modelPath.toString))
      bf.loadMleapBundle()
      bf.close()
    }
  }
} 
Example 113
Source File: TestXgboost.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.bundle.BundleFile
import org.apache.spark.ml.bundle.SparkBundleContext
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.sql.SparkSession
import com.databricks.spark.avro._
import ml.combust.mleap.spark.SparkSupport._
import ml.combust.mleap.runtime.MleapSupport._
import ml.dmlc.xgboost4j.scala.spark.XGBoostClassifier
import org.apache.spark.ml.Pipeline

class TestXgboost(session: SparkSession) extends Runnable {
  private val xgboostParams: Map[String, Any] = Map(
    "eta" -> 0.3,
    "max_depth" -> 2,
    "objective" -> "binary:logistic",
    "early_stopping_rounds" ->2,
    "num_round" -> 15,
    "nworkers" -> 2
  )

  override def run(): Unit = {
    val sqlContext = session.sqlContext

    // Create a temporary file and copy the contents of the resource avro to it
    val path = Files.createTempFile("mleap-databricks-runtime-testkit", ".avro")
    Files.copy(getClass.getClassLoader.getResource("datasources/lending_club_sample.avro").openStream(),
      path,
      StandardCopyOption.REPLACE_EXISTING)

    val sampleData = sqlContext.read.avro(path.toString)
    sampleData.show()

    val stringIndexer = new StringIndexer().
      setInputCol("fico_score_group_fnl").
      setOutputCol("fico_index")

    val featureAssembler = new VectorAssembler().
      setInputCols(Array(stringIndexer.getOutputCol, "dti", "loan_amount")).
      setOutputCol("features")

    val logisticRegression = new XGBoostClassifier(xgboostParams).
      setFeaturesCol("features").
      setLabelCol("approved").
      setPredictionCol("prediction")

    val pipeline = new Pipeline().setStages(Array(stringIndexer, featureAssembler, logisticRegression))

    val model = pipeline.fit(sampleData)

    val modelPath = Files.createTempFile("mleap-databricks-runtime-testkit", ".zip")
    Files.delete(modelPath)

    {
      println("Writing model to...", modelPath)
      implicit val sbc = SparkBundleContext.defaultContext.withDataset(model.transform(sampleData))
      val bf = BundleFile(new File(modelPath.toString))
      model.writeBundle.save(bf).get
      bf.close()
    }

    {
      val bf = BundleFile(new File(modelPath.toString))
      bf.loadMleapBundle()
      bf.close()
    }
  }
} 
Example 114
Source File: TestSparkMl.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.bundle.BundleFile
import org.apache.spark.ml.bundle.SparkBundleContext
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.sql.SparkSession
import com.databricks.spark.avro._
import ml.combust.mleap.spark.SparkSupport._
import ml.combust.mleap.runtime.MleapSupport._
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.LogisticRegression

class TestSparkMl(session: SparkSession) extends Runnable {
  override def run(): Unit = {
    val sqlContext = session.sqlContext

    // Create a temporary file and copy the contents of the resource avro to it
    val path = Files.createTempFile("mleap-databricks-runtime-testkit", ".avro")
    Files.copy(getClass.getClassLoader.getResource("datasources/lending_club_sample.avro").openStream(),
      path,
      StandardCopyOption.REPLACE_EXISTING)

    val sampleData = sqlContext.read.avro(path.toString)
    sampleData.show()

    val stringIndexer = new StringIndexer().
      setInputCol("fico_score_group_fnl").
      setOutputCol("fico_index")

    val featureAssembler = new VectorAssembler().
      setInputCols(Array(stringIndexer.getOutputCol, "dti", "loan_amount")).
      setOutputCol("features")

    val logisticRegression = new LogisticRegression().
      setFeaturesCol(featureAssembler.getOutputCol).
      setLabelCol("approved").
      setPredictionCol("prediction")

    val pipeline = new Pipeline().setStages(Array(stringIndexer, featureAssembler, logisticRegression))

    val model = pipeline.fit(sampleData)

    val modelPath = Files.createTempFile("mleap-databricks-runtime-testkit", ".zip")
    Files.delete(modelPath)

    // Save the model
    {
      println("Writing model to...", modelPath)
      implicit val sbc = SparkBundleContext.defaultContext.withDataset(model.transform(sampleData))
      val bf = BundleFile(new File(modelPath.toString))
      model.writeBundle.save(bf).get
      bf.close()
    }

    // Load the model
    {
      val bf = BundleFile(new File(modelPath.toString))
      bf.loadMleapBundle().get
      bf.close()
    }
  }
} 
Example 115
Source File: XGBoostRegressionModelOp.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.dmlc.xgboost4j.scala.spark.mleap

import java.nio.file.Files

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Model, NodeShape, Value}
import ml.combust.bundle.op.OpModel
import ml.dmlc.xgboost4j.scala.spark.XGBoostRegressionModel
import ml.dmlc.xgboost4j.scala.{XGBoost => SXGBoost}
import org.apache.spark.ml.bundle._
import org.apache.spark.ml.linalg.Vector
import resource.managed


  override val Model: OpModel[SparkBundleContext, XGBoostRegressionModel] = new OpModel[SparkBundleContext, XGBoostRegressionModel] {
    override val klazz: Class[XGBoostRegressionModel] = classOf[XGBoostRegressionModel]

    override def opName: String = "xgboost.regression"

    override def store(model: Model, obj: XGBoostRegressionModel)
                      (implicit context: BundleContext[SparkBundleContext]): Model = {
      assert(context.context.dataset.isDefined, BundleHelper.sampleDataframeMessage(klazz))

      Files.write(context.file("xgboost.model"), obj._booster.toByteArray)

      val numFeatures = context.context.dataset.get.select(obj.getFeaturesCol).first.getAs[Vector](0).size
      model.withValue("num_features", Value.int(numFeatures)).
        withValue("tree_limit", Value.int(obj.getOrDefault(obj.treeLimit)))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[SparkBundleContext]): XGBoostRegressionModel = {
      val booster = (for(in <- managed(Files.newInputStream(context.file("xgboost.model")))) yield {
        SXGBoost.loadModel(in)
      }).tried.get

      new XGBoostRegressionModel("", booster)
    }
  }

  override def sparkLoad(uid: String,
                         shape: NodeShape,
                         model: XGBoostRegressionModel): XGBoostRegressionModel = {
    new XGBoostRegressionModel(uid, model._booster)
  }

  override def sparkInputs(obj: XGBoostRegressionModel): Seq[ParamSpec] = {
    Seq("features" -> obj.featuresCol)
  }

  override def sparkOutputs(obj: XGBoostRegressionModel): Seq[SimpleParamSpec] = {
    Seq("prediction" -> obj.predictionCol,
      "leaf_prediction" -> obj.leafPredictionCol,
      "contrib_prediction" -> obj.contribPredictionCol)
  }
} 
Example 116
Source File: XGBoostClassificationModelOp.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.dmlc.xgboost4j.scala.spark.mleap

import java.nio.file.Files

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Model, NodeShape, Value}
import ml.combust.bundle.op.OpModel
import ml.dmlc.xgboost4j.scala.spark.XGBoostClassificationModel
import ml.dmlc.xgboost4j.scala.{XGBoost => SXGBoost}
import org.apache.spark.ml.bundle._
import org.apache.spark.ml.linalg.Vector
import resource._


  override val Model: OpModel[SparkBundleContext, XGBoostClassificationModel] = new OpModel[SparkBundleContext, XGBoostClassificationModel] {
    override val klazz: Class[XGBoostClassificationModel] = classOf[XGBoostClassificationModel]

    override def opName: String = "xgboost.classifier"

    override def store(model: Model, obj: XGBoostClassificationModel)
                      (implicit context: BundleContext[SparkBundleContext]): Model = {
      assert(context.context.dataset.isDefined, BundleHelper.sampleDataframeMessage(klazz))

      val thresholds = if(obj.isSet(obj.thresholds)) {
        Some(obj.getThresholds)
      } else None

      val out = Files.newOutputStream(context.file("xgboost.model"))
      try obj._booster.saveModel(out) finally out.close()

      val numFeatures = context.context.dataset.get.select(obj.getFeaturesCol).first.getAs[Vector](0).size
      model.withValue("thresholds", thresholds.map(_.toSeq).map(Value.doubleList)).
        withValue("num_classes", Value.int(obj.numClasses)).
        withValue("num_features", Value.int(numFeatures)).
        withValue("tree_limit", Value.int(obj.getOrDefault(obj.treeLimit)))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[SparkBundleContext]): XGBoostClassificationModel = {
      val booster = (for(in <- managed(Files.newInputStream(context.file("xgboost.model")))) yield {
        SXGBoost.loadModel(in)
      }).tried.get

      new XGBoostClassificationModel("", model.value("num_classes").getInt, booster)
    }
  }

  override def sparkLoad(uid: String,
                         shape: NodeShape,
                         model: XGBoostClassificationModel): XGBoostClassificationModel = {
    new XGBoostClassificationModel(uid, model.numClasses, model._booster)
  }

  override def sparkInputs(obj: XGBoostClassificationModel): Seq[ParamSpec] = {
    Seq("features" -> obj.featuresCol)
  }

  override def sparkOutputs(obj: XGBoostClassificationModel): Seq[SimpleParamSpec] = {
    Seq("raw_prediction" -> obj.rawPredictionCol,
      "prediction" -> obj.predictionCol,
      "probability" -> obj.probabilityCol,
      "leaf_prediction" -> obj.leafPredictionCol,
      "contrib_prediction" -> obj.contribPredictionCol)
  }
} 
Example 117
Source File: BundleWriter.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle

import java.net.URI
import java.nio.file.{Files, Paths}

import ml.combust.bundle.dsl.Bundle
import ml.combust.bundle.fs.BundleFileSystem
import ml.combust.bundle.serializer.{BundleSerializer, SerializationFormat}

import scala.util.Try
import resource._


case class BundleWriter[Context <: HasBundleRegistry,
Transformer <: AnyRef](root: Transformer,
                       name: Option[String] = None,
                       format: SerializationFormat = SerializationFormat.Json,
                       meta: Option[ml.bundle.Attributes] = None) {
  def name(value: String): BundleWriter[Context, Transformer] = copy(name = Some(value))
  def format(value: SerializationFormat): BundleWriter[Context, Transformer] = copy(format = value)
  def meta(value: ml.bundle.Attributes): BundleWriter[Context, Transformer] = copy(meta = Some(value))

  def save(file: BundleFile)
          (implicit context: Context): Try[Bundle[Transformer]] = {
    val n = name.getOrElse {
      context.bundleRegistry.opForObj[Any, Any, Any](root).name(root)
    }

    BundleSerializer(context, file).write(Bundle(name = n,
      format = format,
      root = root,
      meta = meta))
  }

  def save(uri: URI)
          (implicit context: Context): Try[Bundle[Transformer]] = uri.getScheme match {
    case "jar" | "file" =>
        (for (bf <- managed(BundleFile(uri))) yield {
          save(bf).get
        }).tried
    case _ =>
    val tmpDir = Files.createTempDirectory("bundle")
    val tmp = Paths.get(tmpDir.toString, "tmp.zip")

    (for (bf <- managed(BundleFile(tmp.toFile))) yield {
      save(bf).get
    }).tried.map {
      r =>
        context.bundleRegistry.fileSystemForUri(uri).save(uri, tmp.toFile)
        r
    }
  }
} 
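For URIs whose scheme is not jar or file, save(uri) first stages the bundle in a local temp zip and only then hands the file to the registered BundleFileSystem. A minimal sketch of that staging step, with a hypothetical object name and placeholder bytes instead of a real bundle:

import java.nio.file.{Files, Paths}

object TempZipStaging extends App {
  // Mirrors the non-file branch of BundleWriter.save(uri): serialize into a local temp zip first,
  // then hand that file to the file system registered for the URI scheme (omitted here).
  val tmpDir = Files.createTempDirectory("bundle")
  val tmpZip = Paths.get(tmpDir.toString, "tmp.zip")

  Files.write(tmpZip, "placeholder bundle bytes".getBytes("UTF-8"))
  println(s"staged at $tmpZip (${Files.size(tmpZip)} bytes)")
}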
Example 118
Source File: BundleSerializer.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle.serializer

import java.io.Closeable
import java.nio.file.Files

import ml.combust.bundle.{BundleContext, BundleFile, HasBundleRegistry}
import ml.combust.bundle.dsl.Bundle
import ml.combust.bundle.json.JsonSupport._
import spray.json._
import resource._

import scala.util.Try


// Assumed enclosing class (elided in this excerpt, along with the matching write method).
case class BundleSerializer[Context](context: Context, file: BundleFile)
                                    (implicit hr: HasBundleRegistry) extends Closeable {
  def read[Transformer <: AnyRef](): Try[Bundle[Transformer]] = {
    for(info <- file.readInfo();
        bundleContext = BundleContext(context,
          info.format,
          hr.bundleRegistry,
          file.fs,
          file.path);
        root <- NodeSerializer(bundleContext.bundleContext("root")).read()) yield {
      Bundle(info, root.asInstanceOf[Transformer])
    }
  }

  override def close(): Unit = file.close()
} 
Example 119
Source File: NodeSerializer.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle.serializer

import java.nio.file.{Files, Path}

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Bundle, Node}
import ml.combust.bundle.json.JsonSupport._
import spray.json._

import scala.util.Try


// Assumed enclosing class (elided in this excerpt, along with the matching write method).
case class NodeSerializer[Context](bundleContext: BundleContext[Context]) {
  def read(): Try[Any] = {
    Try(FormatNodeSerializer.serializer.read(bundleContext.file(Bundle.nodeFile))).flatMap {
      node =>
        ModelSerializer(bundleContext).readWithModel().flatMap {
          case (model, m) =>
            Try {
              val op = bundleContext.bundleRegistry[Context, Any, Any](m.op)
              op.load(node, model)(bundleContext)
            }
        }
    }
  }
} 
Example 120
Source File: FileUtil.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle.util

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}


object FileUtil {
  def rmRf(path: Path): Unit = {
    Files.walkFileTree(path, new SimpleFileVisitor[Path]() {
      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }

      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        Files.delete(dir)
        FileVisitResult.CONTINUE
      }
    })
  }
} 
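A short usage sketch of the recursive delete above; the object name is hypothetical and it assumes the FileUtil object from this example is on the classpath:

import java.nio.file.Files
import ml.combust.bundle.util.FileUtil

object RmRfDemo extends App {
  // Build a tiny tree, then remove it with the visitor-based delete above.
  val root = Files.createTempDirectory("rmrf-demo")
  val sub  = Files.createDirectory(root.resolve("sub"))
  Files.write(sub.resolve("a.txt"), "hello".getBytes("UTF-8"))

  FileUtil.rmRf(root)
  assert(!Files.exists(root))
}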
Example 121
Source File: BundleFileSystemSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle.serializer

import java.net.URI
import java.nio.file.Files

import ml.combust.bundle.test.TestSupport._
import ml.combust.bundle.{BundleFile, BundleRegistry}
import ml.combust.bundle.test.ops._
import ml.combust.bundle.test.{TestBundleFileSystem, TestContext}
import org.scalatest.FunSpec
import resource.managed

import scala.util.Random

class BundleFileSystemSpec extends FunSpec {
  implicit val testContext = TestContext(BundleRegistry("test-registry").
    registerFileSystem(new TestBundleFileSystem))

  val randomCoefficients = (0 to 100000).map(v => Random.nextDouble())
  val lr = LinearRegression(uid = "linear_regression_example",
    input = "input_field",
    output = "output_field",
    model = LinearModel(coefficients = randomCoefficients,
      intercept = 44.5))

  describe("saving/loading bundle file using test file system") {
    it("loads/saves using the custom file system") {
      val tmpDir = Files.createTempDirectory("BundleFileSystemSpec")
      val uri = new URI(s"test://$tmpDir/test.zip")

      lr.writeBundle.name("my_bundle").save(uri)
      val loaded = uri.loadBundle().get

      assert(loaded.root == lr)
    }
  }
} 
Example 122
Source File: S3Repository.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.repository.s3

import java.net.URI
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.concurrent.Executors

import akka.actor.ActorSystem
import com.amazonaws.services.s3.{AmazonS3ClientBuilder, AmazonS3URI}
import com.typesafe.config.Config
import ml.combust.mleap.executor.repository.{Repository, RepositoryProvider}

import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.TimeUnit
import scala.util.Try

class S3RepositoryConfig(config: Config) {
  val threads: Int = config.getInt("threads")
}

class S3Repository(config: S3RepositoryConfig) extends Repository {
  private val client = AmazonS3ClientBuilder.defaultClient()
  private val threadPool = Executors.newFixedThreadPool(config.threads)
  implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool)

  override def downloadBundle(uri: URI): Future[Path] = Future {
    val s3Uri = new AmazonS3URI(uri)
    val bucket = s3Uri.getBucket
    val key = s3Uri.getKey

    val tmpFile = Files.createTempFile("mleap", ".bundle.zip")
    Files.copy(client.getObject(bucket, key).getObjectContent, tmpFile, StandardCopyOption.REPLACE_EXISTING)
    tmpFile
  }

  override def canHandle(uri: URI): Boolean = Try(new AmazonS3URI(uri)).isSuccess

  override def shutdown(): Unit = threadPool.shutdown()
  override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit)
}

class S3RepositoryProvider extends RepositoryProvider {
  override def create(config: Config)
                     (implicit system: ActorSystem): S3Repository = {
    new S3Repository(new S3RepositoryConfig(config))
  }
} 
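The download path above is just "create a temp file, stream the remote bytes into it". A self-contained sketch with a byte-array stream standing in for the S3 object content; the object name is hypothetical:

import java.io.ByteArrayInputStream
import java.nio.file.{Files, StandardCopyOption}

object DownloadToTempDemo extends App {
  // The NIO half of downloadBundle, with an in-memory stream standing in for getObjectContent.
  val remoteBytes = new ByteArrayInputStream("bundle-bytes".getBytes("UTF-8"))

  val tmpFile = Files.createTempFile("mleap", ".bundle.zip")
  // createTempFile already created the target, so REPLACE_EXISTING is required here.
  Files.copy(remoteBytes, tmpFile, StandardCopyOption.REPLACE_EXISTING)

  println(new String(Files.readAllBytes(tmpFile), "UTF-8"))
}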
Example 123
Source File: HadoopBundleFileSystem.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.bundle.hdfs

import java.io.File
import java.net.URI
import java.nio.file.{Files, Paths}

import com.typesafe.config.Config
import ml.combust.bundle.fs.BundleFileSystem
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

import scala.util.Try
import scala.collection.JavaConverters._

object HadoopBundleFileSystem {
  lazy val defaultSchemes: Seq[String] = Seq("hdfs")

  def createHadoopConfiguration(config: Config): Configuration = {
    val options: Map[String, String] = if(config.hasPath("options")) {
      config.getConfig("options").entrySet().asScala.map {
        entry => (entry.getKey, entry.getValue.unwrapped().toString)
      }.toMap
    } else {
      Map()
    }

    val c = new Configuration()
    for ((key, value) <- options) { c.set(key, value) }
    c
  }

  def createSchemes(config: Config): Seq[String] = if (config.hasPath("schemes")) {
    config.getStringList("schemes").asScala
  } else { Seq("hdfs") }
}

class HadoopBundleFileSystem(fs: FileSystem,
                             override val schemes: Seq[String] = HadoopBundleFileSystem.defaultSchemes) extends BundleFileSystem {
  def this(config: Config) = {
    this(FileSystem.get(HadoopBundleFileSystem.createHadoopConfiguration(config)),
      HadoopBundleFileSystem.createSchemes(config))
  }

  override def load(uri: URI): Try[File] = Try {
    val tmpDir = Files.createTempDirectory("hdfs-bundle")
    val tmpFile = Paths.get(tmpDir.toString, "bundle.zip")
    fs.copyToLocalFile(new Path(uri.toString), new Path(tmpFile.toString))
    tmpFile.toFile
  }

  override def save(uri: URI, localFile: File): Unit = {
    fs.copyFromLocalFile(new Path(localFile.toString), new Path(uri.toString))
  }
} 
Example 124
Source File: HadoopBundleFileSystemSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.bundle.hdfs

import java.net.URI
import java.nio.file.{Files, Paths}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.FunSpec

class HadoopBundleFileSystemSpec extends FunSpec {
  private val fs = FileSystem.get(new Configuration())
  private val bundleFs = new HadoopBundleFileSystem(fs)

  describe("scheme") {
    it("returns hdfs") {
      assert(bundleFs.schemes == Seq("hdfs"))
    }
  }

  describe("load") {
    it("loads a file from hadoop and saves to a local file") {
      val testFile = Files.createTempFile("HadoopBundleFileSystemSpec", ".txt")
      Files.write(testFile.toAbsolutePath, "HELLO".getBytes())

      val loadedFile = bundleFs.load(testFile.toUri).get
      val contents = new String(Files.readAllBytes(loadedFile.toPath))

      assert(contents == "HELLO")
    }
  }

  describe("save") {
    it("saves local file to HDFS") {
      val testFile = Files.createTempFile("HadoopBundleFileSystemSpec", ".txt")
      Files.write(testFile.toAbsolutePath, "HELLO".getBytes())

      val tmpDir = Files.createTempDirectory("HadoopBundleFileSystemSpec")
      val tmpFile = new URI(s"file://$tmpDir/test.txt")

      bundleFs.save(tmpFile, testFile.toFile)
      val contents = new String(Files.readAllBytes(Paths.get(tmpFile)))

      assert(contents == "HELLO")
    }
  }
} 
Example 125
Source File: ModelLoader.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.springboot

import TypeConverters._
import javax.annotation.PostConstruct
import org.slf4j.LoggerFactory
import ml.combust.mleap.pb
import org.springframework.beans.factory.annotation.{Autowired, Value}
import org.springframework.stereotype.Component

import scala.collection.JavaConverters._
import java.nio.file.{Files, Path, Paths}

import ml.combust.mleap.executor.MleapExecutor
import scalapb.json4s.Parser

@Component
class ModelLoader(@Autowired val mleapExecutor: MleapExecutor,
                  @Autowired val jsonParser: Parser) {

  @Value("${mleap.model.config:#{null}}")
  private val modelConfigPath: String = null

  private val logger = LoggerFactory.getLogger(classOf[ModelLoader])
  private val timeout = 60000

  @PostConstruct
  def loadModel(): Unit = {
    if (modelConfigPath == null) {
      logger.info("Skipping loading model on startup")
      return
    }

    val configPath = Paths.get(modelConfigPath)

    if (!Files.exists(configPath)) {
      logger.warn(s"Model path does not exist: $modelConfigPath")
      return
    }

    val configFiles: List[Path] = if (Files.isDirectory(configPath)) {
      Files.list(configPath).iterator().asScala.toList
    } else {
      List(configPath)
    }

    for (configFile <- configFiles) {
      logger.info(s"Loading model from ${configFile.toString}")

      val request = new String(Files.readAllBytes(configFile))

      mleapExecutor.loadModel(jsonParser.fromJsonString[pb.LoadModelRequest](request))(timeout)
    }
  }
} 
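The loader treats a directory as "load every file inside it" and anything else as a single config file. A small sketch of that expansion rule outside Spring; the object and file names are hypothetical:

import java.nio.file.{Files, Path}
import scala.collection.JavaConverters._

object ConfigFilesDemo extends App {
  // Same expansion rule as ModelLoader: a directory means "every file inside it",
  // anything else is treated as a single config file.
  def configFiles(p: Path): List[Path] =
    if (Files.isDirectory(p)) Files.list(p).iterator().asScala.toList
    else List(p)

  val dir = Files.createTempDirectory("model-configs")
  Files.write(dir.resolve("model-a.json"), "{}".getBytes("UTF-8"))
  Files.write(dir.resolve("model-b.json"), "{}".getBytes("UTF-8"))

  configFiles(dir).foreach(f => println(s"$f -> ${new String(Files.readAllBytes(f))}"))
}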
Example 126
Source File: TestUtil.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.springboot

import java.io.File
import java.net.URI
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.mleap.core.types.{ScalarType, StructField, StructType}
import ml.combust.mleap.runtime.frame.{DefaultLeapFrame, Row}

object TestUtil {

  lazy val demoUri = getClass.getClassLoader.getResource("demo.zip").toURI

  lazy val validFrame = DefaultLeapFrame(
    StructType(Seq(StructField("demo:a", ScalarType.Double),
      StructField("demo:c", ScalarType.Double),
      StructField("demo:d", ScalarType.Double))).get,
    Seq(Row(44.5, 22.1, 98.2)))

  lazy val incompleteFrame = DefaultLeapFrame(
    StructType(Seq(StructField("demo:a", ScalarType.Double),
      StructField("demo:d", ScalarType.Double))).get,
    Seq(Row(44.5, 98.2)))

  lazy val failingBytes = Array[Byte](69, 121, 101, 45, 62, 118, 101, 114, 61, 101, 98)

  def getBundle(uri: URI, createTmp: Boolean): URI = {
    if (createTmp) {
      val tmpFile = Files.createTempFile("demo", ".bundle.zip")
      val file = new File(uri.getPath).toPath
      Files.copy(file, tmpFile, StandardCopyOption.REPLACE_EXISTING)
      tmpFile.toFile.deleteOnExit()
      tmpFile.toUri
    } else {
      uri
    }
  }
} 
Example 127
Source File: FileRepositorySpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.executor.repository

import java.io.File
import java.net.URI
import java.nio.file.Files

import ml.combust.mleap.executor.error.BundleException
import ml.combust.mleap.executor.testkit.TestUtil
import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers}
import org.scalatest.concurrent.ScalaFutures

class FileRepositorySpec extends FunSpec
  with ScalaFutures
  with Matchers
  with BeforeAndAfterAll {
  val repository = new FileRepository()

  override protected def afterAll(): Unit = repository.shutdown()

  describe("downloading a local bundle") {
    it("returns the local file path") {
      val path = repository.downloadBundle(TestUtil.lrUri)

      whenReady(path) {
        p => assert(Files.readAllBytes(new File(TestUtil.lrUri.getPath).toPath).sameElements(Files.readAllBytes(p)))
      }
    }

    it("throws an exception when local file doesn't exist") {
      whenReady(repository.downloadBundle(URI.create("does-not-exist")).failed) {
        ex => ex shouldBe a [BundleException]
      }
    }

    it("throws an exception with empty file path") {
      whenReady(repository.downloadBundle(URI.create("")).failed) {
        ex => ex shouldBe a [BundleException]
      }
    }
  }
} 
Example 128
Source File: TestSpec.scala    From spark-distcp   with Apache License 2.0 5 votes vote down vote up
package com.coxautodata

import java.io.ByteArrayInputStream
import java.nio.file.Files

import com.coxautodata.objects.SerializableFileStatus
import com.coxautodata.utils.FileListing
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, LocalFileSystem, Path}
import org.scalatest.{BeforeAndAfterEach, FunSpec, Matchers}

trait TestSpec extends FunSpec with Matchers with BeforeAndAfterEach {

  var testingBaseDir: java.nio.file.Path = _
  var testingBaseDirName: String = _
  var testingBaseDirPath: Path = _
  var localFileSystem: LocalFileSystem = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    testingBaseDir = Files.createTempDirectory("test_output")
    testingBaseDirName = testingBaseDir.toString
    localFileSystem = FileSystem.getLocal(new Configuration())
    testingBaseDirPath = localFileSystem.makeQualified(new Path(testingBaseDirName))
  }

  override def afterEach(): Unit = {
    super.afterEach()
    FileUtils.deleteDirectory(testingBaseDir.toFile)
  }

  def createFile(relativePath: Path, content: Array[Byte]): SerializableFileStatus = {
    val path = new Path(testingBaseDirPath, relativePath)
    localFileSystem.mkdirs(path.getParent)
    val in = new ByteArrayInputStream(content)
    val out = localFileSystem.create(path)
    IOUtils.copy(in, out)
    in.close()
    out.close()
    SerializableFileStatus(localFileSystem.getFileStatus(path))
  }

  def fileStatusToResult(f: SerializableFileStatus): FileListing = {
    FileListing(f.getPath.toString, if (f.isFile) Some(f.getLen) else None)
  }

} 
Example 129
Source File: JGitSystemReader.scala    From sbt-dynver   with Apache License 2.0 5 votes vote down vote up
package sbtdynver

import java.io.{ File, IOException }
import java.net.{ InetAddress, UnknownHostException }
import java.nio.file.{ Files, InvalidPathException, Path, Paths }

import org.eclipse.jgit.internal.JGitText
import org.eclipse.jgit.lib.{ Config, Constants }
import org.eclipse.jgit.storage.file.FileBasedConfig
import org.eclipse.jgit.util.{ FS, StringUtils, SystemReader }
import org.slf4j.LoggerFactory

// Copy of org.eclipse.jgit.util.SystemReader.Default with:
// * calls to Files.createDirectories guarded by if !Files.isDirectory
//   necessary because my ~/.config is a symlink to a directory
//   which Files.createDirectories isn't happy with
object JGitSystemReader extends SystemReader {
  private val LOG = LoggerFactory.getLogger(getClass)

  lazy val init: Unit = SystemReader.setInstance(this)

  override lazy val getHostname = {
    try InetAddress.getLocalHost.getCanonicalHostName
    catch { case _: UnknownHostException => "localhost" }
  }.ensuring(_ != null)

  override def getenv(variable: String): String = System.getenv(variable)
  override def getProperty(key: String): String = System.getProperty(key)
  override def getCurrentTime: Long             = System.currentTimeMillis
  override def getTimezone(when: Long): Int     = getTimeZone.getOffset(when) / (60 * 1000)

  override def openUserConfig(parent: Config, fs: FS) =
    new FileBasedConfig(parent, new File(fs.userHome, ".gitconfig"), fs)

  override def openSystemConfig(parent: Config, fs: FS): FileBasedConfig = {
    if (StringUtils.isEmptyOrNull(getenv(Constants.GIT_CONFIG_NOSYSTEM_KEY))) {
      val configFile = fs.getGitSystemConfig
      if (configFile != null) return new FileBasedConfig(parent, configFile, fs)
    }
    new FileBasedConfig(parent, null, fs) {
      override def load(): Unit = () // do not load
      override def isOutdated   = false // regular class would bomb here
    }
  }

  override def openJGitConfig(parent: Config, fs: FS): FileBasedConfig = {
    val xdgPath = getXDGConfigHome(fs)
    if (xdgPath != null) {
      var configPath: Path = null
      try {
        configPath = xdgPath.resolve("jgit")
        if (!Files.isDirectory(configPath))
          Files.createDirectories(configPath)
        configPath = configPath.resolve(Constants.CONFIG)
        return new FileBasedConfig(parent, configPath.toFile, fs)
      } catch {
        case e: IOException =>
          LOG.error(JGitText.get.createJGitConfigFailed, configPath: Any, e)
      }
    }
    new FileBasedConfig(parent, new File(fs.userHome, ".jgitconfig"), fs)
  }

  private def getXDGConfigHome(fs: FS): Path = {
    var configHomePath = getenv(Constants.XDG_CONFIG_HOME)
    if (StringUtils.isEmptyOrNull(configHomePath))
      configHomePath = new File(fs.userHome, ".config").getAbsolutePath
    try {
      val xdgHomePath = Paths.get(configHomePath)
      if (!Files.isDirectory(xdgHomePath))
        Files.createDirectories(xdgHomePath)
      xdgHomePath
    } catch {
      case e @ (_: IOException | _: InvalidPathException) =>
        LOG.error(JGitText.get.createXDGConfigHomeFailed, configHomePath: Any, e)
        null
    }
  }
} 
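The point of this copy is the guard around Files.createDirectories: skipping the call when the target is already a directory keeps it from failing on a symlink such as ~/.config. A minimal sketch of just that guard; the object name and path are hypothetical:

import java.nio.file.{Files, Path, Paths}

object EnsureDirDemo extends App {
  // The guard used above: only call createDirectories when the target is not already a directory,
  // so a symlink that points at a directory (e.g. ~/.config) is left alone.
  def ensureDir(p: Path): Path = {
    if (!Files.isDirectory(p)) Files.createDirectories(p)
    p
  }

  val dir = ensureDir(Paths.get(sys.props("java.io.tmpdir"), "ensure-dir-demo", "jgit"))
  println(s"$dir isDirectory=${Files.isDirectory(dir)}")
}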
Example 130
Source File: ZSinkPlatformSpecificSpec.scala    From zio   with Apache License 2.0 5 votes vote down vote up
package zio.stream

import java.nio.file.Files

import zio._
import zio.test.Assertion._
import zio.test._

object ZSinkPlatformSpecificSpec extends ZIOBaseSpec {
  override def spec = suite("ZSink JVM")(
    suite("fromFile")(
      testM("writes to an existing file") {
        val data = (0 to 100).mkString

        Task(Files.createTempFile("stream", "fromFile"))
          .bracket(path => Task(Files.delete(path)).orDie) { path =>
            for {
              bytes  <- Task(data.getBytes("UTF-8"))
              length <- ZStream.fromIterable(bytes).run(ZSink.fromFile(path))
              str    <- Task(new String(Files.readAllBytes(path)))
            } yield assert(data)(equalTo(str)) && assert(bytes.length.toLong)(equalTo(length))
          }

      }
    )
  )
} 
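The test wraps the temp file in a ZIO bracket so it is deleted whether or not the sink fails. The same acquire/use/release discipline in plain Scala, as a hedged sketch with a hypothetical helper name:

import java.nio.file.{Files, Path}

object TempFileBracket extends App {
  // Plain-Scala version of the acquire/use/release pattern the ZIO bracket above expresses.
  def withTempFile[A](prefix: String, suffix: String)(use: Path => A): A = {
    val path = Files.createTempFile(prefix, suffix)
    try use(path) finally Files.delete(path)
  }

  val length = withTempFile("stream", "fromFile") { path =>
    Files.write(path, (0 to 100).mkString.getBytes("UTF-8"))
    Files.size(path)
  }
  println(s"wrote $length bytes")
}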
Example 131
Source File: PluginMock.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.download.plugin

import java.net.URI
import java.nio.file.{FileSystems, Files, Path}

import org.jetbrains.sbtidea.TmpDirUtils
import org.jetbrains.sbtidea.packaging.artifact
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.plugin.PluginDescriptor.Dependency

import scala.collection.JavaConverters._

trait PluginMock extends TmpDirUtils {

  implicit class PluginMetaDataExt(metadata: PluginDescriptor) {
    def toPluginId: IntellijPlugin.Id = IntellijPlugin.Id(metadata.id, Some(metadata.version), None)
  }

  protected def createPluginJarMock(metaData: PluginDescriptor): Path = {
    val tmpDir = newTmpDir
    val targetPath = tmpDir.resolve(s"${metaData.name}.jar")
    val targetUri = URI.create("jar:" + targetPath.toUri)
    val opts = Map("create" -> "true").asJava
    artifact.using(FileSystems.newFileSystem(targetUri, opts)) { fs =>
      Files.createDirectory(fs.getPath("/", "META-INF"))
      Files.write(
        fs.getPath("/", "META-INF", "plugin.xml"),
        createPluginXmlContent(metaData).getBytes
      )
    }
    targetPath
  }

  protected def createPluginZipMock(metaData: PluginDescriptor): Path = {
    val tmpDir = newTmpDir
    val targetPath = tmpDir.resolve(s"${metaData.name}.zip")
    val targetUri = URI.create("jar:" + targetPath.toUri)
    val opts = Map("create" -> "true").asJava

    val mainPluginJar = createPluginJarMock(metaData)

    artifact.using(FileSystems.newFileSystem(targetUri, opts)) { fs =>
      val libRoot = fs.getPath("/", metaData.name, "lib")
      Files.createDirectories(libRoot)
      Files.copy(
        mainPluginJar,
        libRoot.resolve(mainPluginJar.getFileName.toString)
      )
    }
    targetPath
  }


  protected def createPluginXmlContent(metaData: PluginDescriptor): String = {
    val depStr = metaData.dependsOn.map {
      case Dependency(id, true)  => s"""<depends optional="true">$id</depends>"""
      case Dependency(id, false) => s"<depends>$id</depends>"
    }
    s"""
       |<idea-plugin>
       |  <name>${metaData.name}</name>
       |  <id>${metaData.id}</id>
       |  <version>${metaData.version}</version>
       |  <idea-version since-build="${metaData.sinceBuild}" until-build="${metaData.untilBuild}"/>
       |  ${depStr.mkString("\n")}
       |</idea-plugin>
       |""".stripMargin
  }

} 
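Both mock builders rely on opening a zip as a FileSystem via a "jar:" URI with create=true, then writing entries through ordinary Files calls. A stand-alone sketch of that trick without the sbt-idea-plugin helpers; the object name is hypothetical:

import java.net.URI
import java.nio.file.{FileSystems, Files}
import scala.collection.JavaConverters._

object ZipFsDemo extends App {
  // The same zip-as-file-system trick used by createPluginJarMock/createPluginZipMock above.
  val target = Files.createTempDirectory("zipfs-demo").resolve("archive.zip")
  val fs = FileSystems.newFileSystem(URI.create("jar:" + target.toUri), Map("create" -> "true").asJava)
  try {
    Files.createDirectory(fs.getPath("/", "META-INF"))
    Files.write(fs.getPath("/", "META-INF", "plugin.xml"), "<idea-plugin/>".getBytes("UTF-8"))
  } finally fs.close()
  println(s"created ${Files.size(target)}-byte zip at $target")
}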
Example 132
Source File: IdeaMock.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.download.idea

import java.net.{URI, URL}
import java.nio.file.{Files, Path, Paths}
import java.util.zip.{ZipEntry, ZipInputStream}

import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.packaging.artifact
import org.jetbrains.sbtidea.{Keys, TmpDirUtils}
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.jbr.JbrDependency

trait IdeaMock extends TmpDirUtils {
  protected val IDEA_VERSION      = "192.5728.12"
  protected val IDEA_EDITION      = "IU"
  protected val IDEA_DIST         = s"idea$IDEA_EDITION-$IDEA_VERSION.zip"
  protected val IDEA_DIST_PATH    = s"/org/jetbrains/sbtidea/download/$IDEA_DIST"
  protected val IDEA_BUILDINFO: BuildInfo =
    BuildInfo(IDEA_VERSION, Keys.IntelliJPlatform.IdeaUltimate, Some(JbrDependency.VERSION_AUTO))
  protected val IDEA_DEP: IdeaDependency  = IdeaDependency(IDEA_BUILDINFO)
  protected val IDEA_ART: IdeaDist        = IdeaDistImpl(IDEA_DEP, new URL("file:"))

  protected val bundledPlugins: List[Keys.IntellijPlugin] =
    "org.jetbrains.plugins.yaml".toPlugin ::
    "com.intellij.properties".toPlugin :: Nil

  protected def installIdeaMock: Path = {
    val tmpDir      = newTmpDir
    val installDir  = Files.createDirectory(tmpDir.resolve(IDEA_VERSION))
    val stream      = getClass.getResourceAsStream(IDEA_DIST_PATH)
    artifact.using(new ZipInputStream(stream)) { zip =>
      var entry: ZipEntry = zip.getNextEntry
      while (entry != null) {
        val toPath = installDir.resolve(entry.getName)
        if (entry.isDirectory)
          Files.createDirectory(toPath)
        else
          Files.copy(zip, toPath)
        entry = zip.getNextEntry
      }
    }
    installDir
  }

  protected def getDistCopy: Path = Files.copy(getIdeaDistMockPath, newTmpDir.resolve(IDEA_DIST))

  protected def getIdeaDistMockURI: URI = getClass.getResource(IDEA_DIST_PATH).toURI

  protected def getIdeaDistMockPath: Path = Paths.get(getIdeaDistMockURI)
} 
Example 133
Source File: DynamicDistBuilder.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{Files, Path, Paths, StandardCopyOption}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter
import org.jetbrains.sbtidea.packaging._
import sbt.File
import sbt.Keys.TaskStreams

class DynamicDistBuilder(stream: TaskStreams, target: File, outputDir: File, private val hints: Seq[File]) extends DistBuilder(stream, target) {

  override def packageJar(to: Path, mappings: Mappings): Unit = {
    val isStatic = mappings.forall(_.metaData.static)
    if (isStatic)
      super.packageJar(to, mappings)
    else {
      val newOutputPath = outputDir.toPath.resolve("classes")
      if (!Files.exists(newOutputPath) || hints.isEmpty)
        packageNoHints(newOutputPath, mappings)
      else
        packageUsingHints(newOutputPath)
    }
  }

  private def packageUsingHints(newOutputPath: Path): Unit = {
    timed(s"Using ${hints.size} hints from previous compilation: $newOutputPath", {
      val key = "classes"
      val offset = key.length + 1
      for (hint <- hints) {
        val hintStr = hint.toString
        val relativisedStr = hintStr.substring(hintStr.indexOf(key) + offset)
        val newRelativePath = Paths.get(relativisedStr)
        val newAbsolutePath = newOutputPath.resolve(newRelativePath)
        if (newAbsolutePath.toFile.getParentFile == null || !newAbsolutePath.toFile.getParentFile.exists())
          Files.createDirectories(newAbsolutePath.getParent)
        Files.copy(hint.toPath, newAbsolutePath, StandardCopyOption.REPLACE_EXISTING)
      }
    })
  }

  private def packageNoHints(newOutputPath: Path, mappings: Mappings): Unit = {
    val packager = new DynamicPackager(newOutputPath, new NoOpClassShader, ExcludeFilter.AllPass, incrementalCache)
    timed(s"classes(${mappings.size}): $newOutputPath",
      packager.mergeIntoOne(mappings.map(_.from.toPath))
    )
  }

  override def patch(to: Path, mappings: Mappings): Unit = {
    streams.log.info(s"Patching has no effect when building dynamic artifact")
  }

} 
Example 134
Source File: IncrementalCache.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.packaging.artifact

import java.io.{BufferedOutputStream, ByteArrayInputStream, ObjectInputStream, ObjectOutputStream}
import java.nio.file.{Files, Path}

import sbt.Keys.TaskStreams

import scala.collection.mutable

trait IncrementalCache extends AutoCloseable {
  def fileChanged(in: Path): Boolean
}

class DumbIncrementalCache extends IncrementalCache {
  override def fileChanged(in: Path): Boolean = true
  override def close(): Unit = ()
}

class PersistentIncrementalCache(private val root: Path)(implicit private val streams: TaskStreams) extends IncrementalCache {

  private val FILENAME = "sbtidea.cache"
  private val myFile   = root.resolve(FILENAME)
  private val myData   = loadOrCreate()

  type Data = mutable.HashMap[String, Long]

  private def loadFromDisk(): Either[String, Data] = {
    if (!Files.exists(myFile) || Files.size(myFile) <= 0)
      return Left("Cache file is empty or doesn't exist")
    val data = Files.readAllBytes(myFile)
    using(new ObjectInputStream(new ByteArrayInputStream(data))) { stream =>
      Right(stream.readObject().asInstanceOf[Data])
    }
  }

  private def loadOrCreate(): Data = loadFromDisk() match {
    case Left(message) =>
      streams.log.info(message)
      new Data()
    case Right(value) => value
  }

  private def saveToDisk(): Unit = {
    import java.nio.file.StandardOpenOption._
    if (!Files.exists(myFile.getParent)) {
      Files.createDirectories(myFile.getParent)
      Files.createFile(myFile)
    }
    using(new ObjectOutputStream(
          new BufferedOutputStream(
            Files.newOutputStream(myFile, CREATE, WRITE, TRUNCATE_EXISTING)))) { stream =>
      stream.writeObject(myData)
    }
  }

  override def close(): Unit = saveToDisk()

  override def fileChanged(in: Path): Boolean = {
    val newTimestamp = Files.getLastModifiedTime(in).toMillis
    val inStr = in.toString
    val lastTimestamp = myData.getOrElseUpdate(inStr, newTimestamp)
    val result = newTimestamp > lastTimestamp
    myData.put(inStr, newTimestamp)
    result
  }
} 
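The cache's change test compares the file's current last-modified time with the one it remembered on the previous sighting. A stand-alone sketch of that logic, using an explicit setLastModifiedTime so the change is deterministic; the object name is hypothetical:

import java.nio.file.attribute.FileTime
import java.nio.file.{Files, Path}
import scala.collection.mutable

object TimestampCacheDemo extends App {
  // Stand-alone version of fileChanged above: remember the last-seen mtime per path and
  // report true only when the current timestamp is newer than the remembered one.
  val seen = mutable.HashMap.empty[String, Long]

  def fileChanged(p: Path): Boolean = {
    val now  = Files.getLastModifiedTime(p).toMillis
    val last = seen.getOrElseUpdate(p.toString, now)
    seen.put(p.toString, now)
    now > last
  }

  val f = Files.createTempFile("inc-cache", ".txt")
  println(fileChanged(f))  // false: first sighting is remembered, not reported as a change
  Files.setLastModifiedTime(f, FileTime.fromMillis(System.currentTimeMillis + 2000))
  println(fileChanged(f))  // true: the file now looks newer than the cached timestamp
}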
Example 135
Source File: DynamicPackager.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{FileSystem, Files, Path}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter._
import sbt.Keys.TaskStreams

class DynamicPackager(myOutput: Path,
                      shader: ClassShader,
                      excludeFilter: ExcludeFilter,
                      incrementalCache: IncrementalCache)
                     (implicit private val streams: TaskStreams)
  extends SimplePackager(myOutput, shader, excludeFilter, incrementalCache) {

  override protected def outputExists(path: Path): Boolean = Files.exists(path)

  override protected def createOutputFS(output: Path): FileSystem = {
    if (!output.toFile.exists())
      Files.createDirectories(output)
    output.getFileSystem
  }

  override protected def createOutput(srcPath: Path, output: Path, outputFS: FileSystem): Path = {
    val srcTranslated = translatePath(srcPath, myOutput.getFileSystem)
    if (srcPath.toString.contains("META-INF"))
      myOutput.getParent.resolve(srcTranslated)
    else myOutput.resolve(srcTranslated)
  }

  private def translatePath(path: Path, toFS: FileSystem) = {
    val pathFS = path.getFileSystem
    val pathSeparator = pathFS.getSeparator
    val toSeparator = toFS.getSeparator
    val adapted = path.toString.replace(pathSeparator, toSeparator)
    toFS.getPath(adapted)
  }

} 
Example 136
Source File: ClassShader.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{Files, Path, StandardOpenOption}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ShadePattern
import org.pantsbuild.jarjar.{NiceJJProcessor, _}
import org.pantsbuild.jarjar.util.EntryStruct
import sbt.Keys.TaskStreams

class ClassShader(patterns: Seq[ShadePattern])(implicit val streams: TaskStreams) {

  private val processor = new NiceJJProcessor(patterns.map {
    case ShadePattern(pat, res) =>
      val jRule = new Rule()
      jRule.setPattern(pat)
      jRule.setResult(res)
      jRule
  })

  private val entry = new EntryStruct

  if (streams!=null)
    streams.log.info(s"Initialized shader with ${patterns.size} patterns")

  def applyShading(from: Path, to: Path)(cont: => Unit): Unit = {
    entry.data = Files.readAllBytes(from)
    entry.name = from.toString.substring(1).replace('\\', '/') // drop the leading '/', which zip file systems reject, and normalize Windows '\' separators in class names
    entry.time = -1
    if (processor.process(entry)) {
      val newPath = to.getFileSystem.getPath(entry.name)
      val parent = newPath.getParent
      if (parent != null && !Files.exists(parent))
        Files.createDirectories(parent)
      Files.write(newPath, entry.data, StandardOpenOption.CREATE)
    }
  }

}

class NoOpClassShader() extends ClassShader(Seq())(null) {
  override def applyShading(from: Path, to: Path)(cont: => Unit): Unit = cont
} 
Example 137
Source File: apiAdapter.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package sbt.jetbrains.ideaPlugin

import java.io.InputStream
import java.nio.file.{Files, Path}

import sbt.File
import sbt.inc._
import java.util.Optional

object apiAdapter {
  type CompileResult = sbt.inc.Analysis
  type BuildDependencies = sbt.BuildDependencies
  val Using = sbt.Using
  def projectJarName(project: sbt.Project): String = s"${project.id}.jar"
  def extractAffectedFiles(initialTimestamp: Long, result: Seq[CompileResult]): Seq[File] = {
    def processCompilation(compileResult: CompileResult): Seq[File] = {
      val lastCompilation = compileResult.compilations.allCompilations.find(_.startTime() >= initialTimestamp).getOrElse(return Seq.empty)
      val startTime       = lastCompilation.startTime()
      val res = compileResult.stamps.products.collect {
        case (f, s:LastModified) if s.value >= startTime => f
      }.toSeq
      res
    }
    val res = result.flatMap(processCompilation)
    res
  }

  object SbtCompilationBackCompat {
    type Analysis         = sbt.inc.Analysis
    type Relations        = sbt.inc.Relations
    type CompileResult    = sbt.Compiler.CompileResult
    type CompileAnalysis  = sbt.inc.Analysis
    type PreviousResult   = sbt.Compiler.PreviousAnalysis
    type ClassFileManager = sbt.inc.ClassfileManager
    type IncOptions       = sbt.inc.IncOptions

    val Analysis = sbt.inc.Analysis

    implicit class CompileResultExt(val result: PreviousResult) extends AnyVal {
      def getAnalysis: Optional[CompileAnalysis] = Optional.of(result.analysis)
    }

    implicit class IncOptionsExt(val options: IncOptions) extends AnyVal {
      def withClassfileManager(manager: ClassFileManager): IncOptions =
        options.withNewClassfileManager(() => manager)
    }

    object PreviousResult {
      def empty(): PreviousResult =
        sbt.Compiler.PreviousAnalysis(Analysis.Empty, None)
    }
  }

  final class PathExt(val path: Path) extends AnyVal {
    import scala.collection.JavaConverters.asScalaIteratorConverter

    def /(string: String): Path = path.resolve(string)
    def list: Seq[Path] = Files.list(path).iterator().asScala.toSeq
    def exists: Boolean = Files.exists(path)
    def isDir: Boolean = Files.isDirectory(path)
    def inputStream: InputStream = Files.newInputStream(path)
  }

} 
Example 138
Source File: apiAdapter.scala    From sbt-idea-plugin   with Apache License 2.0 5 votes vote down vote up
package sbt.jetbrains.ideaPlugin

import java.io.InputStream
import java.nio.file.{Files, Path}
import java.util.Optional

object apiAdapter {
  type CompileResult = Any
  val Using = sbt.io.Using
  type BuildDependencies = sbt.internal.BuildDependencies

  def projectJarName(project: sbt.Project): String = s"${project.id}.jar"
  def extractAffectedFiles(result: CompileResult): Seq[sbt.File] = Seq.empty

  object SbtCompilationBackCompat {
    type Analysis         = sbt.internal.inc.Analysis
    type Relations        = sbt.internal.inc.Relations
    type CompileResult    = xsbti.compile.CompileResult
    type CompileAnalysis  = xsbti.compile.CompileAnalysis
    type PreviousResult   = xsbti.compile.PreviousResult
    type ClassFileManager = xsbti.compile.ClassFileManager
    type IncOptions       = xsbti.compile.IncOptions

    val Analysis = sbt.internal.inc.Analysis

    implicit class CompileResultExt(val result: PreviousResult) extends AnyVal {
      def getAnalysis: Optional[CompileAnalysis] = result.analysis()
    }

    implicit class IncOptionsExt(val options: IncOptions) extends AnyVal {
      def withClassfileManager(manager: ClassFileManager): IncOptions =
        options.withExternalHooks(options.externalHooks().withExternalClassFileManager(manager))
    }

    object PreviousResult {
      def empty(): PreviousResult =
        xsbti.compile.PreviousResult.create(Optional.empty(), Optional.empty())
    }
  }

  // the / method is omitted here because sbt 1.3's PathOps already provides it
  final class PathExt(val path: Path) extends AnyVal {
    import scala.collection.JavaConverters.asScalaIteratorConverter

    def list: Seq[Path] = Files.list(path).iterator().asScala.toSeq
    def exists: Boolean = Files.exists(path)
    def isDir: Boolean = Files.isDirectory(path)
    def inputStream: InputStream = Files.newInputStream(path)
  }
} 
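A short usage sketch of the PathExt wrapper above; the object name is hypothetical and it assumes the sbt 1.x apiAdapter from this example is on the classpath:

import java.nio.file.Files
import sbt.jetbrains.ideaPlugin.apiAdapter.PathExt

object PathExtDemo extends App {
  // Exercising the thin Path wrapper defined above.
  val dir = Files.createTempDirectory("pathext-demo")
  Files.write(dir.resolve("a.txt"), "a".getBytes("UTF-8"))

  val p = new PathExt(dir)
  println(p.exists)                   // true
  println(p.isDir)                    // true
  println(p.list.map(_.getFileName))  // the single a.txt entry
}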
Example 139
Source File: Enqueue.scala    From elastiknn   with Apache License 2.0 5 votes vote down vote up
package com.klibisz.elastiknn.benchmarks

import java.io.File
import java.nio.file.Files

import com.klibisz.elastiknn.benchmarks.codecs._
import io.circe.syntax._
import org.apache.commons.codec.digest.DigestUtils
import zio._
import zio.blocking.Blocking
import zio.console._

import scala.util.Random


object Enqueue extends App {

  case class Params(datasetsFilter: Set[String] = Set.empty,
                    file: File = new File("/tmp/hashes.txt"),
                    experimentsBucket: String = "",
                    experimentsPrefix: String = "",
                    s3Minio: Boolean = false)

  private val parser = new scopt.OptionParser[Params]("Build a list of benchmark jobs") {
    override def showUsageOnError: Option[Boolean] = Some(true)
    help("help")
    opt[Seq[String]]("datasetsFilter")
      .unbounded()
      .action((s, c) => c.copy(datasetsFilter = s.map(_.toLowerCase).toSet))
    opt[String]("experimentsBucket")
      .action((x, c) => c.copy(experimentsBucket = x))
    opt[String]("experimentsPrefix")
      .action((x, c) => c.copy(experimentsPrefix = x))
    opt[String]("file")
      .action((s, c) => c.copy(file = new File(s)))
    opt[Boolean]("s3Minio")
      .action((x, c) => c.copy(s3Minio = x))
  }

  override def run(args: List[String]): URIO[Console, ExitCode] = parser.parse(args, Params()) match {
    case Some(params) =>
      val experiments =
        if (params.datasetsFilter.isEmpty) Experiment.defaults
        else Experiment.defaults.filter(e => params.datasetsFilter.contains(e.dataset.name.toLowerCase))
      val s3Client = if (params.s3Minio) S3Utils.minioClient() else S3Utils.defaultClient()
      val layer = Blocking.live ++ Console.live
      val logic: ZIO[Console with Blocking, Throwable, Unit] = for {
        blocking <- ZIO.access[Blocking](_.get)
        hashesAndEffects = experiments.map { exp =>
          val body = exp.asJson.noSpaces
          val hash = DigestUtils.md5Hex(body).toLowerCase
          val key = s"${params.experimentsPrefix}/$hash.json"
          hash -> blocking.effectBlocking(s3Client.putObject(params.experimentsBucket, key, body))
        }
        _ <- putStrLn(s"Saving ${hashesAndEffects.length} experiments to S3")
        _ <- ZIO.collectAllParN(10)(hashesAndEffects.map(_._2))
        jsonListOfHashes = new Random(0).shuffle(hashesAndEffects).map(_._1).asJson.noSpaces
        _ <- blocking.effectBlocking(Files.writeString(params.file.toPath, jsonListOfHashes))
      } yield ()
      logic.provideLayer(layer).exitCode
    case None => sys.exit(1)
  }
} 
Example 140
Source File: DiskMetricsSelectorSpec.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.cluster.metrics

import java.nio.file.{Files, Paths}

import akka.actor.Address
import akka.cluster.metrics.StandardMetrics._
import akka.cluster.metrics.{Metric, NodeMetrics}
import io.radicalbit.nsdb.cluster.metrics.NSDbMetrics._
import org.scalatest.{Matchers, WordSpec}
import org.scalatest.OptionValues._

class DiskMetricsSelectorSpec extends WordSpec with Matchers {

  val emptyNode      = Address("nsdb", "NSDb", "emptyNode", 2552)
  val almostFullNode = Address("nsdb", "NSDb", "node1", 2552)
  val node2          = Address("nsdb", "NSDb", "node2", 2552)
  val node3          = Address("nsdb", "NSDb", "node3", 2552)
  val node4          = Address("nsdb", "NSDb", "node4", 2552)
  val realNode       = Address("nsdb", "NSDb", "real", 2552)

  val fs = Files.getFileStore(Paths.get("."))

  val nodeMetrics1 = NodeMetrics(
    almostFullNode,
    System.currentTimeMillis,
    Set(
      Metric.create(DiskTotalSpace, 1000000, None),
      Metric.create(DiskFreeSpace, 100, None),
      Metric.create(HeapMemoryMax, 512, None),
      Metric.create(CpuCombined, 0.2, None),
      Metric.create(CpuStolen, 0.1, None),
      Metric.create(SystemLoadAverage, 0.5, None),
      Metric.create(Processors, 8, None)
    ).flatten
  )

  val emptyNodeMetric = NodeMetrics(
    emptyNode,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 0, None)).flatten
  )

  val nodeMetrics2 = NodeMetrics(
    node2,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 750000, None)).flatten
  )

  val nodeMetrics3 = NodeMetrics(
    node3,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 1000000, None)).flatten
  )

  val nodeMetrics4 = NodeMetrics(
    node4,
    System.currentTimeMillis,
    Set()
  )

  val realNodeMetrics = NodeMetrics(
    realNode,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, fs.getTotalSpace, None), Metric.create(DiskFreeSpace, fs.getUsableSpace, None)).flatten
  )

  val nodeMetrics = Set(emptyNodeMetric, nodeMetrics1, nodeMetrics2, nodeMetrics3, nodeMetrics4, realNodeMetrics)

  "DiskMetricsSelector" must {
    "calculate capacity of heap metrics" in {
      val capacity = DiskMetricsSelector.capacity(nodeMetrics)
      capacity.get(emptyNode) shouldBe Some(0.0)
      capacity.get(almostFullNode) shouldBe Some(0.0001)
      capacity.get(node2) shouldBe Some(0.75)
      capacity.get(node3) shouldBe Some(1)
      capacity.get(node4) shouldBe None
      //for a real node the capacity must be between 0 and 1. There's no way to estimate a reasonable capacity value and mocking is not the point here
      capacity.get(realNode).value shouldBe >(0.0)
      capacity.get(realNode).value shouldBe <(1.0)
    }
  }

} 
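The "real node" metrics in the spec come straight from the JDK's FileStore API. A tiny sketch of that call and the resulting free-space fraction; the object name is hypothetical:

import java.nio.file.{Files, Paths}

object DiskCapacityDemo extends App {
  // The same java.nio call the spec uses to build the "real node" metrics.
  val store  = Files.getFileStore(Paths.get("."))
  val total  = store.getTotalSpace
  val usable = store.getUsableSpace
  // Capacity in the DiskMetricsSelector sense: the fraction of the disk that is still free.
  println(f"free fraction = ${usable.toDouble / total}%.3f")
}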
Example 141
Source File: IgnoreAllSnapshots.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate_server

import java.nio.file.{Files, Path, Paths}
import java.util.UUID

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor._
import ee.cone.c4di.c4

@c4("IgnoreAllSnapshotsApp") final class IgnoreAllSnapshots(
  toUpdate: ToUpdate,
  consuming: Consuming,
  factory: SnapshotSaverImplFactory,
  baseDir: DataDir,
) extends Executable with LazyLogging {
  private def ignoreTheSamePath(path: Path): Unit = ()
  def run(): Unit = {
    val endOffset = consuming.process("0" * OffsetHexSize(), _.endOffset)
    val subDir = "snapshots"
    val path = Paths.get(baseDir.value).resolve(subDir)
    if(Files.exists(path))
      ignoreTheSamePath(Files.move(path,path.resolveSibling(s"$subDir.${UUID.randomUUID()}.bak")))
    val (bytes, headers) = toUpdate.toBytes(Nil)
    // val saver = snapshotSavers.full
    val saver = factory.create(subDir)
    val rawSnapshot = saver.save(endOffset, bytes, headers)
    logger.info(s"EMPTY snapshot was saved: ${rawSnapshot.relativePath}")

  }
} 
Example 142
Source File: PurgerImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4gate_server

import java.nio.file.{Files, Path, Paths}
import java.time.Instant

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.{Context, SleepUntilKey, TxTransform, WithPK}
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Assemble, assemble, c4assemble}
import ee.cone.c4di.c4

object PurgerDefaultPolicy {
  def apply(): List[KeepPolicy] = {
    val millis = 1L
    val hour = 60L * 60L * 1000L * millis
    val day = 24L * hour
    val week = 7L * day
    List(KeepPolicy(millis, 8), KeepPolicy(hour, 23), KeepPolicy(day, 14), KeepPolicy(week, 14))
  }
}

case class KeepPolicy(period: Long, count: Int)

case class TimedPath(path: Path, mTime: Long)

trait Purger {
  def process(keepPolicyList: List[KeepPolicy]): Unit
}

@c4("SnapshotMakingApp") final class PurgerImpl(
  lister: SnapshotLister, baseDir: DataDir
) extends Purger with LazyLogging {
  def process(keepPolicyList: List[KeepPolicy]): Unit = {
    val files: List[TimedPath] = lister.list.map { snapshot =>
      val path = Paths.get(baseDir.value).resolve(snapshot.raw.relativePath)
      TimedPath(path, Files.getLastModifiedTime(path).toMillis)
    }
    val keepPaths = (for {
      keepPolicy <- keepPolicyList
      keepFile <- files.groupBy(file => file.mTime / keepPolicy.period).values
        .map(_.maxBy(_.mTime)).toList.sortBy(_.mTime).takeRight(keepPolicy.count)
    } yield keepFile.path).toSet

    for {
      path <- files.map(_.path).filterNot(keepPaths)
    } {
      if(Files.deleteIfExists(path)) logger.info(s"removed $path")
    }
    logger.debug("snapshots checked")
  }
}

case class PurgerTx(
  srcId: SrcId, keepPolicyList: List[KeepPolicy]
)(purger: Purger) extends TxTransform {
  def transform(local: Context): Context = {
    purger.process(keepPolicyList)
    SleepUntilKey.set(Instant.now.plusSeconds(60L))(local)
  }
}

@c4assemble("SnapshotMakingApp") class PurgerAssembleBase(purger: Purger)   {
  def joinPurger(
    key: SrcId,
    first: Each[S_Firstborn]
  ): Values[(SrcId,TxTransform)] =
    List(WithPK(PurgerTx("purger",PurgerDefaultPolicy())(purger)))
} 
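The retention rule in process() is easiest to see on plain timestamps: bucket modification times by period, keep the newest snapshot of each bucket, then keep only the most recent count buckets. A hedged sketch of that rule in isolation; the object name and sample timestamps are hypothetical:

object KeepPolicyDemo extends App {
  // The retention rule from PurgerImpl.process applied to plain timestamps:
  // bucket by period, keep the newest entry of each bucket, then keep only the last `count` buckets.
  def keep(mTimes: List[Long], period: Long, count: Int): Set[Long] =
    mTimes.groupBy(_ / period).values.map(_.max).toList.sorted.takeRight(count).toSet

  val hour   = 60L * 60L * 1000L
  val mTimes = List(1 * hour + 5, 1 * hour + 30, 2 * hour + 1, 5 * hour + 7, 6 * hour)
  println(keep(mTimes, hour, 2))  // the newest entry of each of the two most recent hourly buckets
}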
Example 143
Source File: ProgressObserverImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor

import java.lang.management.ManagementFactory
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Path, Paths}
import java.time.Instant
import java.util.UUID

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.{NextOffset, SrcId}
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Single, c4assemble}
import ee.cone.c4di.c4

import scala.annotation.tailrec
import scala.concurrent.Future

@c4("ServerCompApp") final class ProgressObserverFactoryImpl(
  inner: TxObserver, config: ListConfig,
  execution: Execution, getToStart: DeferredSeq[Executable]
) extends ProgressObserverFactory {
  def create(endOffset: NextOffset): Observer[RichContext] = {
    val lateExObserver: Observer[RichContext]  = new LateExecutionObserver(execution,getToStart.value,inner.value)
    val readyObserver = Single.option(config.get("C4ROLLING")).fold(lateExObserver)(path=>
      new ReadyObserverImpl(lateExObserver, Paths.get(path), 0L)
    )
    new ProgressObserverImpl(readyObserver,endOffset)
  }
}

// states:
//   loading
//   loading ready
//   master
// trans:
//   loading -> loading
//   loading -> loading ready
//   loading ready -> loading ready
//   loading ready -> master

class ProgressObserverImpl(inner: Observer[RichContext], endOffset: NextOffset, until: Long=0) extends Observer[RichContext] with LazyLogging {
  def activate(rawWorld: RichContext): Observer[RichContext] =
    if (rawWorld.offset < endOffset) {
      val now = System.currentTimeMillis
      if(now < until) this else {
        logger.debug(s"loaded ${rawWorld.offset}/$endOffset")
        new ProgressObserverImpl(inner, endOffset, now+1000)
      }
    } else {
      logger.info(s"Stats OK -- loaded ALL/$endOffset -- uptime ${ManagementFactory.getRuntimeMXBean.getUptime}ms")
      inner.activate(rawWorld)
    }
}

class ReadyObserverImpl(inner: Observer[RichContext], path: Path, until: Long=0) extends Observer[RichContext] with LazyLogging {
  private def ignoreTheSamePath(path: Path): Unit = ()
  def activate(rawWorld: RichContext): Observer[RichContext] = {
    if(until == 0) ignoreTheSamePath(Files.write(path.resolve("c4is-ready"),Array.empty[Byte]))
    val now = System.currentTimeMillis
    if(now < until) this
    else if(Files.exists(path.resolve("c4is-master"))) {
      logger.info(s"becoming master")
      inner.activate(rawWorld)
    } else {
      logger.debug(s"ready/waiting")
      new ReadyObserverImpl(inner, path, now+1000)
    }
  }

}


@c4("ServerCompApp") final class LocalElectorDeath(config: ListConfig, execution: Execution) extends Executable with Early {
  def run(): Unit =
    for(path <- config.get("C4ELECTOR_PROC_PATH")) iteration(Paths.get(path))
  @tailrec private def iteration(path: Path): Unit = {
    if(Files.notExists(path)) execution.complete()
    Thread.sleep(1000)
    iteration(path)
  }
}

////

@c4("ServerCompApp") final class ServerExecutionFilter(inner: ExecutionFilter)
  extends ExecutionFilter(e=>inner.check(e) && e.isInstanceOf[Early])

class LateExecutionObserver(
  execution: Execution, toStart: Seq[Executable], inner: Observer[RichContext]
) extends Observer[RichContext] with LazyLogging {
  def activate(world: RichContext): Observer[RichContext] = {
    logger.info(s"tracking ${toStart.size} late services")
    toStart.filterNot(_.isInstanceOf[Early]).foreach(f => execution.fatal(Future(f.run())(_)))
    inner.activate(world)
  }
} 
Example 144
Source File: SnapshotRemoteImpl.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor

import java.net.{URLDecoder, URLEncoder}
import java.nio.file.{Files, Paths}
import java.nio.charset.StandardCharsets.UTF_8

import ee.cone.c4di.c4

@c4("ConfigSimpleSignerApp") final class SimpleSignerImpl(
  config: Config, idGenUtil : IdGenUtil
)(
  fileName: String = config.get("C4AUTH_KEY_FILE")
)(
  val salt: String = new String(Files.readAllBytes(Paths.get(fileName)),UTF_8)
) extends SimpleSigner {
  def sign(data: List[String], until: Long): String = {
    val uData = until.toString :: data
    val hash = idGenUtil.srcIdFromStrings(salt :: uData:_*)
    (hash :: uData).map(URLEncoder.encode(_,"UTF-8")).mkString("=")
  }

  def retrieve(check: Boolean): Option[String]=>Option[List[String]] = _.flatMap{ signed =>
    val hash :: untilStr :: data = signed.split("=").map(URLDecoder.decode(_,"UTF-8")).toList
    val until = untilStr.toLong
    if(!check) Option(data)
    else if(until < System.currentTimeMillis) None
    else if(sign(data,until) == signed) Option(data)
    else None
  }
}

@c4("TaskSignerApp") final class SnapshotTaskSignerImpl(inner: SimpleSigner)(
  val url: String = "/need-snapshot"
) extends SnapshotTaskSigner {
  def sign(task: SnapshotTask, until: Long): String = inner.sign(List(url,task.name) ++ task.offsetOpt, until)
  def retrieve(check: Boolean): Option[String]=>Option[SnapshotTask] =
    signed => inner.retrieve(check)(signed) match {
      case Some(Seq(`url`,"next")) => Option(NextSnapshotTask(None))
      case Some(Seq(`url`,"next", offset)) => Option(NextSnapshotTask(Option(offset)))
      case Some(Seq(`url`,"debug", offset)) => Option(DebugSnapshotTask(offset))
      case _ => None
    }
} 
Example 145
Source File: Logger.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor_logback_impl

import java.io.ByteArrayInputStream
import java.nio.file.{Files, Path, Paths}
import java.nio.charset.StandardCharsets.UTF_8

import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.joran.JoranConfigurator
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor._
import ee.cone.c4di.c4
import org.slf4j.LoggerFactory

import scala.annotation.tailrec

@c4("BasicLoggingApp") final class LoggerTest extends Executable with Early with LazyLogging {
  def run(): Unit = if(Option(System.getenv("C4LOGBACK_TEST")).nonEmpty) iteration(0L)
  @tailrec private def iteration(v: Long): Unit = {
    Thread.sleep(1000)
    logger.warn(s"logger test $v")
    logger.debug(s"logger test $v")
    iteration(v+1L)
  }
}

@c4("BasicLoggingApp") final class DefLoggerConfigurator(
  config: ListConfig,
  catchNonFatal: CatchNonFatal
) extends LoggerConfigurator(
  config.get("C4LOGBACK_XML").map(Paths.get(_)) ::: Paths.get("/tmp/logback.xml") :: Nil,
  catchNonFatal,
  5000
) with Executable with Early

class LoggerConfigurator(paths: List[Path], catchNonFatal: CatchNonFatal, scanPeriod: Long) extends Executable {
  def run(): Unit = iteration("")
  @tailrec private def iteration(wasContent: String): Unit = {
    val content =
      s"""
      <configuration>
        <statusListener class="ch.qos.logback.core.status.NopStatusListener" />
        ${paths.map(path=>if(Files.exists (path)) new String(Files.readAllBytes(path), UTF_8) else "").mkString}
        <appender name="CON" class="ch.qos.logback.core.ConsoleAppender">
          <encoder><pattern>%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n</pattern></encoder>
        </appender>
        <appender name="ASYNСCON" class="ch.qos.logback.classic.AsyncAppender">
          <discardingThreshold>0</discardingThreshold>
          <queueSize>1000000</queueSize>
          <appender-ref ref="CON" />
        </appender>
        <root level="INFO">
          <appender-ref ref="ASYNСCON" />
        </root>
        <shutdownHook/>
      </configuration>
      """
    if(wasContent != content) reconfigure(content)
    Thread.sleep(scanPeriod)
    iteration(content)
  }
  def reconfigure(content: String): Unit = catchNonFatal{
    println("logback reconfigure 2 started")
    val context = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    val configurator = new JoranConfigurator()
    configurator.setContext(context)
    context.reset()
    configurator.doConfigure(new ByteArrayInputStream(content.getBytes(UTF_8)))
    println("logback reconfigure 2 ok")
  }("reconfigure"){ e => () }
} 
Example 146
Source File: SnapshotParser.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor.tests

import java.nio.file.{Files, Paths}

import ee.cone.c4actor._
import ee.cone.c4actor_kafka_impl.LZ4DeCompressorApp
import ee.cone.c4proto.ToByteString
import okio.ByteString

//C4STATE_TOPIC_PREFIX=ee.cone.c4actor.tests.SnapshotParserApp sbt ~'c4actor-extra-examples/runMain ee.cone.c4actor.ServerMain'

class SnapshotParser(execution: Execution, toUpdate: ToUpdate, snapshotLoader: SnapshotLoader, qAdapterRegistry: QAdapterRegistry) extends Executable {
  def run(): Unit = {
    println(new java.io.File(".").getCanonicalPath)
    val hashFromData = SnapshotUtilImpl.hashFromData(Files.readAllBytes(Paths.get("/c4db/home/c4proto/c4actor-extra-examples/0000000000000000-92b87c05-294d-3c1d-b443-fb83bdc71d20-c-lz4")))
    println(hashFromData)
    val fromName = SnapshotUtilImpl.hashFromName(RawSnapshot("0000000000000000-92b87c05-294d-3c1d-b443-fb83bdc71d20-c-lz4")).get.uuid
    println(hashFromData, fromName)
    val sn = snapshotLoader.load(RawSnapshot("0000000000000000-92b87c05-294d-3c1d-b443-fb83bdc71d20-c-lz4"))
    val updates = toUpdate.toUpdates(sn.toList)
    println(updates.filter(_.flags != 0L).mkString("\n"))
    execution.complete()
  }
}

class SnapshotParserApp
  extends ToStartApp
    with VMExecutionApp
    with ExecutableApp
    with RichDataApp
    with EnvConfigApp
    with LZ4DeCompressorApp
{
  lazy val loader = new RawSnapshotLoader {
    def load(snapshot: RawSnapshot): ByteString = {
      val path = Paths.get(config.get("C4DATA_DIR")).resolve(snapshot.relativePath)
      ToByteString(Files.readAllBytes(path))
    }
  }

  override def toStart: List[Executable] = new SnapshotParser(execution, toUpdate, new SnapshotLoaderImpl(loader), qAdapterRegistry) :: super.toStart
  def assembleProfiler: AssembleProfiler = NoAssembleProfiler
} 
Example 147
Source File: ConfigValues.scala    From c4proto   with Apache License 2.0 5 votes vote down vote up
package ee.cone.c4actor

import java.lang.Math.toIntExact
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Path, Paths}

import ee.cone.c4actor.Types.SrcId
import ee.cone.c4assemble.Single

trait SrcIdValue {def srcId: SrcId}
trait StringValue {def value: String}
case class StringConstant(value: String) extends StringValue

trait ControversialBooleanConversion {
  def convert: String => Boolean = v => v.trim.toLowerCase match {
    case "" | "0" | "false" => false
    case _ => true
  }
}

trait BooleanValue {
  def value: Boolean
}
class BooleanTrue extends BooleanValue{def value = true}
class BooleanFalse extends BooleanValue{def value = false}

trait IntValue {def value: Int}
case class IntConstant(value: Int) extends IntValue

abstract class ConfigStringOptValue(envName: String, default: () => String = () => "") extends StringValue {
  def config: ListConfig
  def value: String = Single.option(config.get(envName)).getOrElse(default())
}
abstract class ConfigIntOptValue(envName: String, default: () => Int = () => 0) extends IntValue {
  def config: ListConfig
  def value: Int = Single.option(config.get(envName)).fold(default())(s=>toIntExact(s.toLong))
}
abstract class ConfigBooleanOptValue(envName: String, default: () => Boolean = () => false) extends BooleanValue with ControversialBooleanConversion {
  def config: ListConfig
  def value: Boolean = Single.option(config.get(envName)).fold(default())(convert)
}

abstract class ConfigStringValue(envName: String) extends StringValue{
  def config: Config
  def value: String = config.get(envName)
}
abstract class ConfigFileConfig(envName: String) extends StringValue{
  def config: Config
  def value: String = read(Paths.get(config.get(envName))).trim
  private def read(path: Path) = new String(Files.readAllBytes(path),UTF_8)
}
abstract class ConfigIntValue(envName: String) extends IntValue{
  def config: Config
  def value: Int = toIntExact(config.get(envName).toLong)
}
abstract class ConfigBooleanValue(envName: String) extends BooleanValue with ControversialBooleanConversion {
  def config: Config
  def value: Boolean = convert(config.get(envName))
} 
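ConfigFileConfig above reads a whole file as a trimmed UTF-8 string. A minimal sketch of that idiom in isolation (the object name and the example file are illustrative assumptions):

import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Paths}

object FileBackedConfigValue {
  // Read a configuration value stored in a file and trim surrounding whitespace.
  def read(pathName: String): String =
    new String(Files.readAllBytes(Paths.get(pathName)), UTF_8).trim

  def main(args: Array[String]): Unit =
    println(read("/etc/hostname")) // illustrative file name
}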
Example 148
Source File: Reporter.scala    From sbt-dependency-updates   with Apache License 2.0 5 votes vote down vote up
package org.jmotor.sbt

import java.nio.file.{ Files, Path, Paths }

import org.jmotor.sbt.dto.ModuleStatus
import org.jmotor.sbt.parser.PluginParser
import org.jmotor.sbt.service.VersionService
import sbt.{ ModuleID, ResolvedProject }

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{ Failure, Success, Try }


class Reporter(versionService: VersionService) {

  def dependencyUpdates(dependencies: Seq[ModuleID]): Future[Seq[ModuleStatus]] = {
    Future.traverse(dependencies)(versionService.checkForUpdates).map(_.sortBy(_.status.id))
  }

  def pluginUpdates(
    sbtBinaryVersion: String,
    project:          ResolvedProject): Future[Seq[ModuleStatus]] = {
    val dir = Paths.get(project.base.getPath, "project")
    val sbtScalaBinaryVersion = getSbtScalaBinaryVersion(sbtBinaryVersion)
    Future.traverse(plugins(dir)) { module ⇒
      versionService.checkPluginForUpdates(module, sbtBinaryVersion, sbtScalaBinaryVersion)
    }.map(_.sortBy(_.status.id))
  }

  def globalPluginUpdates(sbtBinaryVersion: String): Future[Seq[ModuleStatus]] = {
    val dir = Paths.get(System.getProperty("user.home"), ".sbt", sbtBinaryVersion, "plugins")
    val sbtScalaBinaryVersion = getSbtScalaBinaryVersion(sbtBinaryVersion)
    Future.traverse(plugins(dir)) { module ⇒
      versionService.checkPluginForUpdates(module, sbtBinaryVersion, sbtScalaBinaryVersion)
    }.map(_.sortBy(_.status.id))
  }

  def plugins(dir: Path): Seq[ModuleID] = {
    Try {
      Files.newDirectoryStream(dir, "*.sbt").asScala.toSeq.flatMap { path ⇒
        Files.readAllLines(path).asScala
      }
    } match {
      case Success(lines) ⇒ PluginParser.parse(lines)
      case Failure(_)     ⇒ Seq.empty[ModuleID]
    }
  }

  private[sbt] def getSbtScalaBinaryVersion(sbtBinaryVersion: String): String = {
    sbtBinaryVersion match {
      case "1.0" ⇒ "2.12"
      case _     ⇒ "2.10"
    }
  }

}

object Reporter {

  def apply(versionService: VersionService): Reporter = new Reporter(versionService)

} 
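The plugins helper above combines Files.newDirectoryStream with a glob and Files.readAllLines, falling back to an empty result on failure. A stripped-down sketch of that pattern, with illustrative names only:

import java.nio.file.{Files, Paths}
import scala.collection.JavaConverters._
import scala.util.Try

object SbtLineReader {
  // Collect every line of every *.sbt file in a directory; empty on any failure.
  def sbtLines(dirName: String): Seq[String] =
    Try {
      Files.newDirectoryStream(Paths.get(dirName), "*.sbt").asScala.toSeq
        .flatMap(path => Files.readAllLines(path).asScala)
    }.getOrElse(Seq.empty)

  def main(args: Array[String]): Unit =
    sbtLines("project").foreach(println)
}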
Example 149
Source File: Datasets.scala    From qamr   with MIT License 5 votes vote down vote up
package qamr.analysis

import cats.implicits._

import qamr.DataFiles
import qamr.QAData
import qamr.example.SentenceId

import java.nio.file.Path
import java.nio.file.Files

class Datasets(dataRoot: Path) {

  def readDataTSV(pathStr: String): QAData[SentenceId] = {
    import scala.collection.JavaConverters._
    DataFiles.readTSV(
      Files.lines(dataRoot.resolve(pathStr)).iterator.asScala.toList,
      SentenceId.fromString
    ).get
  }

  lazy val trainFull = readDataTSV("full/train.tsv")
  lazy val devFull = readDataTSV("full/dev.tsv")
  lazy val testFull = readDataTSV("full/test.tsv")
  lazy val ptbFull = readDataTSV("full/ptb.tsv")

  lazy val train = readDataTSV("filtered/train.tsv")
  lazy val dev = readDataTSV("filtered/dev.tsv")
  lazy val test = readDataTSV("filtered/test.tsv")
  lazy val ptb = readDataTSV("filtered/ptb.tsv")

} 
Example 150
Source File: FileSystemAnnotationDataService.scala    From qamr   with MIT License 5 votes vote down vote up
package qamr

import spacro.util._

import scala.util.{Try, Success}
import java.nio.file.Path
import java.nio.file.Files

import com.typesafe.scalalogging.StrictLogging

class FileSystemAnnotationDataService(dataPath: Path) extends AnnotationDataService {

  private[this] def getDataDirectoryPath = Try {
    val directory = dataPath
    if(!Files.exists(directory)) {
      Files.createDirectories(directory)
    }
    directory
  }

  private[this] def getFullFilename(name: String) = s"$name.txt"

  override def saveLiveData(name: String, contents: String): Try[Unit] = for {
    directory <- getDataDirectoryPath
    _ <- Try(Files.write(directory.resolve(getFullFilename(name)), contents.getBytes()))
  } yield ()

  import scala.collection.JavaConverters._

  override def loadLiveData(name: String): Try[List[String]] = for {
    directory <- getDataDirectoryPath
    lines <- Try(Files.lines(directory.resolve(getFullFilename(name))).iterator.asScala.toList)
  } yield lines
} 
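The service above creates its data directory on demand and then writes and reads plain text files. A compact sketch of the same create-write-read cycle (directory, file name, and contents are illustrative):

import java.nio.file.{Files, Paths}
import scala.collection.JavaConverters._

object WriteThenRead {
  def main(args: Array[String]): Unit = {
    val dir = Paths.get("/tmp/annotation-data") // illustrative directory
    if (!Files.exists(dir)) Files.createDirectories(dir)
    val file = dir.resolve("example.txt")
    Files.write(file, "line1\nline2".getBytes("UTF-8"))
    val lines = Files.lines(file).iterator.asScala.toList
    println(lines) // List(line1, line2)
  }
}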
Example 151
Source File: StreamSpecUtil.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.pattern.stream

import java.io.File
import java.nio.file.Files
import java.util.concurrent.atomic.AtomicInteger

import akka.stream.ThrottleMode
import akka.stream.scaladsl._
import com.typesafe.config.ConfigFactory
import net.openhft.chronicle.wire.{WireIn, WireOut}

import scala.concurrent.duration._
import scala.language.postfixOps
import scala.collection.JavaConverters._
import scala.util.Random

object StreamSpecUtil {
  val elementCount = 100000
  val failTestAt = elementCount * 3 / 10
  val elementsAfterFail = 100
  val flowRate = 1000
  val flowUnit = 10 millisecond
  val burstSize = 500
}

class StreamSpecUtil[T, S](outputPort: Int = 1) {

  import StreamSpecUtil._
  val outputPorts = outputPort
  val tempPath: File = Files.createTempDirectory("persistent_queue").toFile
  val totalProcessed = elementCount + elementsAfterFail

  val config = ConfigFactory.parseMap {
    Map(
      "persist-dir" -> s"${tempPath.getAbsolutePath}",
      "output-ports" -> s"$outputPorts",
      "roll-cycle" -> "TEST_SECONDLY".toLowerCase()
    ).asJava
  }

  val in = Source(1 to elementCount)
  lazy val atomicCounter = Vector.tabulate(outputPorts)(_ => new AtomicInteger(0))
  lazy val flowCounter = Flow[Any].map(_ => 1L).reduce(_ + _).toMat(Sink.head)(Keep.right)
  lazy val merge = Merge[S](outputPorts)
  lazy val throttle = Flow[S].throttle(flowRate, flowUnit, burstSize, ThrottleMode.shaping)
  lazy val throttleMore = Flow[S].throttle(flowRate * 9 / 10, flowUnit, burstSize, ThrottleMode.shaping)
  lazy val head = Sink.head[S]
  lazy val last = Sink.last[S]
  val minRandom = 100
  lazy val random = Random.nextInt(elementCount - minRandom - 1) + minRandom
  lazy val filterCounter = new AtomicInteger(0)
  lazy val filterARandomElement = Flow[Event[T]].map(e => (e, filterCounter.incrementAndGet())).filter(_._2 != random).map(_._1)

  def commitCounter(outputPortId: Int) = atomicCounter(outputPortId).incrementAndGet()

  def clean() = delete(tempPath)

  private def delete(file: File): Unit = {
    if (file.isDirectory)
      Option(file.listFiles).map(_.toList).getOrElse(Nil).foreach(delete)
    file.delete
  }
}

case class Person(name: String, age: Int)

class PersonSerializer extends QueueSerializer[Person] {

  override def readElement(wire: WireIn): Option[Person] = {
    for {
      name <- Option(wire.read().`object`(classOf[String]))
      age <- Option(wire.read().int32)
    } yield { Person(name, age) }
  }

  override def writeElement(element: Person, wire: WireOut): Unit = {
    wire.write().`object`(classOf[String], element.name)
    wire.write().int32(element.age)
  }
} 
Example 152
Source File: ConfigFileValidator.scala    From mvn_scalafmt   with Apache License 2.0 5 votes vote down vote up
package org.antipathy.mvn_scalafmt.validation

import java.nio.file.{Files, Path, Paths}

import org.apache.maven.plugin.logging.Log


  @throws[IllegalArgumentException]
  override def validate(location: String): Path = location match {
    case "" | null => throw buildException(s"Config path is null or empty")
    case invalidPath if !Files.exists(Paths.get(invalidPath)) =>
      throw buildException(s"Config path is invalid: $location")
    case _ => Paths.get(location)
  }

  private def buildException(message: String): Exception = {
    val exception = new IllegalArgumentException(message)
    log.error(exception)
    exception
  }
} 
Example 153
Source File: SourceFileSequenceBuilder.scala    From mvn_scalafmt   with Apache License 2.0 5 votes vote down vote up
package org.antipathy.mvn_scalafmt.builder

import java.io.File
import java.nio.file.{Files, Paths}

import org.apache.commons.io.FileUtils
import org.apache.maven.plugin.logging.Log

import scala.jdk.CollectionConverters._


  override def build(paths: Seq[File]): Seq[File] =
    if (paths == null) {
      log.warn("Could not locate any scala sources to format")
      Seq.empty[File]
    } else {
      val files = paths.map(_.getCanonicalPath).flatMap { p =>
        if (Files.exists(Paths.get(p))) {
          Some(new File(p))
        } else {
          log.warn(s"Could not locate Scala source at $p")
          None
        }
      }
      files.flatMap(file => FileUtils.listFiles(file, Array("scala", "sc", "sbt"), true).asScala)
    }
} 
Example 154
Source File: RemoteConfigWriter.scala    From mvn_scalafmt   with Apache License 2.0 5 votes vote down vote up
package org.antipathy.mvn_scalafmt.io

import org.antipathy.mvn_scalafmt.model.RemoteConfig
import java.io.File
import java.nio.charset.StandardCharsets

import org.apache.commons.io.FileUtils
import org.apache.maven.plugin.logging.Log
import java.nio.file.{Files, Path}


  override def write(input: RemoteConfig): Path = {

    log.info(s"Writing remote config to ${input.location.toAbsolutePath}")

    if (Files.exists(input.location)) {
      Files.delete(input.location)
    }

    val newConfig = new File(input.location.toAbsolutePath.toString)
    FileUtils.writeStringToFile(
      newConfig,
      input.contents,
      StandardCharsets.UTF_8
    )
    newConfig.toPath
  }
} 
Example 155
Source File: SourceFileFormatterSpec.scala    From mvn_scalafmt   with Apache License 2.0 5 votes vote down vote up
package org.antipathy.mvn_scalafmt.format

import java.io.File
import java.nio.file.Files

import org.antipathy.mvn_scalafmt.logging.MavenLogReporter
import org.antipathy.mvn_scalafmt.validation.ConfigFileValidator
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalafmt.interfaces.Scalafmt
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class SourceFileFormatterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "SourceFileFormatter"

  it should "format a source file" in {

    val log        = new SystemStreamLog
    val config     = new ConfigFileValidator(log).validate(".scalafmt.conf")
    val sourceFile = new File("src/main/scala/org/antipathy/mvn_scalafmt/model/FormatResult.scala")
    val reporter   = new MavenLogReporter(log)
    val scalafmt: Scalafmt =
      Scalafmt.create(this.getClass.getClassLoader).withRespectVersion(false).withReporter(reporter)

    val result = new SourceFileFormatter(config, scalafmt, log).format(sourceFile).formattedSource

    result.trim should be(new String(Files.readAllBytes(sourceFile.toPath)).trim)
  }
} 
Example 156
Source File: RemoteConfigWriterSpec.scala    From mvn_scalafmt   with Apache License 2.0 5 votes vote down vote up
package org.antipathy.mvn_scalafmt.io

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import org.antipathy.mvn_scalafmt.model.RemoteConfig
import org.apache.commons.io.FileUtils
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class RemoteConfigWriterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "RemoteConfigWriter"

  it should "Write a config to a local path" in {

    val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt.conf"
    val contents  = """version = "1.5.1"
                     |maxColumn = 120
                     |align = false
                     |rewrite.rules = [SortImports]
                     |danglingParentheses = true
                     |importSelectors = singleLine
                     |binPack.parentConstructors = true
                     |includeCurlyBraceInSelectChains = false""".stripMargin
    val writer    = new RemoteConfigWriter(new SystemStreamLog)
    val input     = RemoteConfig(contents, Paths.get(localPath))

    writer.write(input)

    new String(Files.readAllBytes(new File(localPath).toPath)) should be(contents)
    Files.delete(input.location)
  }

  it should "Overwrite a config in a local path" in {

    val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt2.conf"

    val contents    = """version = "1.5.1"
                     |maxColumn = 120
                     |align = false
                     |rewrite.rules = [SortImports]
                     |danglingParentheses = true
                     |importSelectors = singleLine
                     |binPack.parentConstructors = true
                     |includeCurlyBraceInSelectChains = false""".stripMargin
    val oldContents = "SomeOldConfig"

    val writer = new RemoteConfigWriter(new SystemStreamLog)
    val input  = RemoteConfig(contents, Paths.get(localPath))

    FileUtils.writeStringToFile(new File(localPath), oldContents, StandardCharsets.UTF_8)
    new String(Files.readAllBytes(new File(localPath).toPath)) should be(oldContents)

    writer.write(input)

    new String(Files.readAllBytes(new File(localPath).toPath)) should be(contents)
    Files.delete(input.location)
  }

} 
Example 157
Source File: ArtifactFSSaver.scala    From marvin-engine-executor   with Apache License 2.0 5 votes vote down vote up
package org.marvin.artifact.manager

import java.nio.file.{Files, Path, Paths, StandardCopyOption}

import akka.Done
import akka.actor.{Actor, ActorLogging}
import org.marvin.artifact.manager.ArtifactSaver.{GetArtifact, SaveToLocal, SaveToRemote}
import org.marvin.model.EngineMetadata

class ArtifactFSSaver(metadata: EngineMetadata) extends Actor with ActorLogging {
  override def preStart() = {
    log.info(s"${this.getClass().getCanonicalName} actor initialized...")
  }

  def generatePaths(artifactName: String, protocol: String): Map[String, Path] = {
    Map(
      "localPath" -> Paths.get(s"${metadata.artifactsLocalPath}/${metadata.name}/$artifactName"),
      "remotePath" -> Paths.get((s"${metadata.artifactsRemotePath}/${metadata.name}/${metadata.version}/$artifactName/$protocol"))
    )
  }

  def copyFile(origin: Path, destination: Path): Unit = {
    if (!destination.getParent.toFile.exists()) destination.getParent.toFile.mkdirs()

    log.info(s"Copying files from ${origin} to ${destination}")

    Files.copy(origin, destination, StandardCopyOption.REPLACE_EXISTING)

    log.info(s"File ${destination} saved!")
  }

  def validatePath(path: Path): Boolean = {
    new java.io.File(path.toString).exists
  }

  override def receive: Receive = {
    case SaveToLocal(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)

      // Validate if the protocol is correct
      if (validatePath(uris("remotePath")))
        copyFile(uris("remotePath"), uris("localPath"))
      else
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")

      sender ! Done

    case SaveToRemote(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)

      // Validate if the protocol is correct
      if (validatePath(uris("localPath")))
        copyFile(uris("localPath"), uris("remotePath"))
      else
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")

      sender ! Done

    case GetArtifact(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)
      var response: String = ""

      // Validate if the protocol is correct
      if (validatePath(uris("localPath")))
        response = scala.io.Source.fromFile(uris("localPath").toString).getLines.mkString
      else
        log.error(s"Invalid protocol: ${protocol}, load process canceled!")

      sender ! response

    case _ =>
      log.warning("Received a bad format message...")
  }
} 
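ArtifactFSSaver relies on Files.copy with REPLACE_EXISTING after ensuring the destination's parent directory exists. A self-contained sketch of that copy pattern, using illustrative paths and payload:

import java.nio.file.{Files, Paths, StandardCopyOption}

object CopyOverwrite {
  def main(args: Array[String]): Unit = {
    val origin      = Paths.get("/tmp/artifact-src.bin")           // illustrative paths
    val destination = Paths.get("/tmp/artifacts/artifact-src.bin")
    Files.write(origin, "payload".getBytes("UTF-8"))
    if (!Files.exists(destination.getParent)) Files.createDirectories(destination.getParent)
    Files.copy(origin, destination, StandardCopyOption.REPLACE_EXISTING)
    println(s"copied ${Files.size(destination)} bytes")
  }
}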
Example 158
Source File: SttpFileExtensions.scala    From sttp   with Apache License 2.0 5 votes vote down vote up
package sttp.client.internal

import java.nio.file.Files
import java.nio.file.Path

// wrap a Path
trait SttpFileExtensions { self: SttpFile =>

  def toPath: Path = underlying.asInstanceOf[Path]
  def toFile: java.io.File = toPath.toFile
}

trait SttpFileCompanionExtensions {
  def fromPath(path: Path): SttpFile =
    new SttpFile(path) {
      val name: String = path.getFileName.toString
      def size: Long = Files.size(path)
    }
  def fromFile(file: java.io.File): SttpFile = fromPath(file.toPath)
} 
Example 159
Source File: SttpFileExtensions.scala    From sttp   with Apache License 2.0 5 votes vote down vote up
package sttp.client.internal

import java.nio.file.Files
import java.nio.file.Path

trait SttpFileExtensions { self: SttpFile =>
  def toPath: Path = underlying.asInstanceOf[Path]
  def toFile: java.io.File = toPath.toFile
}

trait SttpFileCompanionExtensions {
  def fromPath(path: Path): SttpFile =
    new SttpFile(path) {
      val name: String = path.getFileName.toString
      def size: Long = Files.size(path)
    }
  def fromFile(file: java.io.File): SttpFile = fromPath(file.toPath)
} 
Example 160
Source File: GitHooks.scala    From stryker4s   with Apache License 2.0 5 votes vote down vote up
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermissions

import sbt._
import sbt.internal.util.ManagedLogger

import scala.collection.JavaConverters._
import scala.util.Properties


object GitHooks {
  def apply(hooksSourceDir: File, hooksTargetDir: File, log: ManagedLogger): Unit =
    if (hooksSourceDir.isDirectory && hooksTargetDir.exists()) {
      IO.listFiles(hooksSourceDir)
        .map(hook => (hook, hooksTargetDir / hook.name))
        .filterNot(_._2.exists()) // Don't write if hook already exists
        .foreach {
          case (originalHook, targetHook) =>
            log.info(s"Copying ${originalHook.name} hook to $targetHook")
            Files.copy(originalHook.asPath, targetHook.asPath)
            if (!Properties.isWin)
              targetHook.setPermissions(PosixFilePermissions.fromString("rwxr-xr-x").asScala.toSet)
        }
    }
} 
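GitHooks copies a hook file and then marks it executable on POSIX systems. A minimal sketch of the same idea using plain java.nio (the sbt helpers are replaced by Files.setPosixFilePermissions; file names are illustrative):

import java.nio.file.{Files, Paths}
import java.nio.file.attribute.PosixFilePermissions
import scala.util.Properties

object CopyHook {
  def main(args: Array[String]): Unit = {
    val source = Files.createTempFile("hook-source", ".sh") // stand-in for a real hook file
    val target = Paths.get(source.toString + ".copy")
    Files.copy(source, target)
    if (!Properties.isWin)
      Files.setPosixFilePermissions(target, PosixFilePermissions.fromString("rwxr-xr-x"))
  }
}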
Example 161
Source File: FahrenheitSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.docs

import java.nio.charset.StandardCharsets._
import java.nio.file.Files
import org.scalatest.{BeforeAndAfterAll, FreeSpec, Matchers}

class FahrenheitSpec extends FreeSpec with Matchers with BeforeAndAfterAll {

  val testFileContent =
    """// Temperature Readings in Fahrenheit
      |50.8
      |80.2
      |63.0
      |-14
      |
      |// Especially important temperatures
      |32
      |-459.67
      |451
    """.stripMargin

  val inputFile = {
    val path = Files.createTempFile("fahrenheit-spec-input", ".tmp")
    Files.newBufferedWriter(path, UTF_8).append(testFileContent).close()
    path.toFile
  }
  val outputFile = Files.createTempFile("fahrenheit-spec-output", ".tmp").toFile

  "the examples in the `Fahrenheit` chapter should work as expected" - {

    "example" in {
      def println(s: String) = () // disable the printing below

      //#example
      import java.io.File
      import scala.util.{Failure, Success}
      import scala.concurrent.Future
      import swave.core.io.files._   // enables `Spout.fromFile`
      import swave.compat.scodec._   // enables `ByteVector` support
      import swave.core.text._       // enables text transformations
      import swave.core._

      implicit val env = StreamEnv()
      import env.defaultDispatcher // for the future transformations below

      def fahrenheitToCelsius(f: Double): Double =
        (f - 32.0) * (5.0/9.0)

      def converter(fahrenheitReadingsInput: File,
                    celciusReadingsOutput: File): RunnableStreamGraph[Future[Long]] =
        Spout.fromFile(fahrenheitReadingsInput)    // Spout[ByteVector]
          .utf8Decode                              // Spout[String]
          .lines                                   // Spout[String]
          .filterNot(_.trim.isEmpty)               // Spout[String]
          .filterNot(_ startsWith "//")            // Spout[String]
          .map(_.toDouble)                         // Spout[Double]
          .map(fahrenheitToCelsius)                // Spout[Double]
          .map("%.2f" format _)                    // Spout[String]
          .intersperse("\n")                       // Spout[String]
          .utf8Encode                              // Spout[ByteVector]
          .to(Drain.toFile(celciusReadingsOutput)) // StreamGraph[Future[Long]]
          .seal()                                  // RunnableStreamGraph[Future[Long]]

      // when we are ready to roll, start the stream
      val run: StreamRun[Future[Long]] =
        converter(inputFile, outputFile).run()

      // since the stream runs asynchronously we can't directly access the result
      run.result.onComplete {
        case Success(x) => println(s"OK, $x bytes written")
        case Failure(e) => println(s"Error: $e")
      }

      // shut down when everything has terminated
      env.shutdownOn(run.termination)
      //#example

      import swave.core.util._
      run.termination.await()
      FileIO.readFile(outputFile).decodeUtf8 shouldEqual Right {
        """10.44
          |26.78
          |17.22
          |-25.56
          |0.00
          |-273.15
          |232.78""".stripMargin
      }
    }
  }

  override protected def afterAll(): Unit = Files.delete(inputFile.toPath)
} 
Example 162
Source File: MD5Spec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.docs

import java.nio.file.Files
import java.nio.charset.StandardCharsets._
import org.scalatest.{BeforeAndAfterAll, FreeSpec, Matchers}
import scala.concurrent.duration._
import swave.core.util._

class MD5Spec extends FreeSpec with Matchers with BeforeAndAfterAll {

  val testFileContent = "swave rocks!"

  val testPath = {
    val path = Files.createTempFile("md5-spec", ".tmp")
    Files.newBufferedWriter(path, UTF_8).append(testFileContent).close()
    path
  }

  "the examples in the `MD5` chapter should work as expected" - {

    "example-0" in {
      //#example-0
      import java.security.MessageDigest
      import java.io.File
      import scala.concurrent.Future
      import swave.core.io.files._   // enables `Spout.fromFile`
      import swave.compat.scodec._   // enables `ByteVector` support
      import swave.core._

      implicit val env = StreamEnv()

      def md5sum(file: File): Future[String] = {
        val md5 = MessageDigest.getInstance("MD5")
        Spout.fromFile(file)                                      // Spout[ByteVector]
          .fold(md5) { (m, bytes) => m.update(bytes.toArray); m } // Spout[MessageDigest]
          .flatMap(_.digest().iterator)                           // Spout[Byte]
          .map(_ & 0xFF)                                          // Spout[Int]
          .map("%02x" format _)                                   // Spout[String]
          .drainToMkString(limit = 32)                            // Future[String]
      }

      // don't forget to shutdown the StreamEnv at application exit with
      // env.shutdown()
      //#example-0

      try md5sum(testPath.toFile).await(2.seconds) shouldEqual "e1b2b603f9cca4a909c07d42a5788fe3"
      finally {
        Thread.sleep(100)
        env.shutdown()
      }
    }

    "example-1" in {
      //#example-1
      import java.io.File
      import scala.concurrent.Future
      import swave.core.io.files._   // enables `Spout.fromFile`
      import swave.core.hash._       // enables the `md5` transformation
      import swave.compat.scodec._   // enables `ByteVector` support
      import swave.core._

      implicit val env = StreamEnv()

      def md5sum(file: File): Future[String] =
        Spout.fromFile(file)            // Spout[ByteVector]
          .md5                          // Spout[ByteVector]
          .flattenConcat()              // Spout[Byte]
          .map(_ & 0xFF)                // Spout[Int]
          .map("%02x" format _)         // Spout[String]
          .drainToMkString(limit = 32)  // Future[String]

      // don't forget to shutdown the StreamEnv at application exit with
      // env.shutdown()
      //#example-1

      try md5sum(testPath.toFile).await(2.seconds) shouldEqual "e1b2b603f9cca4a909c07d42a5788fe3"
      finally {
        Thread.sleep(100)
        env.shutdown()
      }
    }
  }

  override protected def afterAll(): Unit = Files.delete(testPath)
} 
Example 163
Source File: FileSpoutSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.io.files

import java.nio.charset.StandardCharsets._
import java.nio.file.Files
import swave.testkit.Probes._
import swave.core._

class FileSpoutSpec extends SwaveSpec {
  import swave.core.io.files._

  implicit val env = StreamEnv()

  val TestText = {
    ("a" * 1000) +
      ("b" * 1000) +
      ("c" * 1000) +
      ("d" * 1000) +
      ("e" * 1000) +
      ("f" * 1000)
  }

  val testFile = {
    val f = Files.createTempFile("file-source-spec", ".tmp")
    Files.newBufferedWriter(f, UTF_8).append(TestText).close()
    f
  }

  val notExistingFile = {
    // this way we make sure it doesn't accidentally exist
    val f = Files.createTempFile("not-existing-file", ".tmp")
    Files.delete(f)
    f
  }

  override def afterTermination(): Unit = Files.delete(testFile)

  "Spout.fromPath must" - {

    "read contents from a file (ByteVector)" in {
      import swave.compat.scodec._

      val chunkSize = 512
      Spout
        .fromPath(testFile, chunkSize)
        .map(_.decodeUtf8.right.get)
        .drainTo(DrainProbe[String])
        .get
        .sendRequest(100)
        .expectNext(TestText.grouped(chunkSize).toList: _*)
        .expectComplete()
        .verifyCleanStop()
    }

    "read contents from a file (Array[Byte])" in {
      import swave.core.io.byteArrayBytes

      val chunkSize = 512
      Spout
        .fromPath(testFile, chunkSize)
        .map(array => new String(array, UTF_8))
        .drainTo(DrainProbe[String])
        .get
        .sendRequest(100)
        .expectNext(TestText.grouped(chunkSize).toList: _*)
        .expectComplete()
        .verifyCleanStop()
    }

    "produce an Error when the file doesn't exist" in {
      import swave.core.io.byteArrayBytes

      val drain = Spout.fromPath(notExistingFile).drainTo(DrainProbe[Array[Byte]]).get
      drain.expectError() shouldBe an[java.nio.file.NoSuchFileException]
      drain.verifyCleanStop()
    }
  }
} 
Example 164
Source File: FileDrainSpec.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.io.files

import java.nio.file.{Files, Path}
import scala.concurrent.duration._
import scodec.bits.ByteVector
import swave.compat.scodec._
import swave.core.util._
import swave.core._

class FileDrainSpec extends SwaveSpec {
  import swave.core.io.files._

  implicit val env = StreamEnv()

  val TestLines = List[String](
    "a" * 1000 + "\n",
    "b" * 1000 + "\n",
    "c" * 1000 + "\n",
    "d" * 1000 + "\n",
    "e" * 1000 + "\n",
    "f" * 1000 + "\n")

  val TestBytes = TestLines.map(ByteVector.encodeAscii(_).right.get)

  "Drain.toPath must" - {

    "write lines to a short file" in withTempPath(create = true) { path ⇒
      val result = Spout.one(ByteVector("abc" getBytes UTF8)).drainTo(Drain.toPath(path, chunkSize = 512))
      result.await(5.seconds) shouldEqual 3
      verifyFileContents(path, "abc")
    }

    "write lines to a long file" in withTempPath(create = true) { path ⇒
      val result = Spout(TestBytes).drainTo(Drain.toPath(path, chunkSize = 512))
      result.await(5.seconds) shouldEqual 6006
      verifyFileContents(path, TestLines mkString "")
    }

    "create new file if required" in withTempPath(create = false) { path ⇒
      val result = Spout(TestBytes).drainTo(Drain.toPath(path, chunkSize = 512))
      result.await(5.seconds) shouldEqual 6006
      verifyFileContents(path, TestLines mkString "")
    }
  }

  private def withTempPath(create: Boolean)(block: Path ⇒ Unit): Unit = {
    val targetFile = Files.createTempFile("file-sink", ".tmp")
    if (!create) Files.delete(targetFile)
    try block(targetFile)
    finally Files.delete(targetFile)
  }

  private def verifyFileContents(path: Path, contents: String): Unit = {
    val out = Files.readAllBytes(path)
    new String(out) shouldEqual contents
  }
} 
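FileDrainSpec wraps each case in a create-temp-file / delete-afterwards helper. A generic sketch of that resource pattern, detached from swave (names are illustrative):

import java.nio.file.{Files, Path}

object WithTempFile {
  // Run a block against a fresh temp file and always delete it afterwards.
  def withTempFile[T](block: Path => T): T = {
    val path = Files.createTempFile("scratch", ".tmp")
    try block(path)
    finally Files.delete(path)
  }

  def main(args: Array[String]): Unit =
    withTempFile { path =>
      Files.write(path, "hello".getBytes("UTF-8"))
      println(Files.size(path)) // 5
    }
}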
Example 165
Source File: FileIO.scala    From swave   with Mozilla Public License 2.0 5 votes vote down vote up
package swave.core.io.files

import java.io.File
import java.nio.channels.FileChannel
import java.nio.file.{FileSystems, Files, Path, StandardOpenOption}

import scala.util.control.NonFatal
import com.typesafe.config.Config
import swave.core.impl.util.SettingsCompanion
import swave.core.io.Bytes
import swave.core.macros._

object FileIO extends SpoutFromFiles with DrainToFiles {

  lazy val userHomePath: Path = FileSystems.getDefault.getPath(System getProperty "user.home")

  def resolveFileSystemPath(pathName: String): Path =
    if (pathName.length >= 2 && pathName.charAt(0) == '~' && pathName.charAt(1) == File.separatorChar) {
      userHomePath.resolve(pathName substring 2)
    } else FileSystems.getDefault.getPath(pathName)

  val WriteCreateOptions: Set[StandardOpenOption] = {
    import StandardOpenOption._
    Set(CREATE, TRUNCATE_EXISTING, WRITE)
  }

  final case class Settings(defaultFileReadingChunkSize: Int, defaultFileWritingChunkSize: Int) {
    requireArg(defaultFileReadingChunkSize > 0, "`defaultFileChunkSize` must be > 0")
    requireArg(defaultFileWritingChunkSize >= 0, "`defaultFileWritingChunkSize` must be >= 0")

    def withDefaultFileReadingChunkSize(defaultFileReadingChunkSize: Int) =
      copy(defaultFileReadingChunkSize = defaultFileReadingChunkSize)
    def withDefaultFileWritingChunkSize(defaultFileWritingChunkSize: Int) =
      copy(defaultFileWritingChunkSize = defaultFileWritingChunkSize)
  }

  object Settings extends SettingsCompanion[Settings]("swave.core.file-io") {
    def fromSubConfig(c: Config): Settings =
      Settings(
        defaultFileReadingChunkSize = c getInt "default-file-reading-chunk-size",
        defaultFileWritingChunkSize = c getInt "default-file-writing-chunk-size")
  }

  def writeFile[T: Bytes](fileName: String, data: T): Unit = writeFile(resolveFileSystemPath(fileName), data)
  def writeFile[T: Bytes](file: File, data: T): Unit       = writeFile(file.toPath, data)
  def writeFile[T: Bytes](path: Path, data: T, options: StandardOpenOption*): Unit = {
    implicit def decorator(value: T): Bytes.Decorator[T] = Bytes.decorator(value)
    Files.write(path, data.toArray, options: _*)
    ()
  }

  def readFile[T: Bytes](fileName: String): T = readFile(resolveFileSystemPath(fileName))
  def readFile[T: Bytes](file: File): T       = readFile(file.toPath)
  def readFile[T: Bytes](path: Path): T       = implicitly[Bytes[T]].apply(Files.readAllBytes(path))

  private[io] def quietClose(channel: FileChannel): Unit =
    try channel.close()
    catch { case NonFatal(_) ⇒ }
} 
Example 166
Source File: Main.scala    From ProxyCrawler   with Apache License 2.0 5 votes vote down vote up
package org.crowdcrawler.proxycrawler

import java.nio.charset.StandardCharsets
import java.nio.file.{Paths, Files}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.typesafe.scalalogging.Logger
import org.crowdcrawler.proxycrawler.checker.ProxyChecker
import org.slf4j.LoggerFactory


object Main {
  private val LOGGER = Logger(LoggerFactory.getLogger(Main.getClass))
  val OBJECT_MAPPER = new ObjectMapper() with ScalaObjectMapper
  OBJECT_MAPPER.registerModule(DefaultScalaModule)

  def main(args: Array[String]): Unit = {
    val usage = "Usage: \n\tcrawl [pluginClassName]* OutputFile\n" +
      "\tcheck proxies.json valid_proxies.json\n" +
      "\tfilter valid_proxies.json <HTTP|HTTPS|SOCKS> output.json\n" +
      "For example:\n" +
      "\t1. Crawl all supported websites and save proxies to proxies.json\n" +
      "\t\tcrawl proxies.json\n" +
      "\t2. Crawl www.cnproxy.com and save proxies to proxies.json:\n" +
      "\t\tcrawl CnProxyComPlugin proxies.json\n" +
      "\t3. Check the speed of proxies.\n" +
      "\t\tcheck proxies.json valid_proxies.json\n" +
      "\t4. Filter proxies by schema\n" +
      "\t\tfilter valid_proxies.json HTTP http.json\n"
    if (args.length < 2) {
      println(usage)
      return
    }

    val start = System.currentTimeMillis
    if (args(0) == "crawl") {
      val classNames = if (args.length == 2) {
        Array("CnProxyComPlugin", "CoolProxyNetPlugin", "GatherproxyComPlugin", "IpcnOrgPlugin",
          "ProxyListOrg", "SocksProxyNet", "USProxyOrgPlugin")
      } else {
        args.slice(1, args.length-1)
      }
      val crawler = ProxyCrawler(classNames: _*)

      val proxies = crawler.crawl()

      LOGGER.info("Writing to disk, " + proxies.size + " proxies")
      val json = OBJECT_MAPPER.writerWithDefaultPrettyPrinter.writeValueAsString(proxies)
      Files.write(Paths.get(args.last), json.getBytes(StandardCharsets.UTF_8))
    } else if (args(0) == "check") {
      val json = io.Source.fromFile(args(1), "utf-8").mkString
      val list = OBJECT_MAPPER.readValue[List[ProxyInfo]](json)

      // sort by speed desc
      val validProxies = ProxyChecker.check(list).filter(_.speed > 0)
        .sortWith((p1, p2) => p1.speed > p2.speed)
      LOGGER.info("Writing to disk, " + validProxies.size + " valid proxies out of " + list.size + " proxies")
      val newJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter
        .writeValueAsString(validProxies)
      Files.write(Paths.get(args(2)), newJson.getBytes(StandardCharsets.UTF_8))

    } else if (args(0) == "filter") {
      val json = io.Source.fromFile(args(1), "utf-8").mkString
      val list = OBJECT_MAPPER.readValue[List[ProxyInfo]](json)
      val filtered = if (args(2) == "SOCKS") {
        list.filter(p => p.schema == "SOCKS" | p.schema == "SOCKS4" || p.schema == "SOCKS5")
      } else {
        list.filter(p => p.schema == args(2))
      }

      val newJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter
        .writeValueAsString(filtered)
      Files.write(Paths.get(args(3)), newJson.getBytes(StandardCharsets.UTF_8))
    } else {
      println(usage)
      return
    }

    val end = System.currentTimeMillis
    LOGGER.info("Time elapsed " + (end - start) / 1000 + " seconds.")
  }
} 
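The crawler persists its results with a single Files.write of UTF-8 bytes. A minimal sketch of that write-and-verify step with an illustrative payload and output path:

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

object WriteJson {
  def main(args: Array[String]): Unit = {
    val json = """{"host":"127.0.0.1","port":8080}""" // illustrative payload
    val out  = Paths.get("/tmp/proxies.json")
    Files.write(out, json.getBytes(StandardCharsets.UTF_8))
    println(new String(Files.readAllBytes(out), StandardCharsets.UTF_8))
  }
}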
Example 167
Source File: RecoverLog.scala    From polynote   with Apache License 2.0 5 votes vote down vote up
package polynote

import java.nio.channels.FileChannel
import java.nio.file.{Files, Paths, StandardOpenOption}
import java.time.Instant

import cats.effect.Effect
import polynote.app.{Args, MainArgs}
import polynote.kernel.logging.Logging
import polynote.messages.{Message, Notebook, NotebookUpdate, ShortList}
import polynote.server.AppEnv
import zio.{Ref, Runtime, Task, UIO, ZIO}
import zio.ZIO.effectTotal
import zio.blocking.effectBlocking
import fs2.Stream
import polynote.server.repository.{FileBasedRepository, NotebookContent}
import polynote.server.repository.format.ipynb.IPythonFormat
import polynote.server.repository.fs.WAL
import polynote.server.taskConcurrent
import scodec.bits.ByteVector
import scodec.stream.decode
import scodec.codecs
import scodec.stream.decode.StreamDecoder

object RecoverLog {

  def replay(messages: Stream[Task, (Instant, Message)], ref: Ref[Notebook], log: Logging.Service): UIO[Unit] = messages.map(_._2).evalMap {
    case nb: Notebook => ref.set(nb)
    case upd: NotebookUpdate => ref.update {
      nb => try {
        upd.applyTo(nb)
      } catch {
        case err: Throwable =>
          log.errorSync(Some("Dropped update because an error occurred when applying it"), err)
          nb
      }
    }
    case _ => ZIO.unit
  }.compile.drain.catchAll {
    err =>
      log.error(Some("Error occurred while replaying the log; printing the final state anyway."), err)
  }

  def main(implicit ev: Effect[Task]): ZIO[AppEnv, String, Int] = for {
    args     <- ZIO.access[MainArgs](_.get[Args].rest)
    path     <- ZIO(args.head).flatMap(pathStr => effectBlocking(Paths.get(pathStr).toRealPath())).orDie
    is       <- effectBlocking(FileChannel.open(path, StandardOpenOption.READ)).orDie
    log      <- Logging.access
    _        <- Logging.info(s"Reading log entries from ${path}...")
    messages  = WAL.decoder.decodeMmap(is)
    ref      <- Ref.make(Notebook("", ShortList.Nil, None))
    _        <- replay(messages, ref, log)
    format    = new IPythonFormat
    result   <- ref.get
    encoded  <- format.encodeNotebook(NotebookContent(result.cells, result.config)).orDie
    _        <- effectTotal(println(encoded))
  } yield 0
} 
Example 168
Source File: ThriftImporter.scala    From diffy   with GNU Affero General Public License v3.0 5 votes vote down vote up
package ai.diffy.scrooge

import com.twitter.scrooge.frontend.{DirImporter, Importer}
import java.io.File
import java.nio.file.Files
import java.util.zip.ZipFile
import scala.collection.JavaConversions._
import scala.io.Source

object ZippedFileImporter {
  def apply(zipFiles: Seq[ZipFile]): Importer = {
    val thriftDir = Files.createTempDirectory("thrift-")
    thriftDir.toFile.deleteOnExit()

    zipFiles foreach { zipFile =>
      zipFile.entries.toList.collect {
        case zipEntry if !zipEntry.isDirectory && zipEntry.getName.endsWith(".thrift") =>
          val data = Source.fromInputStream(zipFile.getInputStream(zipEntry), "UTF-8").mkString

          val newFile = new File(thriftDir.toString + File.separator + zipEntry.getName)
          new File(newFile.getParent).mkdirs()

          Files.write(newFile.toPath, data.getBytes)
      }
    }

    DirImporter(thriftDir.toFile)
  }
}

object FileImporter {
  def apply(files: Seq[File]): Importer = {
    val thriftDir = Files.createTempDirectory("thrift-")
    thriftDir.toFile.deleteOnExit()

    files foreach { file =>
      val newFile = new File(thriftDir.toString + File.separator + file.getName)
      Files.copy(file.toPath, newFile.toPath)
    }

    DirImporter(thriftDir.toFile)
  }
} 
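Both importers above stage .thrift sources into a temporary directory that is removed on JVM exit. A small sketch of that staging pattern (entry name and contents are illustrative):

import java.io.File
import java.nio.file.Files

object TempThriftDir {
  def main(args: Array[String]): Unit = {
    val dir = Files.createTempDirectory("thrift-")
    dir.toFile.deleteOnExit()
    val idl = new File(dir.toString + File.separator + "example.thrift") // illustrative entry
    new File(idl.getParent).mkdirs()
    Files.write(idl.toPath, "struct Empty {}".getBytes("UTF-8"))
    println(s"wrote ${Files.size(idl.toPath)} bytes to $idl")
  }
}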
Example 169
Source File: ThriftFeatureTest.scala    From diffy   with GNU Affero General Public License v3.0 5 votes vote down vote up
package ai.diffy

import java.io.{File, FileOutputStream}
import java.net.ServerSocket
import java.nio.file.Files
import java.util.zip.{ZipEntry, ZipOutputStream}

import ai.diffy.examples.thrift.ExampleServers
import ai.diffy.proxy.DifferenceProxy
import ai.diffy.thriftscala.Adder
import com.google.inject.Stage
import com.twitter.finagle.http.Status
import com.twitter.finagle.util.DefaultTimer
import com.twitter.finagle.{Http, ThriftMux}
import com.twitter.finatra.http.EmbeddedHttpServer
import com.twitter.inject.Test
import com.twitter.util.{Await, Duration, Future, FuturePool}

import scala.io.Source

class ThriftFeatureTest extends Test {

  def getPort(): Int = {
    val s  = new ServerSocket(0)
    val port = s.getLocalPort
    s.close()
    port
  }

  val env@Seq(p,s,c,d) = Seq.fill(4)(getPort())
  val environment = FuturePool.unboundedPool(ExampleServers.main(env.take(3).map(_.toString).toArray))

  val diffy = new MainService
  lazy val differenceProxy = diffy.injector.instance[DifferenceProxy]


  val thriftFile = new File("src/test/thrift/example.thrift")
  val data = Source.fromInputStream(Files.newInputStream(thriftFile.toPath), "UTF-8").mkString
  val thriftJar = Files.createTempFile("thrift", "jar")
  thriftJar.toFile.deleteOnExit()
  val out = new ZipOutputStream(new FileOutputStream(thriftJar.toFile))
  out.putNextEntry(new ZipEntry(thriftFile.getAbsolutePath))
  out.write(data.getBytes)
  out.closeEntry()
  out.close()

  val server = new EmbeddedHttpServer(
    twitterServer = diffy,
    flags = Map(
      "proxy.port" -> s":$d",
      "candidate" -> s"localhost:$c",
      "master.primary" -> s"localhost:$p",
      "master.secondary" -> s"localhost:$s",
      "serviceName" -> "myThriftService",
      "service.protocol" -> "thrift",
      "thrift.jar" -> thriftJar.toAbsolutePath.toString,
      "thrift.serviceClass" -> "Adder",
      "summary.email" -> "test"
    ),
    stage = Stage.PRODUCTION
  )

  val client = ThriftMux.client.build[Adder.MethodPerEndpoint](s"localhost:$d")

  test("verify startup") {
    server.assertHealthy()
  }

  test("verify DifferenceCollector") {
    assert(differenceProxy.collector.fields.isEmpty)
    Await.result(client.add(1, 1).liftToTry)
    var tries = 0
    while(differenceProxy.outstandingRequests.get() > 0 && tries < 10) {
      Await.result(Future.sleep(Duration.fromSeconds(1))(DefaultTimer))
      tries = tries + 1
    }
    assert(!differenceProxy.collector.fields.isEmpty)
  }

  test("verify present differences via API") {
    val response =
      Await.result(Http.fetchUrl(s"http://${server.externalHttpHostAndPort}/api/1/endpoints/add/stats"))
    assertResult(Status.Ok)(response.status)
    assert(response.getContentString().contains(""""differences":1"""))
  }

  test("verify absent endpoint in API") {
    val response =
      Await.result(Http.fetchUrl(s"http://${server.externalHttpHostAndPort}/api/1/endpoints/subtract/stats"))
    assertResult(Status.Ok)(response.status)
    assertResult("""{"error":"key not found: subtract"}""")(response.getContentString())
  }

} 
Example 170
Source File: PlanWriter.scala    From piglet   with Apache License 2.0 5 votes vote down vote up
package dbis.piglet.tools

import java.nio.file.{Files, Path, StandardOpenOption}

import dbis.piglet.op.{PigOperator, TimingOp}
import dbis.piglet.plan.DataflowPlan
import dbis.piglet.tools.logging.PigletLogging
//import guru.nidi.graphviz.engine.{Format, Graphviz}
//import guru.nidi.graphviz.parse.Parser

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration.Duration


case class Node(id: String, var time: Option[Duration] = None, var label: String = "") {
  
  private def mkLabel = {
    val t = if(time.isDefined) s"\n${time.get.toMillis}ms (${BigDecimal(time.get.toMillis / 1000.0).setScale(2,BigDecimal.RoundingMode.HALF_UP).toDouble}s)" else ""
    val l = s"$label\n$id\n$t" 
    PlanWriter.quote(l)
  }
  
  override def toString = s"op$id ${if(label.trim().nonEmpty) s"[label=$mkLabel]" else ""}"
}

case class Edge(from: String, to: String, var label: String = "") {
  override def toString = s"op$from -> op$to ${if(label.trim().nonEmpty) s"[label=$label]" else "" }"
}


  private def writeDotFile(file: Path, graph: String): Unit = {
    logger.debug(s"writing dot file to $file")
    if(Files.notExists(file.getParent)) {
      Files.createDirectories(file.getParent)
    }
    Files.write(file, List(graph).asJava, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)
  }
  
  
} 
Example 171
Source File: Main.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.nio.file.Files

import scala.util.Try

object Main extends App {

  def classFound(clsName: String) = Try(
    Thread.currentThread()
      .getContextClassLoader()
      .loadClass(clsName)
  ).toOption.nonEmpty

  val shapelessFound = classFound("shapeless.HList")
  val argonautFound = classFound("argonaut.Json")
  val argonautShapelessFound = classFound("argonaut.derive.MkEncodeJson")

  assert(
    argonautShapelessFound,
    "Expected to find class from argonaut-shapeless"
  )
  assert(
    !shapelessFound,
    "Expected not to find classes from shapeless"
  )
  assert(
    !argonautFound,
    "Expected not to find classes from argonaut"
  )
} 
Example 172
Source File: Main.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.nio.file.Files

import scala.util.Try

object Main extends App {

  def classFound(clsName: String) = Try(
    Thread.currentThread()
      .getContextClassLoader()
      .loadClass(clsName)
  ).toOption.nonEmpty

  val shapelessFound = classFound("shapeless.HList")
  val argonautFound = classFound("argonaut.Json")
  val argonautShapelessFound = classFound("argonaut.derive.MkEncodeJson")

  assert(
    argonautShapelessFound,
    "Expected to find class from argonaut-shapeless"
  )
  assert(
    !shapelessFound,
    "Expected not to find classes from shapeless"
  )
  assert(
    !argonautFound,
    "Expected not to find classes from argonaut"
  )
} 
Example 173
Source File: Main.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.nio.file.Files

import scala.util.Try

object Main extends App {

  def classFound(clsName: String) = Try(
    Thread.currentThread()
      .getContextClassLoader()
      .loadClass(clsName)
  ).toOption.nonEmpty

  val shapelessFound = classFound("shapeless.HList")
  val argonautFound = classFound("argonaut.Json")
  val argonautShapelessFound = classFound("argonaut.derive.MkEncodeJson")

  assert(
    argonautShapelessFound,
    "Expected to find class from argonaut-shapeless"
  )
  assert(
    !shapelessFound,
    "Expected not to find classes from shapeless"
  )
  assert(
    !argonautFound,
    "Expected not to find classes from argonaut"
  )
} 
Example 174
Source File: Main.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.nio.file.Files

object Main extends App {
  val p = new java.util.Properties
  p.load(
    Thread.currentThread()
      .getContextClassLoader
      .getResource("common-version-info.properties")
      .openStream()
  )

  val hadoopVersion = p.getProperty("version")
  Console.err.println(s"Found hadoop version $hadoopVersion")

  assert(hadoopVersion.startsWith("3.1."))

  Files.write(new File("output").toPath, "OK".getBytes("UTF-8"))
} 
Example 175
Source File: Main.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.nio.file.Files

import scala.collection.JavaConverters._

object Main extends App {

  val cp = new collection.mutable.ArrayBuffer[File]

  def buildCp(loader: ClassLoader): Unit =
    if (loader != null) {
      loader match {
        case u: java.net.URLClassLoader =>
          cp ++= u.getURLs
            .map(_.toURI)
            .map(new File(_))
        case _ =>
      }

      buildCp(loader.getParent)
    }

  buildCp(Thread.currentThread().getContextClassLoader)

  System.err.println("Classpath:")
  for (f <- cp)
    System.err.println(s"  $f")
  System.err.println()

  val sbtBase = new File(sys.props.getOrElse(
    "sbt.global.base",
    sys.props("user.home") + "/.sbt"
  ))
  val prefixes = Seq(new File(sbtBase, "boot").getAbsolutePath) ++
    Seq("coursier.sbt-launcher.dirs.scala-jars", "coursier.sbt-launcher.dirs.base", "user.dir")
      .flatMap(sys.props.get(_))
      .map(new File(_).getAbsolutePath)
  val home = new File(sys.props("user.home")).getAbsolutePath

  def notFromCoursierCache(name: String): Unit = {
    val jars = cp.filter(_.getName.startsWith(name)).distinct
    assert(jars.nonEmpty, s"Found no JARs for $name")

    for (jar <- jars)
      assert(
        !jar.getAbsolutePath.startsWith(home) ||
          !jar.getAbsolutePath.toLowerCase(java.util.Locale.ROOT).contains("coursier") ||
          prefixes.exists(jar.getAbsolutePath.startsWith),
        s"JAR for $name ($jar) under $home and not under any of ${prefixes.mkString(", ")}"
      )
  }

  val props = Thread.currentThread()
    .getContextClassLoader
    .getResources("library.properties")
    .asScala
    .toVector
    .map(_.toString)
    .sorted

  notFromCoursierCache("scala-library")
  assert(props.lengthCompare(1) == 0, s"Found several library.properties files in classpath: $props")

  Files.write(new File("output").toPath, "OK".getBytes("UTF-8"))
} 
Example 176
Source File: ResourceUtils.scala    From sbt-lagom-descriptor-generator   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.spec

import java.io.{ File, InputStream }
import java.nio.file.{ Files, Paths, StandardOpenOption }

import scala.io.{ BufferedSource, Source }


  def writeFile(folder: File, relativeFile: File, fileContents: String): File = {
    val path = Paths.get(folder.getAbsolutePath, relativeFile.getPath)
    // `path` is the absolute path to the file, so only path.getParent needs to be created as directories
    Files.createDirectories(path.getParent)
    Files.write(
      path,
      fileContents.getBytes,
      StandardOpenOption.CREATE,
      StandardOpenOption.SYNC,
      StandardOpenOption.TRUNCATE_EXISTING
    ).toFile

  }
} 
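writeFile above pairs Files.createDirectories with Files.write and explicit open options. A minimal standalone sketch of the same call sequence (target path and contents are illustrative):

import java.nio.file.{Files, Paths, StandardOpenOption}

object WriteWithOptions {
  def main(args: Array[String]): Unit = {
    val path = Paths.get("/tmp/generated/descriptor.scala") // illustrative target
    Files.createDirectories(path.getParent)
    Files.write(
      path,
      "// generated file".getBytes("UTF-8"),
      StandardOpenOption.CREATE,
      StandardOpenOption.SYNC,
      StandardOpenOption.TRUNCATE_EXISTING
    )
  }
}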
Example 177
Source File: PerTestSparkSession.scala    From Spark-RSVD   with Apache License 2.0 5 votes vote down vote up
package com.criteo.rsvd

import java.io.File
import java.nio.file.{Files, Path}
import java.util.concurrent.locks.ReentrantLock

import org.apache.commons.io.FileUtils
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.scalatest.{BeforeAndAfterEach, Suite}

import scala.reflect.ClassTag
import scala.util.control.NonFatal

object LocalSparkSession {
  private[this] val lock = new ReentrantLock()

  def acquire(): Unit = lock.lock()

  def release(): Unit = lock.unlock()

  def builder: SparkSession.Builder = {
    SparkSession
      .builder()
      .master("local[*]")
      .appName("test")
      .config("spark.ui.enabled", false)
  }
}


  def sparkConf: Map[String, Any] = Map()

  def toRDD[T: ClassTag](input: Seq[T]): RDD[T] = sc.parallelize(input)

  def toArray[T](input: RDD[T]): Array[T] = input.collect()

  protected def closeSession() = {
    currentSession.foreach(_.stop())
    currentSession = None
    try {
      checkpointDir.foreach(path =>
        FileUtils.deleteDirectory(new File(path.toString)))
    } catch {
      case NonFatal(_) =>
    }
    checkpointDir = None
    LocalSparkSession.release()
  }

  private def getOrCreateSession = synchronized {
    if (currentSession.isEmpty) {
      val builder = LocalSparkSession.builder
      for ((key, value) <- sparkConf) {
        builder.config(key, value.toString)
      }
      currentSession = Some(builder.getOrCreate())
      checkpointDir =
        Some(Files.createTempDirectory("spark-unit-test-checkpoint-"))
      currentSession.get.sparkContext
        .setCheckpointDir(checkpointDir.get.toString)
      currentSession.get.sparkContext.setLogLevel("WARN")
    }
    currentSession.get
  }

  override def beforeEach(): Unit = {
    LocalSparkSession.acquire()
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      closeSession()
    }
  }
} 
Example 178
Source File: SassCompiler.scala    From sbt-sassify   with Apache License 2.0 5 votes vote down vote up
package org.irundaia.sass

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import com.sun.jna.Native
import org.irundaia.sass.jna.SassLibrary
import org.irundaia.util.extensions._

object SassCompiler {
  private val library = "sass"
  val libraryInstance: SassLibrary = Native.loadLibrary(library, classOf[SassLibrary])

  private val charset = StandardCharsets.UTF_8

  def compile(sass: Path, sourceDir: Path, targetDir: Path, compilerSettings: CompilerSettings): Either[CompilationFailure, CompilationSuccess] = {
    def sourceWithExtn(extension: String): Path =
      targetDir.resolve(sourceDir.relativize(sass)).resolveSibling(sass.withExtension(extension).getFileName)

    // Determine target files
    val css = sourceWithExtn(compilerSettings.extension)
    val sourceMap = sourceWithExtn(s"${compilerSettings.extension}.map")

    // Make sure that the target directory is created
    Files.createDirectories(css.getParent)

    // Compile the sources, and get the dependencies
    val eitherErrorOrOutput = doCompile(sass, compilerSettings)

    // Output the CSS and source map files
    eitherErrorOrOutput.right.foreach(output => {
        outputCss(output, css)
        outputSourceMap(sourceMap, output, compilerSettings)
      }
    )

    // Return either the compilation error or the files read/written by the compiler
    eitherErrorOrOutput
        .fold(
          error => Left(CompilationFailure(error)),
          output => Right(determineCompilationDependencies(output, css, sourceMap)))
  }

  private def doCompile(source: Path, compilerSettings: CompilerSettings): Either[SassError, SassOutput] = {
    val context = Context(source)

    compilerSettings.applySettings(source, context.options)
    context.options.inputPath = source
    context.options.outputPath = source.withExtension(compilerSettings.extension)
    context.options.sourceMapPath = source.withExtension(s"${compilerSettings.extension}.map")

    libraryInstance.sass_compile_file_context(context.nativeContext)

    val result = Output(context) match {
      case e: SassError => Left(e)
      case o: SassOutput => Right(o)
    }

    context.cleanup()

    result
  }

  private def outputCss(compilationResult: SassOutput, css: Path) = Files.write(css, compilationResult.css.getBytes(charset))

  private def outputSourceMap(sourceMap: Path, output: SassOutput, compilerSettings: CompilerSettings) =
    Option(output.sourceMap) match {
      case Some(sourceMapContent) if compilerSettings.generateSourceMaps =>
        Files.write(sourceMap, sourceMapContent.getBytes(charset))
      case _ => // Do not output any source map
    }

  private def determineCompilationDependencies(compilationResult: SassOutput, css: Path, sourceMap: Path): CompilationSuccess = {
    val filesWritten = if (Files.exists(sourceMap))
      Set(css, sourceMap)
    else
      Set(css)

    val filesRead = compilationResult.readFiles.map(Paths.get(_)).toSet

    CompilationSuccess(filesRead, filesWritten)
  }
} 
Example 179
Source File: SassCompilerTest.scala    From sbt-sassify   with Apache License 2.0 5 votes vote down vote up
package org.irundaia.sass

import java.nio.file.{Paths, Files}

import org.scalatest.{FunSpec, MustMatchers}

import scala.io.Source

class SassCompilerTest extends FunSpec with MustMatchers {
  val testDir = Files.createTempDirectory("sbt-sassify")
  val compilerSettings = CompilerSettings(Minified, true, true, Auto, Seq(), "", 10, "css")

  describe("The SassCompiler") {
    describe("using well formed scss input") {
      describe("without includes") {
        val input = Paths.get(getClass.getResource("/org/irundaia/sass/well-formed.scss").toURI)
        val compilationResults = SassCompiler.compile(input, input.getParent, testDir, compilerSettings)

        it("should compile") {
          compilationResults.isRight mustBe true
        }

        it("should contain the proper contents") {
          val cssMin = Source.fromFile(compilationResults.right.get.filesWritten.filter(_.toString.endsWith("css")).head.toFile).mkString
          val testMinCss = cssMin.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "")

          testMinCss must include(".test{font-size:10px")
          testMinCss must include(".test.hidden{display:none")
        }

        it("should have read one file") {
          compilationResults.right.get.filesRead.size must be(1)
        }

        it("should have read the correct file") {
          compilationResults.right.get.filesRead.head.toString must endWith("well-formed.scss")
        }
      }

      describe("with includes") {
        val input = Paths.get(getClass.getResource("/org/irundaia/sass/well-formed-using-import.scss").toURI)
        val compilationResults = SassCompiler.compile(input, input.getParent, testDir, compilerSettings)

        it("should compile") {
          compilationResults.isRight mustBe true
        }

        it("should include the contents of both the included and the including file") {
          val cssMin = Source.fromFile(compilationResults.right.get.filesWritten.filter(_.toString.endsWith("css")).head.toFile).mkString
          val testMinCss = cssMin.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "")

          testMinCss must include(".test-import{font-weight:bold")
          testMinCss must include(".test{font-size:10px")
          testMinCss must include(".test.hidden{display:none")
        }

        it("should have read two files") {
          compilationResults.right.get.filesRead.size must be(2)
        }

        it("should have read the included file") {
          compilationResults.right.get.filesRead.filter(_.endsWith("_well-formed-import.scss")) must not be empty
        }
      }
    }

    describe("using broken scss input") {
      val input = Paths.get(getClass.getResource("/org/irundaia/sass/broken-input.scss").toURI)
      val compilationResult = SassCompiler.compile(input, input.getParent, testDir, compilerSettings)

      describe("should fail compilation") {
        compilationResult.isLeft mustBe true
      }

      describe("should throw an exception") {
        it("reporting Invalid CSS") {
          compilationResult match {
            case Left(exception) => exception.getMessage must include("Invalid CSS after ")
            case _ => fail
          }
        }

        it("reporting an error on line 2 column 15") {
          compilationResult match {
            case Left(exception: LineBasedCompilationFailure) =>
              exception.line mustBe 2
              exception.column mustBe 15
            case _ => fail
          }
        }
      }
    }
  }
} 
Example 180
Source File: AddJar.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.{URL, URI}
import java.nio.file.{Files, Paths}
import java.util.zip.ZipFile
import org.apache.toree.magic._
import org.apache.toree.magic.builtin.AddJar._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.{ArgumentParsingSupport, DownloadSupport, LogLike, FileUtils}
import com.typesafe.config.Config
import org.apache.hadoop.fs.Path
import org.apache.toree.plugins.annotations.Event

object AddJar {
  val HADOOP_FS_SCHEMES = Set("hdfs", "s3", "s3n", "file")

  private var jarDir: Option[String] = None

  def getJarDir(config: Config): String = {
    jarDir.getOrElse({
      jarDir = Some(
        if(config.hasPath("jar_dir") && Files.exists(Paths.get(config.getString("jar_dir")))) {
          config.getString("jar_dir")
        } else {
          FileUtils.createManagedTempDirectory("toree_add_jars").getAbsolutePath
        }
      )
      jarDir.get
    })
  }
}

class AddJar
  extends LineMagic with IncludeInterpreter
  with IncludeOutputStream with DownloadSupport with ArgumentParsingSupport
  with IncludeKernel with IncludePluginManager with IncludeConfig with LogLike
{
  // Option to mark re-downloading of jars
  private val _force =
    parser.accepts("f", "forces re-download of specified jar")

  // Option to load the jar as a magic extension
  private val _magic =
    parser.accepts("magic", "loads jar as a magic extension")

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  )
      } else {
        downloadFile(
          new URL(jarRemoteLocation),
          new File(downloadLocation).toURI.toURL
        )
      }

      // Report download finished
      printStream.println(s"Finished download of $jarName")
    } else {
      printStream.println(s"Using cached version of $jarName")
    }

    // validate jar file
    if(! isValidJar(fileDownloadLocation)) {
      throw new IllegalArgumentException(s"Jar '$jarName' is not valid.")
    }

    if (_magic) {
      val plugins = pluginManager.loadPlugins(fileDownloadLocation)
      pluginManager.initializePlugins(plugins)
    } else {
      kernel.addJars(fileDownloadLocation.toURI)
    }
  }
} 
Example 181
Source File: FileUtils.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.utils

import java.io.File
import java.nio.file.Files


object FileUtils {

  val parentTempDir: String = "toree-tmp-dir"

  private def deleteDirRecur(file: File): Unit = {
    // delete directory recursively
    if(file.isDirectory){
      file.listFiles.foreach(deleteDirRecur)
    }
    if(file.exists){
      file.delete
    }
  }

  private def createParentTemp() = {
    val dir = Files.createTempDirectory(parentTempDir).toFile
    sys.addShutdownHook{
      // addShutdownHook ensures that when the JVM exits, the temporary directory
      // associated with a given kernel is deleted
      deleteDirRecur(dir)
    }
    dir
  }

  private lazy val parent: File = createParentTemp()

  
  def createManagedTempDirectory(name: String): File = {
    val dir = new File(parent, name)
    dir.mkdir()
    dir
  }
} 
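The managed-temporary-directory idea above relies only on java.nio.file.Files plus a JVM shutdown hook, so it is easy to replay outside Toree. Below is a minimal, self-contained sketch of the same pattern; the object and directory names are illustrative, not part of the Toree codebase.

import java.io.File
import java.nio.file.Files

object ManagedTempDirDemo extends App {
  // Recursively delete a directory tree, mirroring deleteDirRecur above.
  private def deleteRecur(file: File): Unit = {
    if (file.isDirectory) file.listFiles.foreach(deleteRecur)
    if (file.exists) file.delete()
  }

  // Parent temp directory that is removed when the JVM exits.
  val parent: File = Files.createTempDirectory("demo-tmp-dir").toFile
  sys.addShutdownHook(deleteRecur(parent))

  // Named scratch directories live underneath the managed parent.
  val scratch = new File(parent, "scratch")
  scratch.mkdir()
  println(s"working in ${scratch.getAbsolutePath}")
}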
Example 182
Source File: CoursierDependencyDownloaderSpec.scala    From incubator-toree   with Apache License 2.0 5 votes vote down vote up
package org.apache.toree.dependencies

import java.net.URL
import java.nio.file.Files

import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}

class CoursierDependencyDownloaderSpec extends FunSpec with Matchers
  with OneInstancePerTest
{
  private val coursierDependencyDownloader = new CoursierDependencyDownloader

  describe("CoursierDependencyDownloader") {
    describe("#addMavenRepository") {
      it("should add to the list of repositories") {
        val repo = new URL("http://some-repo.com")

        coursierDependencyDownloader.addMavenRepository(repo, None)

        val repos = coursierDependencyDownloader.getRepositories

        repos should contain (repo.toURI)
      }
    }

    describe("#removeMavenRepository") {
      it("should remove from the list of repositories") {
        val repo = new URL("http://some-repo.com")

        coursierDependencyDownloader.addMavenRepository(repo, None)
        coursierDependencyDownloader.removeMavenRepository(repo)

        val repos = coursierDependencyDownloader.getRepositories

        repos should not contain (repo.toURI)
      }
    }

    describe("#setDownloadDirectory") {
      it("should set the new download directory if valid") {
        val validDir = Files.createTempDirectory("tmpdir").toFile
        validDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(validDir)
        result should be (true)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should be (validDir.getAbsolutePath)
      }

      it("should not change the directory if given a file") {
        val invalidDir = Files.createTempFile("tmp", "file").toFile
        invalidDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(invalidDir)
        result should be (false)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should not be (invalidDir.getAbsolutePath)
      }

      it("should support creating missing directories") {
        val baseDir = Files.createTempDirectory("tmpdir").toFile
        val validDir = baseDir.toPath.resolve("otherdir").toFile
        validDir.deleteOnExit()
        baseDir.deleteOnExit()

        val result = coursierDependencyDownloader.setDownloadDirectory(validDir)
        result should be (true)

        val dir = coursierDependencyDownloader.getDownloadDirectory
        dir should be (validDir.getAbsolutePath)
      }
    }

    describe("#getRepositories") {
      it("should have the default repositories") {
        val expected = Seq(DependencyDownloader.DefaultMavenRepository.toURI)

        val actual = coursierDependencyDownloader.getRepositories

        actual should be (expected)
      }
    }

    describe("#getDownloadDirectory") {
      it("should have the default download directory") {
        val expected = DependencyDownloader.DefaultDownloadDirectory.getAbsolutePath

        val actual = coursierDependencyDownloader.getDownloadDirectory

        actual should be (expected)
      }
    }
  }
} 
Example 183
Source File: SwaggerSpecRunner.scala    From play-swagger   with Apache License 2.0 5 votes vote down vote up
package com.iheart.playSwagger

import java.nio.file.{ Files, Paths, StandardOpenOption }

import play.api.libs.json.{ JsValue, Json }

import scala.util.{ Failure, Success, Try }

object SwaggerSpecRunner extends App {
  implicit def cl: ClassLoader = getClass.getClassLoader

  val targetFile :: routesFile :: domainNameSpaceArgs :: outputTransformersArgs :: swaggerV3String :: apiVersion :: swaggerPrettyJson :: namingStrategy :: Nil = args.toList
  private def fileArg = Paths.get(targetFile)
  private def swaggerJson = {
    val swaggerV3 = java.lang.Boolean.parseBoolean(swaggerV3String)
    val domainModelQualifier = PrefixDomainModelQualifier(domainNameSpaceArgs.split(","): _*)
    val transformersStrs: Seq[String] = if (outputTransformersArgs.isEmpty) Seq() else outputTransformersArgs.split(",")
    val transformers = transformersStrs.map { clazz ⇒
      Try(cl.loadClass(clazz).asSubclass(classOf[OutputTransformer]).newInstance()) match {
        case Failure(ex: ClassCastException) ⇒
          throw new IllegalArgumentException("Transformer should be a subclass of com.iheart.playSwagger.OutputTransformer:" + clazz, ex)
        case Failure(ex) ⇒ throw new IllegalArgumentException("Could not create transformer", ex)
        case Success(el) ⇒ el
      }
    }

    val swaggerSpec: JsValue = SwaggerSpecGenerator(
      NamingStrategy.from(namingStrategy),
      domainModelQualifier,
      outputTransformers = transformers,
      swaggerV3 = swaggerV3,
      apiVersion = Some(apiVersion)).generate(routesFile).get

    if (swaggerPrettyJson.toBoolean) Json.prettyPrint(swaggerSpec)
    else swaggerSpec.toString
  }

  Files.write(fileArg, swaggerJson.getBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
} 
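Note the StandardOpenOption.CREATE_NEW flag on the final write: it makes the run fail fast if the target file already exists. A small standalone sketch of that behaviour follows; the path and JSON contents are made up for illustration.

import java.nio.file.{Files, Paths, StandardOpenOption}

val target = Files.createTempDirectory("swagger-demo").resolve("swagger.json")
// First write succeeds and creates the file.
Files.write(target, """{"swagger":"2.0"}""".getBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
// A second CREATE_NEW write to the same path would throw java.nio.file.FileAlreadyExistsException,
// which is why the generator above expects a fresh target file on every invocation.
// Files.write(target, "{}".getBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)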
Example 184
Source File: JobService.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.job.service.job

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path }

import akka.actor.typed.{ ActorRef, ActorSystem }
import akka.actor.typed.scaladsl.AskPattern._
import akka.http.scaladsl.server.directives.FileInfo
import akka.util.Timeout
import javax.inject.{ Inject, Singleton }
import mass.job.service.job.JobActor.CommandReply
import mass.message.job._

import scala.collection.immutable
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.ClassTag

@Singleton
class JobService @Inject() (implicit system: ActorSystem[_]) {
  implicit val timeout: Timeout = Timeout(10.seconds)
  val jobActor: ActorRef[JobActor.Command] = JobActor.init(system)

  def listOption(): Future[JobGetAllOptionResp] = askToJob[JobGetAllOptionResp](JobGetAllOptionReq())

  def uploadFiles(list: immutable.Seq[(FileInfo, File)])(implicit ec: ExecutionContext): Future[JobUploadFilesResp] = {
    askToJob[JobUploadFilesResp](JobUploadFilesReq(list)).andThen {
      case _ => list.foreach { case (_, file) => Files.deleteIfExists(file.toPath) }
    }
  }

  def uploadJobOnZip(fileInfo: FileInfo, file: Path)(implicit ec: ExecutionContext): Future[JobUploadJobResp] = {
    val req = JobUploadJobReq(
      file,
      fileInfo.fileName,
      fileInfo.contentType.charsetOption.map(_.nioCharset()).getOrElse(StandardCharsets.UTF_8))
    askToJob[JobUploadJobResp](req).andThen { case _ => Files.deleteIfExists(file) }
  }

  def updateTrigger(req: JobUpdateReq): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](req)

  def page(req: JobPageReq): Future[JobPageResp] = askToJob[JobPageResp](req)

  def findItemByKey(key: String): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](JobFindReq(key = key))

  def createJob(req: JobCreateReq): Future[JobCreateResp] = askToJob[JobCreateResp](req)

  def updateJob(req: JobUpdateReq): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](req)

  @inline private def askToJob[RESP](req: JobMessage)(implicit tag: ClassTag[RESP]): Future[RESP] =
    jobActor.ask[JobResponse](replyTo => CommandReply(req, replyTo)).mapTo[RESP]
} 
Example 185
Source File: MassCore.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.extension

import java.nio.file.{ Files, Path, Paths }

import akka.actor.ExtendedActorSystem
import akka.serialization.jackson.JacksonObjectMapperProvider
import com.fasterxml.jackson.databind.ObjectMapper
import com.typesafe.scalalogging.StrictLogging
import fusion.common.extension.{ FusionExtension, FusionExtensionId }
import fusion.core.extension.FusionCore
import mass.MassSettings
import mass.core.Constants


final class MassCore private (override val classicSystem: ExtendedActorSystem)
    extends FusionExtension
    with StrictLogging {
  FusionCore(classicSystem)
  val settings: MassSettings = MassSettings(classicSystem.settings.config)
  val jsonMapper: ObjectMapper = JacksonObjectMapperProvider(classicSystem).getOrCreate(Constants.JACKSON_JSON, None)
  val cborMapper: ObjectMapper = JacksonObjectMapperProvider(classicSystem).getOrCreate(Constants.JACKSON_CBOR, None)
  val tempDirectory: Path = {
    val _tempDirectory = Paths.get(
      configuration.getOrElse[String](s"${Constants.BASE_CONF}.core.temp-dir", System.getProperty("java.io.tmpdir")))
    if (!Files.isDirectory(_tempDirectory)) {
      Files.createDirectories(_tempDirectory)
    }
    _tempDirectory
  }

  logger.info(configuration.getConfig(Constants.BASE_CONF).toString)

  def name: String = configuration.getString(s"${Constants.BASE_CONF}.name")

  override def toString = s"MassCore($classicSystem)"
}

object MassCore extends FusionExtensionId[MassCore] {
  override def createExtension(system: ExtendedActorSystem): MassCore = new MassCore(system)
} 
Example 186
Source File: JobUtils.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.job.util

import java.io.File
import java.nio.charset.Charset
import java.nio.file.{ Files, Path, StandardCopyOption }
import java.util.zip.ZipFile

import com.typesafe.scalalogging.StrictLogging
import helloscala.common.Configuration
import helloscala.common.util.{ DigestUtils, Utils }
import mass.common.util.FileUtils
import mass.core.job.JobConstants
import mass.job.JobSettings
import mass.message.job._
import mass.model.job.{ JobItem, JobTrigger }

import scala.concurrent.{ ExecutionContext, Future }


object JobUtils extends StrictLogging {
  case class JobZipInternal private (configs: Vector[JobCreateReq], entries: Vector[Path])

  def uploadJob(jobSettings: JobSettings, req: JobUploadJobReq)(implicit ec: ExecutionContext): Future[JobZip] =
    Future {
      val sha256 = DigestUtils.sha256HexFromPath(req.file)
      val dest = jobSettings.jobSavedDir.resolve(sha256.take(2)).resolve(sha256)

      val jobZipInternal = parseJobZip(req.file, req.charset, dest.resolve(JobConstants.DIST)) match {
        case Right(v) => v
        case Left(e)  => throw e
      }

      val zipPath = dest.resolve(req.fileName)
      Files.move(req.file, zipPath, StandardCopyOption.REPLACE_EXISTING)
      JobZip(zipPath, jobZipInternal.configs, jobZipInternal.entries)
    }

  @inline def parseJobZip(file: Path, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] =
    parseJobZip(file.toFile, charset, dest)

  def parseJobZip(file: File, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] = Utils.either {
    import scala.jdk.CollectionConverters._
    import scala.language.existentials

    val zip = new ZipFile(file, charset)
    try {
      val (confEntries, fileEntries) = zip
        .entries()
        .asScala
        .filterNot(entry => entry.isDirectory)
        .span(entry => entry.getName.endsWith(JobConstants.ENDS_SUFFIX) && !entry.isDirectory)
      val configs =
        confEntries.map(confEntry =>
          parseJobConf(FileUtils.getString(zip.getInputStream(confEntry), charset, "\n")) match {
            case Right(config) => config
            case Left(e)       => throw e
          })

      val buf = Array.ofDim[Byte](1024)
      val entryPaths = fileEntries.map { entry =>
        val entryName = entry.getName
        val savePath = dest.resolve(entryName)
        if (!Files.isDirectory(savePath.getParent)) {
          Files.createDirectories(savePath.getParent)
        }
        FileUtils.write(zip.getInputStream(entry), Files.newOutputStream(savePath), buf) // write the zip entry to disk
        savePath
      }

      JobZipInternal(configs.toVector, entryPaths.toVector)
    } finally {
      if (zip ne null) zip.close()
    }
  }

  def parseJobConf(content: String): Either[Throwable, JobCreateReq] = Utils.either {
    val conf = Configuration.parseString(content)
    val jobItem = JobItem(conf.getConfiguration("item"))
    val jobTrigger = JobTrigger(conf.getConfiguration("trigger"))
    JobCreateReq(conf.get[Option[String]]("key"), jobItem, jobTrigger)
  }
}

case class JobZip(zipPath: Path, configs: Vector[JobCreateReq], entries: Vector[Path]) 
Example 187
Source File: JobUtilsTest.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.job.util

import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Paths }

import fusion.inject.guice.testkit.GuiceApplicationTestkit
import fusion.json.jackson.ScalaObjectMapper
import mass.MassSettings
import mass.job.JobSettings
import mass.message.job.JobUploadJobReq
import org.scalatest.wordspec.AnyWordSpecLike

class JobUtilsTest extends GuiceApplicationTestkit with AnyWordSpecLike {
  private implicit val ec = typedSystem.executionContext
  private val objectMapper = injectInstance[ScalaObjectMapper]
  private val jobSettings = JobSettings(MassSettings(configuration))

  "JobService" should {
    "uploadJob" in {
      val fileName = "hello.zip"
      val originalFile =
        Paths.get(sys.props.get("user.dir").get + "/mass-job/src/universal/examples/sample-job/" + fileName)
      val file2 = originalFile.getParent.resolve("hello2.zip")
      Files.copy(originalFile, file2)
      println("file json string: " + objectMapper.stringify(file2))
      val result =
        JobUtils.uploadJob(jobSettings, JobUploadJobReq(file2, fileName, StandardCharsets.UTF_8)).futureValue
      println(result)
    }
  }

  "conf" should {
    val str = """#key=sample
                |item {
                |  name = "Hello world!"
                |  program = "java" # Job program type; currently supports java, sh (bash) and python
                |  program-main = "hello.Hello" # Main class of the executable Java program (must be packaged as a jar); effective when job-program is java
                |}
                |trigger {
                |  trigger-type = "cron" # Job trigger type; currently supports simple, cron and event
                |  start-time = "2020-03-03 10:10:10" # Job start time (optional)
                |  end-time = "2020-03-13 10:10:10" # Job end time (optional)
                |  repeat = 4 # Number of job repetitions; effective when job-type is simple
                |  duration = 120.seconds # Interval between two job runs; effective when job-type is simple
                |  cron-express = "1 0 0 * * ?" # CRON-based calendar schedule; effective when job-type is cron
                |}""".stripMargin
    "parse" in {
      val either = JobUtils.parseJobConf(str)
      println(either)
      val req = either.toOption.value
      req.item.name shouldBe Some("Hello world!")
    }
  }
} 
Example 188
Source File: TarFlowSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.archives

import java.nio.file.Files
import java.time.{Clock, Instant, ZoneId}

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.storage.digestSink
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with TestHelper
    with ScalaFutures {

  private implicit val ec    = system.dispatcher
  private implicit val clock = Clock.fixed(Instant.EPOCH, ZoneId.systemDefault())

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(55.second, 150.milliseconds)

  "A TarFlow" should {

    "tar a bunch of sources" in {
      val digest =
        "3fef41c5afe7a7ee11ee9d556a564fb57784cc5247b24c6ca70783f396fa158a1c7952504d3e1aa441de20cf065d740eec454c6ffb7fbc4b6351b950ee51c886"
      val elems = 500
      val contents =
        List.tabulate(2) { i =>
          val content = (i until (i + elems)).toList.mkString(",") + "\n"
          ArchiveSource(content.length.toLong, s"some/path$i/$i.txt", produce(content))
        }
      val path = Files.createTempFile("test", ".tar")
      TarFlow.write(contents).runWith(FileIO.toPath(path)).futureValue
      FileIO.fromPath(path).runWith(digestSink("SHA-512")).futureValue.value shouldEqual digest
      Files.delete(path)
    }
  }
} 
Example 189
Source File: BaseMetabrowseCliSuite.scala    From metabrowse   with Apache License 2.0 5 votes vote down vote up
package metabrowse.tests

import caseapp.RemainingArgs
import java.nio.file.Files
import metabrowse.cli.MetabrowseCli
import metabrowse.cli.MetabrowseOptions
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import scala.meta.io.AbsolutePath
import scala.meta.testkit.DiffAssertions
import metabrowse.{schema => d}
import metabrowse.MetabrowseEnrichments._
import GeneratedSiteEnrichments._

abstract class BaseMetabrowseCliSuite
    extends FunSuite
    with BeforeAndAfterAll
    with DiffAssertions {
  var out: AbsolutePath = _
  def options = MetabrowseOptions(
    out.toString(),
    cleanTargetFirst = true,
    nonInteractive = true
  )
  def files: Seq[String] =
    BuildInfo.exampleClassDirectory.map(_.getAbsolutePath).toSeq

  def runCli(): Unit = MetabrowseCli.run(options, RemainingArgs(files, Nil))

  override def beforeAll(): Unit = {
    out = AbsolutePath(Files.createTempDirectory("metabrowse"))
    out.toFile.deleteOnExit()
    runCli()
  }

  def checkSymbolIndex(id: String, expected: String) = {
    test(id) {
      val indexes = d.SymbolIndexes.parseFromCompressedPath(
        out.resolve("symbol").resolve(id.symbolIndexPath)
      )
      val index = indexes.indexes.find(_.symbol == id).get
      // Sort ranges to ensure we assert against deterministic input.
      val indexNormalized = index.copy(
        references = index.references
          .mapValues { ranges =>
            ranges.copy(ranges = ranges.ranges.sortBy(_.startLine))
          }
          .iterator
          .toMap
      )
      val obtained = indexNormalized.toProtoString
      assertNoDiffOrPrintExpected(obtained, expected)
    }
  }

} 
Example 190
Source File: GeneratedSiteEnrichments.scala    From metabrowse   with Apache License 2.0 5 votes vote down vote up
package metabrowse.tests
import java.nio.file.Files
import java.util.zip.GZIPInputStream
import scala.meta.io.AbsolutePath
import scalapb.GeneratedMessage
import scalapb.GeneratedMessageCompanion
import scalapb.Message

object GeneratedSiteEnrichments {
  implicit class XtensionGeneratedMessageCompanion[
      A <: GeneratedMessage with Message[A]
  ](companion: GeneratedMessageCompanion[A]) {
    def parseFromCompressedPath(path: AbsolutePath): A = {
      val in = Files.newInputStream(path.toNIO)
      val gin = new GZIPInputStream(in)
      try companion.parseFrom(gin)
      finally {
        gin.close()
        in.close()
      }
    }
  }

} 
Example 191
Source File: Sourcepath.scala    From metabrowse   with Apache License 2.0 5 votes vote down vote up
package metabrowse.server

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths


  def jdkSources(): Option[Path] = {
    for {
      javaHome <- sys.props.get("java.home")
      srcZip = Paths.get(javaHome).getParent.resolve("src.zip")
      if Files.isRegularFile(srcZip)
    } yield srcZip
  }

  private[metabrowse] def coursierFetchCompilerPlugin(
      artifact: String
  ): Path = {
    coursierFetch(List("--intransitive", artifact)).headOption.getOrElse {
      sys.error(artifact)
    }
  }
  private[metabrowse] def coursierFetch(extra: List[String]): List[Path] = {
    sys.process
      .Process(List("coursier", "fetch") ++ extra)
      .!!
      .trim
      .linesIterator
      .map(jar => Paths.get(jar))
      .toList
  }
} 
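The jdkSources lookup above simply probes for a src.zip next to java.home, which matches the classic JDK 8 layout. A standalone sketch of the same probe using only the standard library (variable names are illustrative):

import java.nio.file.{Files, Paths}

val jdkSrc = sys.props.get("java.home")
  .flatMap(home => Option(Paths.get(home).getParent))
  .map(_.resolve("src.zip"))
  .filter(Files.isRegularFile(_))

println(jdkSrc.fold("no src.zip found next to java.home")(_.toString))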
Example 192
Source File: VerifierLoggerBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.transaction.smart

import java.io.BufferedWriter
import java.nio.file.{Files, Path, Paths}
import java.util.concurrent.TimeUnit

import cats.Id
import com.wavesplatform.account.KeyPair
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.lang.v1.compiler.Terms
import com.wavesplatform.lang.v1.compiler.Terms.{CONST_BOOLEAN, EVALUATED}
import com.wavesplatform.lang.v1.evaluator.Log
import com.wavesplatform.lang.v1.evaluator.ctx.impl.waves.Bindings
import com.wavesplatform.state.BinaryDataEntry
import com.wavesplatform.transaction.DataTransaction
import com.wavesplatform.transaction.smart.VerifierLoggerBenchmark.BigLog
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class VerifierLoggerBenchmark {

  @Benchmark
  def verifierLogged(bh: Blackhole, log: BigLog): Unit = {
    val logs = Verifier.buildLogs("id", log.value)
    bh.consume(log.writer.write(logs))
  }
}

object VerifierLoggerBenchmark {

  @State(Scope.Benchmark)
  class BigLog {

    val resultFile: Path       = Paths.get("log.txt")
    val writer: BufferedWriter = Files.newBufferedWriter(resultFile)

    private val dataTx: DataTransaction = DataTransaction
      .selfSigned(1.toByte, KeyPair(Array[Byte]()), (1 to 4).map(i => BinaryDataEntry(s"data$i", ByteStr(Array.fill(1024 * 30)(1)))).toList, 100000000, 0)
      .explicitGet()

    private val dataTxObj: Terms.CaseObj = Bindings.transactionObject(
      RealTransactionWrapper(dataTx, ???, ???, ???).explicitGet(),
      proofsEnabled = true
    )

    val value: (Log[Id], Either[String, EVALUATED]) =
      (
        List.fill(500)("txVal" -> Right(dataTxObj)),
        Right(CONST_BOOLEAN(true))
      )

    @TearDown
    def deleteFile(): Unit = {
      Files.delete(resultFile)
      writer.close()
    }
  }
} 
Example 193
Source File: DocExportV3.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.utils.doc

import java.io.FileWriter
import java.nio.file.{Files, Paths}

import com.github.mustachejava.DefaultMustacheFactory
import com.wavesplatform.DocSource
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.lang.directives.DirectiveSet
import com.wavesplatform.lang.directives.values.{Account, Expression, V3}

import scala.jdk.CollectionConverters._

object DocExportV3 {
  def main(args: Array[String]): Unit = {
      val funcV3Template = if (args.length == 1) args(0) else "lang/func-doc.template.md"
      val path = Paths.get("target/funcs")
      Files.createDirectories(path)

      val ds = DirectiveSet(V3, Account, Expression).explicitGet()
      RideFullContext.build(ds)
        .functions
        .map(
          f => {
            val argTypes = f.signature.args.map(_._2.toString).toList
            val docKey = (f.name, argTypes, V3.value.asInstanceOf[Int])
            val (doc, paramsDoc, category) = DocSource.categorizedfuncDataV3(docKey)
            val varDocs =
              (f.args, f.signature.args, paramsDoc)
                .zipped
                .toList
                .map { arg => VarDoc(arg._1, TypeDoc(arg._2._2), arg._3.replace("\n", "<br>")) }
                .asJava

            val cost = f.costByLibVersion(V3).toString
            val funcDoc = FuncDoc(f.name, TypeDoc(f.signature.result), doc.replace("\n", "<br>"), varDocs, cost)
            (funcDoc, category)
          }
        )
        .groupBy(_._2)
        .map { case (category, funcs) =>
          val indexedDocs = funcs
            .zipWithIndex
            .map { case ((func, _), index) => FuncDocV3(func, index + 1) }
            .toList
            .asJava
          val title = category.replace("-", " ").capitalize
          val writer = new FileWriter(path.resolve(category + ".md").toFile)
          val docs = CategorizedFuncsDoc(indexedDocs, title)
          (writer, docs)
        }
        .foreach { case (writer, docs) =>
          new DefaultMustacheFactory().compile(funcV3Template).execute(writer, docs)
          writer.close()
        }
    }
} 
Example 194
Source File: WalletSpecification.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lagonaki.unit

import java.io.File
import java.nio.file.Files

import cats.syntax.option._
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.settings.WalletSettings
import com.wavesplatform.wallet.Wallet
import org.scalatest.{FunSuite, Matchers}

class WalletSpecification extends FunSuite with Matchers {

  private val walletSize = 10
  val w                  = Wallet(WalletSettings(None, "cookies".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))

  test("wallet - acc creation") {
    w.generateNewAccounts(walletSize)

    w.privateKeyAccounts.size shouldBe walletSize
    w.privateKeyAccounts.map(_.toAddress.toString) shouldBe Seq(
      "3MqMwwHW4v2nSEDHVWoh8RCQL8QrsWLkkeB",
      "3MuwVgJA8EXHukxo6rcakT5tD6FpvACtitG",
      "3MuAvUG4EAsG9RP9jaWjewCVmggaQD2t39B",
      "3MqoX4A3UGBYU7cX2JPs6BCzntNC8K8FBR4",
      "3N1Q9VVVQtY3GqhwHtJDEyHb3oWBcerZL8X",
      "3NARifVFHthMDnCwBacXijPB2szAgNTeBCz",
      "3N6dsnfD88j5yKgpnEavaaJDzAVSRBRVbMY",
      "3MufvXKZxLuNn5SHcEgGc2Vo7nLWnKVskfJ",
      "3Myt4tocZmj7o3d1gnuWRrnQWcoxvx5G7Ac",
      "3N3keodUiS8WLEw9W4BKDNxgNdUpwSnpb3K"
    )
  }

  test("wallet - acc deletion") {

    val head = w.privateKeyAccounts.head
    w.deleteAccount(head)
    assert(w.privateKeyAccounts.lengthCompare(walletSize - 1) == 0)

    w.deleteAccount(w.privateKeyAccounts.head)
    assert(w.privateKeyAccounts.lengthCompare(walletSize - 2) == 0)

    w.privateKeyAccounts.foreach(w.deleteAccount)

    assert(w.privateKeyAccounts.isEmpty)
  }

  test("reopening") {
    val walletFile = Some(createTestTemporaryFile("wallet", ".dat"))

    val w1 = Wallet(WalletSettings(walletFile, "cookies".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))
    w1.generateNewAccounts(10)
    val w1PrivateKeys = w1.privateKeyAccounts
    val w1nonce       = w1.nonce

    val w2 = Wallet(WalletSettings(walletFile, "cookies".some, None))
    w2.privateKeyAccounts.nonEmpty shouldBe true
    w2.privateKeyAccounts shouldEqual w1PrivateKeys
    w2.nonce shouldBe w1nonce
  }

  test("reopen with incorrect password") {
    val file = Some(createTestTemporaryFile("wallet", ".dat"))
    val w1   = Wallet(WalletSettings(file, "password".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))
    w1.generateNewAccounts(3)

    assertThrows[IllegalArgumentException] {
      Wallet(WalletSettings(file, "incorrect password".some, None))
    }
  }

  def createTestTemporaryFile(name: String, ext: String): File = {
    val file = Files.createTempFile(name, ext).toFile
    file.deleteOnExit()

    file
  }
} 
Example 195
Source File: TestHelpers.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

import com.wavesplatform.account.Address
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.settings.{FunctionalitySettings, GenesisSettings, GenesisTransactionSettings, WavesSettings}

import scala.concurrent.duration._

object TestHelpers {
  def genesisSettings(balances: Map[Address, Long], blockTimestamp: Long = System.currentTimeMillis()): GenesisSettings = {
    val totalAmount = balances.values.sum
    val transactions = balances.map {
      case (account, amount) =>
        GenesisTransactionSettings(account.stringRepr, amount)
    }.toSeq

    GenesisSettings(blockTimestamp, blockTimestamp, totalAmount, None, transactions, 1000, 60.seconds)
  }

  def enableNG(settings: FunctionalitySettings): FunctionalitySettings =
    settings.copy(
      blockVersion3AfterHeight = 0,
      preActivatedFeatures = settings.preActivatedFeatures ++ Map(BlockchainFeatures.NG.id -> 0)
    )

  def enableNG(settings: WavesSettings): WavesSettings =
    settings.copy(
      blockchainSettings = settings.blockchainSettings.copy(functionalitySettings = enableNG(settings.blockchainSettings.functionalitySettings))
    )

  def deleteRecursively(path: Path): Unit = Files.walkFileTree(
    path,
    new SimpleFileVisitor[Path] {
      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        Option(exc).fold {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }(throw _)
      }

      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }
    }
  )
} 
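deleteRecursively walks the tree bottom-up: visitFile removes each file and postVisitDirectory removes the now-empty directory, rethrowing any traversal error. A quick usage sketch, assuming the TestHelpers object above is on the classpath; the directory layout is invented for illustration.

import java.nio.file.Files
import com.wavesplatform.TestHelpers

val root = Files.createTempDirectory("cleanup-demo")
Files.createDirectories(root.resolve("a").resolve("b"))
Files.write(root.resolve("a").resolve("b").resolve("data.bin"), Array[Byte](1, 2, 3))

TestHelpers.deleteRecursively(root)
assert(!Files.exists(root))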
Example 196
Source File: WithBlockchain.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.events

import java.nio.file.Files

import com.wavesplatform.database.TestStorageFactory
import com.wavesplatform.settings.{WavesSettings, loadConfig}
import com.wavesplatform.state.Blockchain
import com.wavesplatform.transaction.BlockchainUpdater
import com.wavesplatform.{NTPTime, TestHelpers, database}
import monix.reactive.Observer
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait WithBlockchain extends BeforeAndAfterEach with BeforeAndAfterAll with NTPTime { _: Suite =>
  protected def settings: WavesSettings = WavesSettings.fromRootConfig(loadConfig(None))

  private val path = Files.createTempDirectory("leveldb-test")
  private val db   = database.openDB(path.toAbsolutePath.toString)
  private val (bcu, _) = TestStorageFactory(
    settings,
    db,
    ntpTime,
    Observer.stopped,
    BlockchainUpdateTriggers.noop
  )

  protected def blockchain: Blockchain = bcu

  
  protected def initBlockchain(blockchainUpdater: Blockchain with BlockchainUpdater): Unit = ()

  override protected def beforeAll(): Unit = {
    initBlockchain(bcu)
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    bcu.shutdown()
    db.close()
    TestHelpers.deleteRecursively(path)
    super.afterAll()
  }
} 
Example 197
Source File: WithDB.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform

import java.nio.file.Files

import com.wavesplatform.account.Address
import com.wavesplatform.database.LevelDBFactory
import com.wavesplatform.events.BlockchainUpdateTriggers
import com.wavesplatform.transaction.Asset
import monix.reactive.subjects.{PublishSubject, Subject}
import org.iq80.leveldb.{DB, Options}
import org.scalatest.{BeforeAndAfterEach, Suite}

trait WithDB extends BeforeAndAfterEach {
  this: Suite =>

  private val path                  = Files.createTempDirectory("lvl").toAbsolutePath
  private var currentDBInstance: DB = _

  protected val ignoreSpendableBalanceChanged: Subject[(Address, Asset), (Address, Asset)] = PublishSubject()

  protected val ignoreBlockchainUpdateTriggers: BlockchainUpdateTriggers = BlockchainUpdateTriggers.noop

  def db: DB = currentDBInstance

  override def beforeEach(): Unit = {
    currentDBInstance = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true))
    super.beforeEach()
  }

  override def afterEach(): Unit =
    try {
      super.afterEach()
      db.close()
    } finally {
      TestHelpers.deleteRecursively(path)
    }
} 
Example 198
Source File: MultipartFormDataWritable.scala    From api-first-hand   with MIT License 5 votes vote down vote up
package de.zalando.play.controllers

import java.nio.file.{ Files, Paths }

import akka.util.ByteString
import play.api.http.{ HeaderNames, Writeable }
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.{ Codec, MultipartFormData }


object MultipartFormDataWritable {

  val boundary = "--------ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"

  def formatDataParts(data: Map[String, Seq[String]]): ByteString = {
    val dataParts = data.flatMap {
      case (key, values) =>
        values.map { value =>
          val name = s""""$key""""
          s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name\r\n\r\n$value\r\n"
        }
    }.mkString("")
    val bytes: ByteString = Codec.utf_8.encode(dataParts)
    bytes
  }

  def filePartHeader(file: FilePart[TemporaryFile]): ByteString = {
    val name = s""""${file.key}""""
    val filename = s""""${file.filename}""""
    val contentType = file.contentType.map { ct =>
      s"${HeaderNames.CONTENT_TYPE}: $ct\r\n"
    }.getOrElse("")
    Codec.utf_8.encode(s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name; filename=$filename\r\n$contentType\r\n")
  }

  val singleton = Writeable[MultipartFormData[TemporaryFile]](
    transform = { form: MultipartFormData[TemporaryFile] =>
    formatDataParts(form.dataParts) ++
      form.files.flatMap { file =>
        val fileBytes = Files.readAllBytes(Paths.get(file.ref.file.getAbsolutePath))
        filePartHeader(file) ++ fileBytes ++ Codec.utf_8.encode("\r\n")
      } ++
      Codec.utf_8.encode(s"--$boundary--")
  },
    contentType = Some(s"multipart/form-data; boundary=$boundary")
  )
} 
Example 199
Source File: FileDownloader.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc

import java.net.URL
import java.nio.file.{Files, Path}

import scala.util.{Failure, Success, Try}

class FileDownloader extends Logger {
  import resource._

  def url2File(url: String, targetFile: Path): Try[Path] = {
    if(targetFile.toFile.exists()){
      log.info(s"not downloading from $url as file already exists")
      Success(targetFile)
    } else {
      log.info(s"downloading $url to $targetFile")

      try {
        managed(new URL(url).openConnection().getInputStream).foreach { in =>
          Files.createDirectories(targetFile.getParent)
          Files.copy(in, targetFile)
        }

        Success(targetFile)
      } catch {
        case e: Exception => Failure(e)
      }
    }
  }
} 
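url2File skips the download when the target file already exists and creates the parent directories otherwise, returning the result as a Try. A minimal usage sketch; the URL and target path are placeholders, not real artefacts.

import java.nio.file.Paths
import scala.util.{Failure, Success}
import uk.gov.hmrc.FileDownloader

val downloader = new FileDownloader
downloader.url2File("https://example.org/some-artefact.jar", Paths.get("target", "some-artefact.jar")) match {
  case Success(path) => println(s"saved to $path")
  case Failure(e)    => println(s"download failed: ${e.getMessage}")
}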
Example 200
Source File: Repositories.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.releaser

import java.nio.file.{Files, Path}

import uk.gov.hmrc.releaser.bintray.{BintrayIvyPaths, BintrayMavenPaths, BintrayPaths}

trait RepoFlavour extends BintrayPaths {

//  def scalaVersion: String
  def releaseCandidateRepo: String
  def releaseRepo: String

  val artefactBuilder:(VersionMapping, Path) => TransformerProvider
}

trait IvyRepo extends RepoFlavour with BintrayIvyPaths {
//  val scalaVersion = "2.10"
  val artefactBuilder = IvyArtefacts.apply _
}

trait MavenRepo extends RepoFlavour with BintrayMavenPaths {
//  val scalaVersion = "2.11"
  val artefactBuilder = MavenArtefacts.apply _
}

object RepoFlavours {
  val mavenRepository: RepoFlavour = new BintrayRepository("release-candidates", "releases") with MavenRepo
  val ivyRepository: RepoFlavour = new BintrayRepository("sbt-plugin-release-candidates", "sbt-plugin-releases") with IvyRepo
}

case class BintrayRepository(releaseCandidateRepo: String, releaseRepo: String)