scala.sys.process.Process Scala Examples

The following examples show how to use scala.sys.process.Process. They are drawn from open-source projects; the source file, project, and license are noted above each example.
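Before the project examples, here is a minimal, self-contained sketch of the core patterns they all build on: ! runs a command and returns its exit code, !! runs it and captures standard output (throwing on a non-zero exit), and !(ProcessLogger) streams output line by line; an overload of Process also accepts a working directory and extra environment variables. The git commands used here are only placeholder assumptions and require a git executable on the PATH.

import java.io.File
import scala.sys.process.{Process, ProcessLogger}

object ProcessBasics extends App {
  // Exit code only: 0 means the command succeeded.
  val exitCode: Int = Process("git --version").!

  // Capture standard output as a String; throws if the command exits with a non-zero code.
  val branch: String = Process(Seq("git", "rev-parse", "--abbrev-ref", "HEAD")).!!.trim

  // Stream stdout/stderr through a ProcessLogger, with a working directory and an extra
  // environment variable.
  val logger = ProcessLogger(out => println(s"out: $out"), err => Console.err.println(s"err: $err"))
  val status: Int = Process(Seq("git", "status", "--short"), new File("."), "GIT_PAGER" -> "cat").!(logger)

  println(s"exitCode=$exitCode branch=$branch status=$status")
}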
Example 1
Source File: Runner.scala    From sansible   with MIT License
package ansible

import scala.io.Source
import scala.sys.process.{Process, ProcessIO}

import ansible.IniEncode._
import ansible.IniEncoders._
import better.files.File
import com.typesafe.scalalogging.LazyLogging

object Runner extends LazyLogging {
  def runPlaybook(inv: Inventory)(pb: Playbook, opts: Option[String] = None): Unit = {
    val invFile = File.newTemporaryFile("ansible-inventory")
    val pbFile  = File.newTemporaryFile("ansible-playbook", ".yml")

    val pio = new ProcessIO(
      _ => (),
      out  => Source.fromInputStream(out).getLines.foreach(println),
      err => Source.fromInputStream(err).getLines.foreach(System.err.println)
    )

    val cmd = s"ansible-playbook ${opts.getOrElse("")} -i ${invFile.path} ${pbFile.path}"
    val env = Seq("ANSIBLE_FORCE_COLOR" -> "true")
    val process = Process(cmd, cwd = None, env: _*).run(pio)

    invFile.write(inv.iniEncode)
    pbFile.write(YAML.fromPlaybook(pb))
    logger.info(cmd)

    val exitCode = process.exitValue()
    logger.info(s"run completed with exit code: $exitCode")
    process.destroy()
  }
} 
Example 2
Source File: SourceHighlighter.scala    From codepropertygraph   with Apache License 2.0
package io.shiftleft.utils

import better.files.File
import io.shiftleft.codepropertygraph.generated.Languages
import org.apache.logging.log4j.LogManager
import scala.sys.process.Process


case class Source(code: String, language: String)

object SourceHighlighter {
  private val logger = LogManager.getLogger(this)

  def highlight(source: Source): Option[String] = {
    val langFlag = source.language match {
      case Languages.C => "-sC"
      case other       => throw new RuntimeException(s"Attempting to call highlighter on unsupported language: $other")
    }

    val tmpSrcFile = File.newTemporaryFile("dump")
    tmpSrcFile.writeText(source.code)
    try {
      val highlightedCode = Process(Seq("source-highlight-esc.sh", tmpSrcFile.path.toString, langFlag)).!!
      Some(highlightedCode)
    } catch {
      case exception: Exception =>
        logger.info("syntax highlighting not working. Is `source-highlight` installed?")
        logger.info(exception)
        Some(source.code)
    } finally {
      tmpSrcFile.delete()
    }
  }

} 
Example 3
Source File: Shared.scala    From codepropertygraph   with Apache License 2.0
package io.shiftleft.semanticcpg.language.dotextension

import better.files.File

import scala.sys.process.Process
import scala.util.{Failure, Success, Try}

trait ImageViewer {
  def view(pathStr: String): Try[String]
}

object Shared {

  def plotAndDisplay(dotStrings: List[String], viewer: ImageViewer): Unit = {
    dotStrings.foreach { dotString =>
      File.usingTemporaryFile("semanticcpg") { dotFile =>
        File.usingTemporaryFile("semanticcpg") { svgFile =>
          dotFile.write(dotString)
          createSvgFile(dotFile, svgFile).toOption.foreach(_ => viewer.view(svgFile.path.toAbsolutePath.toString))
        }
      }
    }
  }

  private def createSvgFile(in: File, out: File): Try[String] = {
    Try {
      Process(Seq("dot", "-Tsvg", in.path.toAbsolutePath.toString, "-o", out.path.toAbsolutePath.toString)).!!
    } match {
      case Success(v) => Success(v)
      case Failure(exc) =>
        System.err.println("Executing `dot` failed: is `graphviz` installed?")
        System.err.println(exc)
        Failure(exc)
    }
  }

} 
Example 4
Source File: Whitesource.scala    From akka-persistence-couchbase   with Apache License 2.0
import sbt._
import sbt.Keys._
import sbtwhitesource.WhiteSourcePlugin.autoImport._
import sbtwhitesource._
import scala.sys.process.Process

object Whitesource extends AutoPlugin {
  override def requires = WhiteSourcePlugin

  override def trigger = allRequirements

  override lazy val projectSettings = Seq(
    // do not change the value of whitesourceProduct
    whitesourceProduct := "Lightbend Reactive Platform",
    whitesourceAggregateProjectName := {
      (moduleName in LocalRootProject).value.replace("-root", "") + "-" + (
        if (isSnapshot.value)
          if (describe(baseDirectory.value) contains "master") "master"
          else "adhoc"
        else majorMinor((version in LocalRootProject).value).map(_ + "-stable").getOrElse("adhoc")
      )
    },
    whitesourceForceCheckAllDependencies := true,
    whitesourceFailOnError := true
  )

  private def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version)
  private def describe(base: File) = Process(Seq("git", "describe", "--all"), base).!!
} 
Example 5
Source File: TravisGithub.scala    From sbt-best-practice   with Apache License 2.0
package com.thoughtworks.sbtBestPractice.travis

import java.io.File

import org.eclipse.jgit.lib.ConfigConstants._
import org.eclipse.jgit.lib.Constants._
import org.eclipse.jgit.transport.URIish
import resource._
import sbt._
import com.thoughtworks.sbtBestPractice.git.{Git => GitPlugin}
import sbt.Keys._

import scala.sys.process.Process

object TravisGithub extends AutoPlugin {

  object autoImport {

    sealed trait GitCredential

    final case class PersonalAccessToken(token: String) extends GitCredential

    final case class SshKey(privateKeyFile: File) extends GitCredential

    val githubCredential = SettingKey[GitCredential]("github-credential", "Credential for git push")

    val travisGitConfig = TaskKey[Unit]("travis-git-config", "Configure git from Travis environment variables")

  }

  import autoImport._

  private val RemoteName = "origin"

  override def trigger = allRequirements

  override def requires = Travis && GitPlugin

  override def projectSettings = Seq(
    travisGitConfig := {
      (Travis.travisBranch.?.value, Travis.travisRepoSlug.?.value) match {
        case (Some(branch), Some(slug)) =>
          val credential = githubCredential.?.value
          for (repository <- managed(GitPlugin.gitRepositoryBuilder.value.build());
               git <- managed(org.eclipse.jgit.api.Git.wrap(repository))) {
            {
              val command = git.remoteSetUrl()
              command.setName(RemoteName)
              command.setPush(true)
              credential match {
                case Some(PersonalAccessToken(key)) =>
                  command.setUri(new URIish(s"https://[email protected]/$slug.git"))
                case Some(SshKey(privateKeyFile)) =>
                  command.setUri(new URIish(s"ssh://[email protected]:$slug.git"))
                case _ =>
                  throw new MessageOnlyException("githubCredential is not set")
              }
              command.call()
            }

            git.branchCreate().setForce(true).setName(branch).call()

            {
              val config = git.getRepository.getConfig
              config.setString(CONFIG_BRANCH_SECTION, branch, CONFIG_KEY_REMOTE, RemoteName)
              config.setString(CONFIG_BRANCH_SECTION, branch, CONFIG_KEY_MERGE, raw"""$R_HEADS$branch""")
              config.save()
            }

            git.checkout().setName(branch).call()
          }
        case _ =>
          throw new MessageOnlyException("travisBranch or travisRepoSlug is not set")
      }
    },
  )

} 
Example 6
Source File: DockerCommands.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
package com.tapad.docker

import sbt._
import scala.sys.process.Process
import com.tapad.docker.DockerComposeKeys._

trait DockerCommands {
  def dockerComposeUp(instanceName: String, composePath: String): Int = {
    Process(s"docker-compose -p $instanceName -f $composePath up -d").!
  }

  def dockerComposeStopInstance(instanceName: String, composePath: String): Unit = {
    Process(s"docker-compose -p $instanceName -f $composePath stop").!
  }

  def dockerComposeRemoveContainers(instanceName: String, composePath: String): Unit = {
    Process(s"docker-compose -p $instanceName -f $composePath rm -v -f").!
  }

  def dockerNetworkExists(instanceName: String, networkName: String): Boolean = {
    //Docker replaces '/' with '_' in the identifier string so search for replaced version
    //Use '-q' instead of '--format' as format was only introduced in Docker v1.13.0-rc1
    Process(s"docker network ls -q --filter=name=${instanceName.replace('/', '_')}_$networkName").!!.trim().nonEmpty
  }

  def dockerVolumeExists(instanceName: String, volumeName: String): Boolean = {
    //Docker replaces '/' with '_' in the identifier string so search for replaced version
    Process(s"docker volume ls -q --filter=name=${instanceName.replace('/', '_')}_$volumeName").!!.trim().nonEmpty
  }

  def getDockerComposeVersion: Version = {
    val version = Process("docker-compose version --short").!!
    Version(version)
  }

  def dockerPull(imageName: String): Unit = {
    Process(s"docker pull $imageName").!
  }

  def dockerMachineIp(machineName: String): String = {
    Process(s"docker-machine ip $machineName").!!.trim
  }

  def getDockerContainerId(instanceName: String, serviceName: String): String = {
    //Docker replaces '/' with '_' in the identifier string so search for replaced version
    Process(s"""docker ps --all --filter=name=${instanceName.replace('/', '_')}_${serviceName}_ --format=\"{{.ID}}\"""").!!.trim().replaceAll("\"", "")
  }

  def getDockerContainerInfo(containerId: String): String = {
    Process(s"docker inspect --type=container $containerId").!!
  }

  def dockerRemoveImage(imageName: String): Unit = {
    Process(s"docker rmi $imageName").!!
  }

  def dockerRemoveNetwork(instanceName: String, networkName: String): Unit = {
    Process(s"docker network rm ${instanceName}_$networkName").!
  }

  def dockerRemoveVolume(instanceName: String, volumeName: String): Unit = {
    Process(s"docker volume rm ${instanceName}_$volumeName").!
  }

  def dockerTagImage(currentImageName: String, newImageName: String): Unit = {
    Process(s"docker tag $currentImageName $newImageName").!!
  }

  def dockerPushImage(imageName: String): Unit = {
    Process(s"docker push $imageName").!
  }

  def dockerRun(command: String): Unit = {
    Process(s"docker run $command").!
  }

  def getDockerPortMappings(containerId: String): String = {
    Process(s"docker port $containerId").!!
  }

  def isDockerForMacEnvironment: Boolean = {
    val info = Process("docker info").!!
    info.contains("Operating System: Docker for Mac") ||
      info.contains("Operating System: Docker Desktop") ||
      (info.contains("Operating System: Alpine Linux") && info.matches("(?s).*Kernel Version:.*-moby.*"))
  }

  
  def runVariablesForSubstitutionTask(state: State): Vector[(String, String)] = {
    val extracted = Project.extract(state)
    val (_, value) = extracted.runTask(variablesForSubstitutionTask, state)
    value.toVector
  }
} 
Example 7
Source File: BackupHandler.scala    From eclair   with Apache License 2.0
package fr.acinq.eclair.db

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import akka.actor.{Actor, ActorLogging, Props}
import akka.dispatch.{BoundedMessageQueueSemantics, RequiresMessageQueue}
import fr.acinq.eclair.channel.ChannelPersisted

import scala.sys.process.Process
import scala.util.{Failure, Success, Try}



class BackupHandler private(databases: Databases, backupFile: File, backupScript_opt: Option[String]) extends Actor with RequiresMessageQueue[BoundedMessageQueueSemantics] with ActorLogging {

  // we listen to ChannelPersisted events, which will trigger a backup
  context.system.eventStream.subscribe(self, classOf[ChannelPersisted])

  def receive = {
    case persisted: ChannelPersisted =>
      val start = System.currentTimeMillis()
      val tmpFile = new File(backupFile.getAbsolutePath.concat(".tmp"))
      databases.backup(tmpFile)
      // this will throw an exception if it fails, which is possible if the backup file is not on the same filesystem
      // as the temporary file
      Files.move(tmpFile.toPath, backupFile.toPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE)
      val end = System.currentTimeMillis()

      // publish a notification that we have updated our backup
      context.system.eventStream.publish(BackupCompleted)

      log.debug(s"database backup triggered by channelId=${persisted.channelId} took ${end - start}ms")

      backupScript_opt.foreach(backupScript => {
        Try {
          // run the script in the current thread and wait until it terminates
          Process(backupScript).!
        } match {
          case Success(exitCode) => log.debug(s"backup notify script $backupScript returned $exitCode")
          case Failure(cause) => log.warning(s"cannot start backup notify script $backupScript:  $cause")
        }
      })
  }
}

sealed trait BackupEvent

// this notification is sent when we have completed our backup process (our backup file is ready to be used)
case object BackupCompleted extends BackupEvent

object BackupHandler {
  // using this method is the only way to create a BackupHandler actor
  // we make sure that it uses a custom bounded mailbox, and a custom pinned dispatcher (i.e our actor will have its own thread pool with 1 single thread)
  def props(databases: Databases, backupFile: File, backupScript_opt: Option[String]) = Props(new BackupHandler(databases, backupFile, backupScript_opt)).withMailbox("eclair.backup-mailbox").withDispatcher("eclair.backup-dispatcher")
} 
Example 8
Source File: DictionaryBasedNormalizer.scala    From scalastringcourseday7   with Apache License 2.0
package text.normalizer

import java.nio.charset.{CodingErrorAction, StandardCharsets}
import java.nio.file.Path

import text.{StringNone, StringOption}
import util.Config

import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.sys.process.Process
import scala.util.matching.Regex


class DictionaryBasedNormalizer(dictionaryNameOpt: StringOption) {
  private def ascii2native(inputPath: Path): Iterator[String] = {
    import util.ProcessBuilderUtils._
    Process(Seq[String](
      s"${System.getProperty("java.home")}/../bin/native2ascii",
      "-reverse",
      "-encoding", "UTF-8",
      inputPath.toAbsolutePath.toString)).lineStream(
        StandardCharsets.UTF_8,
        CodingErrorAction.REPORT,
        CodingErrorAction.REPORT,
        StringNone)
  }
  private val regex: Regex = """([^#:][^:]*):\[([^#]+)\](#.*)?""".r
  private val terms: Seq[(String, String)] = initialize()

  private def initialize(): Seq[(String, String)] = {
    if (dictionaryNameOpt.isEmpty) {
      return Nil
    }
    val dictionaryName: String = dictionaryNameOpt.get
    val map = mutable.Map.empty[String, List[String]]
    val buffer = ListBuffer.empty[(String, String)]
    val filePath: Path = Config.resourceFile("normalizer", dictionaryName)
    ascii2native(filePath) foreach {
      case regex(representation, notationalVariants, _) =>
        val trimmedRepresentation: String = representation.trim match {
          case "\"\"" => ""
          case otherwise => otherwise
        }
        val sortedNotationalVariants: List[String] = sortNotationVariants(notationalVariants.split(',').toList)
        map(trimmedRepresentation) = if (map.contains(trimmedRepresentation)) {
          sortNotationVariants(map(trimmedRepresentation) ++ sortedNotationalVariants)
        } else {
          sortedNotationalVariants
        }
      case _ =>
        //Do nothing
    }
    sortRepresentations(map.keySet.toList) foreach {
      representation =>
        map(representation) foreach {
          notationalVariant =>
            buffer += ((notationalVariant, representation))
        }
    }
    buffer.result
  }

  protected def sortNotationVariants(notationVariants: List[String]): List[String] = {
    notationVariants.sorted//alphabetical order
  }

  protected def sortRepresentations(representations: List[String]): List[String] = {
    representations.sorted//alphabetical order
  }

  def normalize(text: StringOption): StringOption = {
    text map {
      t: String =>
        var result: String = t
        if (terms.nonEmpty) {
          terms foreach {
            case (term, replacement) =>
              result = replaceAll(result, term, replacement)
            case _ =>
              //Do nothing
          }
        }
        result
    }
  }

  protected def replaceAll(input: String, term: String, replacement: String): String = {
    import util.primitive._
    input.replaceAllLiteratim(term, replacement)
  }
} 
Example 9
Source File: DeltaErrorsSuite.scala    From delta   with Apache License 2.0
package org.apache.spark.sql.delta

import scala.sys.process.Process

import org.apache.hadoop.fs.Path
import org.scalatest.GivenWhenThen

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}

trait DeltaErrorsSuiteBase
    extends QueryTest
    with SharedSparkSession
    with GivenWhenThen
    with SQLTestUtils {

  val MAX_URL_ACCESS_RETRIES = 3
  val path = "/sample/path"

  // Map of error name to the actual error message it throws
  // When adding an error, add the name of the function throwing the error as the key and the value
  // as the error being thrown
  def errorsToTest: Map[String, Throwable] = Map(
    "useDeltaOnOtherFormatPathException" ->
      DeltaErrors.useDeltaOnOtherFormatPathException("operation", path, spark),
    "useOtherFormatOnDeltaPathException" ->
      DeltaErrors.useOtherFormatOnDeltaPathException("operation", path, path, "format", spark),
    "createExternalTableWithoutLogException" ->
      DeltaErrors.createExternalTableWithoutLogException(new Path(path), "tableName", spark),
    "createExternalTableWithoutSchemaException" ->
      DeltaErrors.createExternalTableWithoutSchemaException(new Path(path), "tableName", spark),
    "createManagedTableWithoutSchemaException" ->
      DeltaErrors.createManagedTableWithoutSchemaException("tableName", spark),
    "multipleSourceRowMatchingTargetRowInMergeException" ->
      DeltaErrors.multipleSourceRowMatchingTargetRowInMergeException(spark),
    "concurrentModificationException" -> new ConcurrentWriteException(None))

  def otherMessagesToTest: Map[String, String] = Map(
    "deltaFileNotFoundHint" ->
      DeltaErrors.deltaFileNotFoundHint(
        DeltaErrors.generateDocsLink(
          sparkConf,
          DeltaErrors.faqRelativePath,
          skipValidation = true), path))

  def errorMessagesToTest: Map[String, String] =
    errorsToTest.mapValues(_.getMessage) ++ otherMessagesToTest

  def checkIfValidResponse(url: String, response: String): Boolean = {
    response.contains("HTTP/1.1 200 OK") || response.contains("HTTP/2 200")
  }

  def getUrlsFromMessage(message: String): List[String] = {
    val regexToFindUrl = "https://[^\\s]+".r
    regexToFindUrl.findAllIn(message).toList
  }

  def testUrls(): Unit = {
    errorMessagesToTest.foreach { case (errName, message) =>
      getUrlsFromMessage(message).foreach { url =>
        Given(s"*** Checking response for url: $url")
        var response = ""
        (1 to MAX_URL_ACCESS_RETRIES).foreach { attempt =>
          if (attempt > 1) Thread.sleep(1000)
          response = Process("curl -I " + url).!!
          if (!checkIfValidResponse(url, response)) {
            fail(
              s"""
                 |A link to the URL: '$url' is broken in the error: $errName, accessing this URL
                 |does not result in a valid response, received the following response: $response
         """.stripMargin)
          }
        }
      }
    }
  }

  test("Validate that links to docs in DeltaErrors are correct") {
    testUrls()
  }
}

class DeltaErrorsSuite
  extends DeltaErrorsSuiteBase 
Example 10
Source File: DotRenderer.scala    From reftree   with GNU General Public License v3.0
package reftree.render

import reftree.dot.Graph

import java.io.StringWriter
import java.nio.charset.StandardCharsets
import java.nio.file.Path

import scala.sys.process.{Process, BasicIO}

object DotRenderer {
  case class RenderingException(message: String) extends Exception(message)

  def render(
    graph: Graph, output: Path, options: RenderingOptions, format: String
  ): Unit = {
    val args = Seq(
      "-K", "dot",
      "-T", format,
      s"-Gdpi=${options.density}",
      "-o", output.toString
    )
    val process = Process("dot", args)
    val error = new StringWriter
    val io = BasicIO.standard { stream ⇒
      stream.write(graph.encode.getBytes(StandardCharsets.UTF_8))
      stream.close()
    }.withError(BasicIO.processFully(error))
    (process run io).exitValue()
    if (error.toString.nonEmpty) throw RenderingException(error.toString)
    ()
  }
} 
Example 11
Source File: Whitesource.scala    From akka-persistence-cassandra   with Apache License 2.0
import sbt._
import sbt.Keys._
import sbtwhitesource.WhiteSourcePlugin.autoImport._
import sbtwhitesource._

import scala.sys.process.Process

object Whitesource extends AutoPlugin {
  override def requires = WhiteSourcePlugin

  override def trigger = allRequirements

  override lazy val projectSettings = Seq(
    // do not change the value of whitesourceProduct
    whitesourceProduct := "Lightbend Reactive Platform",
    whitesourceAggregateProjectName := {
      val projectName =
        (moduleName in LocalRootProject).value.replace("-root", "")
      projectName + "-" + (if (isSnapshot.value)
                             if (describe(baseDirectory.value) contains "master") "master"
                             else "adhoc"
                           else
                             CrossVersion
                               .partialVersion((version in LocalRootProject).value)
                               .map {
                                 case (major, minor) => s"$major.$minor-stable"
                               }
                               .getOrElse("adhoc"))
    },
    whitesourceForceCheckAllDependencies := true,
    whitesourceFailOnError := true)

  private def describe(base: File) = Process(Seq("git", "describe", "--all"), base).!!
} 
Example 12
Source File: SparkSubmit.scala    From spark-bench   with Apache License 2.0
package com.ibm.sparktc.sparkbench.sparklaunch.submission.sparksubmit

import com.ibm.sparktc.sparkbench.sparklaunch.confparse.SparkJobConf
import com.ibm.sparktc.sparkbench.sparklaunch.submission.Submitter
import com.ibm.sparktc.sparkbench.utils.SparkBenchException

import scala.sys.process.Process

object SparkSubmit extends Submitter {
  private val log = org.slf4j.LoggerFactory.getLogger(getClass)

  override def launch(conf: SparkJobConf): Unit = {
    val errorMessage = "Spark installation home not specified. Failed to find the spark-submit executable. " +
      "Please check sparkHome in your config file or $SPARK_HOME in your environment."
    val sparkHome: String = conf.submissionParams.getOrElse("spark-home", throw SparkBenchException(errorMessage)).asInstanceOf[String]
    val preppedStatement = convert(conf, sparkHome)
    submit(preppedStatement, sparkHome)
  }

  private def convert(conf: SparkJobConf, sparkHome: String): Seq[String] = {

    Seq(s"$sparkHome/bin/spark-submit") ++
      Seq("--class", conf.className) ++
      convertSparkArgs(conf.sparkArgs) ++
      convertSparkConf(conf.sparkConfs) ++
      Seq(conf.sparkBenchJar) ++
      conf.childArgs
  }

  private def submit(strSeq: Seq[String], sparkHome: String): Unit = {
    val process = Process(strSeq, None, "SPARK_HOME" -> sparkHome)
    log.info(" *** SPARK-SUBMIT: " + process.toString)
    if (process.! != 0) {
      throw new Exception(s"spark-submit failed to complete properly given these arguments: \n\t${strSeq.mkString("\n")}")
    }
  }

  private def convertSparkArgs(map: Map[String, String]): Seq[String] =
    map.foldLeft(Seq.empty[String]) {
      case (arr, (k, v)) => arr ++ Seq("--" + k, v)
    }

  private def convertSparkConf(map: Map[String, String]): Seq[String] =
    map.foldLeft(Seq.empty[String]) {
      case (arr, (k, v)) => arr ++ Seq("--conf", s"$k=$v")
    }
} 
Example 13
Source File: FunctionInfoProviderRunner.scala    From mist   with Apache License 2.0
package io.hydrosphere.mist.master.jobs

import akka.actor.{Actor, ActorRef, ActorSystem, Props, ReceiveTimeout}
import io.hydrosphere.mist.core.CommonData
import io.hydrosphere.mist.core.CommonData.RegisterJobInfoProvider
import io.hydrosphere.mist.master.FunctionInfoProviderConfig

import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{Future, Promise}
import scala.sys.process.Process

class FunctionInfoProviderRunner(
  runTimeout: FiniteDuration,
  cacheEntryTtl: FiniteDuration,
  masterHost: String,
  clusterPort: Int,
  sparkConf: Map[String, String]
) extends WithSparkConfArgs {

  def run()(implicit system: ActorSystem): Future[ActorRef] = {
    val refWaiter = ActorRefWaiter(runTimeout)(system)
    val cmd =
      Seq(s"${sys.env("MIST_HOME")}/bin/mist-function-info-provider",
        "--master", masterHost,
        "--cluster-port", clusterPort.toString,
        "--cache-entry-ttl", cacheEntryTtl.toMillis.toString)

    val builder = Process(cmd, None, ("SPARK_CONF", sparkConfArgs(sparkConf).mkString(" ")))
    builder.run(false)
    refWaiter.waitRef()
  }
}

trait WithSparkConfArgs {

  def sparkConfArgs(sparkConf: Map[String, String]): Seq[String] = {
    sparkConf.map { case (k, v) => s"--conf $k=$v" }
      .toSeq
  }
}

trait ActorRefWaiter {
  def waitRef(): Future[ActorRef]
}

object ActorRefWaiter {

  class IdentityActor(pr: Promise[ActorRef], initTimeout: Duration) extends Actor {

    override def preStart(): Unit = {
      context.setReceiveTimeout(initTimeout)
    }

    override def receive: Receive = {
      case RegisterJobInfoProvider(ref) =>
        pr.success(ref)
        context stop self

      case ReceiveTimeout =>
        pr.failure(new IllegalStateException("Initialization of FunctionInfoProvider failed of timeout"))
        context stop self
    }
  }

  def apply(initTimeout: Duration)(implicit system: ActorSystem): ActorRefWaiter = new ActorRefWaiter {
    override def waitRef(): Future[ActorRef] = {
      val pr = Promise[ActorRef]
      system.actorOf(Props(new IdentityActor(pr, initTimeout)), CommonData.FunctionInfoProviderRegisterActorName)
      pr.future
    }
  }

}

object FunctionInfoProviderRunner {


  def create(config: FunctionInfoProviderConfig, masterHost: String, clusterPort: Int): FunctionInfoProviderRunner = {
    sys.env.get("SPARK_HOME") match {
      case Some(_) =>
        new FunctionInfoProviderRunner(config.runTimeout, config.cacheEntryTtl, masterHost, clusterPort, config.sparkConf)
      case None => throw new IllegalStateException("You should provide SPARK_HOME env variable for running mist")
    }

  }
} 
Example 14
Source File: ChangedFilesBuilder.scala    From mvn_scalafmt   with Apache License 2.0
package org.antipathy.mvn_scalafmt.builder

import java.io.File
import java.nio.file.Paths

import org.apache.maven.plugin.logging.Log

import scala.sys.process.{Process, ProcessLogger}
import scala.util.{Failure, Success, Try}


// Note: the class declaration was stripped when this example was extracted; it is
// reconstructed here from the companion object below. The parent trait that declares
// build and provides isSupportedFile in the original project is omitted.
class ChangedFilesBuilder(log: Log, diff: Boolean, branch: String, changeFunction: () => Seq[File]) {

  def build(input: Seq[File]): Seq[File] =
    if (diff) {
      log.info(s"Checking for files changed from $branch")
      Try {
        val changedFiles = changeFunction()
        log.info(changedFiles.mkString(s"Changed from $branch:\n", "\n", ""))
        changedFiles.filter(isSupportedFile)
      } match {
        case Success(value) => value
        case Failure(e) =>
          log.error("Could not obtain list of changed files", e)
          throw e
      }
    } else {
      input
    }

}

// $COVERAGE-OFF$
object ChangedFilesBuilder {

  def apply(log: Log, diff: Boolean, branch: String, workingDirectory: File): ChangedFilesBuilder = {
    val logger: ProcessLogger = ProcessLogger(_ => (), err => log.error(err))

    def run(cmd: String) = Process(cmd, workingDirectory).!!(logger).trim

    val prefix = ": "
    val actualBranch =
      if (!branch.startsWith(prefix)) branch
      else run(branch.substring(prefix.length))

    def processFunction(): Seq[File] = {
      val diffOutput    = run(s"git diff --name-only --diff-filter=d $actualBranch")
      val gitRootOutput = run("git rev-parse --show-toplevel")
      val gitRootPath   = Paths.get(gitRootOutput)
      diffOutput.linesIterator
        .map(gitRootPath.resolve)
        .map(_.toFile)
        .toSeq
    }

    new ChangedFilesBuilder(log, diff, actualBranch, processFunction)
  }
}
// $COVERAGE-ON$ 
Example 15
Source File: ProcessRunner.scala    From stryker4s   with Apache License 2.0
package stryker4s.run.process

import better.files.File
import grizzled.slf4j.Logging

import scala.concurrent.duration.{Duration, MINUTES}
import scala.sys.process.{Process, ProcessLogger}
import scala.util.Try
import cats.effect.IO

trait ProcessRunner extends Logging {
  def apply(command: Command, workingDir: File): Try[Seq[String]] = {
    Try {
      Process(s"${command.command} ${command.args}", workingDir.toJava)
        .!!<(ProcessLogger(debug(_)))
        .linesIterator
        .toSeq
    }
  }

  def apply(command: Command, workingDir: File, envVar: (String, String)): Try[Int] = {
    val mutantProcess = Process(s"${command.command} ${command.args}", workingDir.toJava, envVar)
      .run(ProcessLogger(debug(_)))

    val exitCodeFuture = IO(mutantProcess.exitValue())
    // TODO: Maybe don't use unsafeRunTimed
    // TODO: Use timeout decided by initial test-run duration
    Try(exitCodeFuture.unsafeRunTimed(Duration(2, MINUTES)).get)
  }
}

object ProcessRunner {
  private def isWindows: Boolean = sys.props("os.name").toLowerCase.contains("windows")

  def apply(): ProcessRunner = {
    if (isWindows) new WindowsProcessRunner
    else new UnixProcessRunner
  }
} 
Example 16
Source File: Webpack.scala    From recogito2   with Apache License 2.0
import java.net.InetSocketAddress
import play.sbt.PlayRunHook
import sbt._
import scala.sys.process.Process

object Webpack {
  def apply(base: File): PlayRunHook = {
    object WebpackHook extends PlayRunHook {
      var process: Option[Process] = None

      override def beforeStarted() = {
        process = Option(
          Process("webpack", base).run()
        )
      }

      override def afterStarted(addr: InetSocketAddress) = {
        process = Option(
          Process("webpack --watch --watch-poll", base).run()
        )
      }

      override def afterStopped() = {
        process.foreach(_.destroy())
        process = None
      }
    }

    WebpackHook
  }
} 
Example 17
Source File: DockerRunAction.scala    From berilia   with Apache License 2.0
package com.criteo.dev.cluster.docker

import java.io.{File, PrintWriter}

import com.criteo.dev.cluster.{DevClusterProcess, GeneralConstants, GeneralUtilities}
import org.slf4j.LoggerFactory

import scala.collection.mutable.ListBuffer
import scala.sys.process.{Process, ProcessLogger}


object DockerRunAction {

  private val logger = LoggerFactory.getLogger(classOf[DockerBuildAction])

  private val processLogger = ProcessLogger(
    (e: String) => logger.info("err " + e))

  private val ports = new ListBuffer[String]

  def apply(hosts: Map[String, String],
            image: String,
            mountDir: Option[String] = None,
            command: Option[String] = None,
            ports: Array[PortMeta],
            conf: Map[String, String],
            background: Boolean = false) : Option[String] = {
    val sb = new StringBuilder("docker run -P")
    if (background) {
      sb.append(" -d")
    } else {
      sb.append(" -it")
    }
    hosts.foreach {
      case (ip, name) => sb.append(s" --add-host=$name:$ip")
    }

    ports.foreach(p => {
      if (p.exposedPort.isDefined) {
        sb.append(s" -p ${p.exposedPort.get}:${p.port}")
      } else {
        sb.append(s" -p ${p.port}")
      }
    })

    if (mountDir.isDefined) {
      sb.append(s" -v ${mountDir.get}")
      sb.append(":/mount")
    }

    sb.append(s" $image")

    if (command.isDefined) {
      sb.append(s" ${command.get}")
    }

    val commandString = sb.toString
    println(commandString)

    if (background) {
      val output = DevClusterProcess.process(sb.toString).!!.stripLineEnd
      Some(output)
    } else {
      //write command to execute later (in dev-cluster script)
      DockerUtilities.writeDockerCommand(commandString)
      None
    }
  }
} 
Example 18
Source File: StartLocalCliAction.scala    From berilia   with Apache License 2.0
package com.criteo.dev.cluster.docker

import com.criteo.dev.cluster.aws.AwsUtilities.NodeRole
import com.criteo.dev.cluster._
import com.criteo.dev.cluster.config.GlobalConfig
import org.slf4j.LoggerFactory

import scala.sys.process.Process


@Public object StartLocalCliAction extends CliAction[Unit] {

  private val logger = LoggerFactory.getLogger(StartLocalCliAction.getClass)

  override def command: String = "start-local"

  override def usageArgs: List[Any] = List(Option("instanceId"))

  override def help: String = "Starts a local cluster docker container. If instanceId specified, " +
    "only start that one container, else starts them all."

  override def applyInternal(args: List[String], config: GlobalConfig): Unit = {
    val conf = config.backCompat
    //instance id is optional
    val instanceId = if (args.length == 1) Some(args(0)) else None

    val dockerMetas = DockerUtilities.getDockerContainerMetadata(
      DockerConstants.localClusterContainerLabel,
      instanceId)
    dockerMetas.foreach(d => {
      val command = s"docker start ${d.id}"
      DevClusterProcess.process(command).!!

      //add other required confs needed by the setup action (target ip, port)
      val dockerCluster = NodeFactory.getDockerNode(config.target.local, d)
      DockerUtilities.blockOnSsh(dockerCluster)
      StartServiceAction(dockerCluster, NodeRole.Master)

      //print out new docker container info.
      val dockerMetas = DockerUtilities.getDockerContainerMetadata(
        DockerConstants.localClusterContainerLabel,
        instanceId)
      DockerUtilities.printClusterDockerContainerInfo(conf, dockerMetas)
    })
  }
} 
Example 19
Source File: DockerBuildAction.scala    From berilia   with Apache License 2.0
package com.criteo.dev.cluster.docker

import com.criteo.dev.cluster.DevClusterProcess
import org.slf4j.LoggerFactory

import scala.collection.mutable.ListBuffer
import scala.sys.process.{Process, ProcessLogger}



class DockerBuildAction (dockerFile: String, dockerImage: String) {

  private val logger = LoggerFactory.getLogger(classOf[DockerBuildAction])

  private val processLogger = ProcessLogger(
    (e: String) => logger.info("err " + e))

  private val args = new ListBuffer[Pair[String, String]]
  private val ports = new ListBuffer[PortMeta]

  def addArg(key: String, value: String) = {
     args.+=(Pair(key, value))
  }

  def run() : Unit = {
    val sb = new StringBuilder("docker build")
    sb.append(s" -t $dockerImage")
    sb.append(s" -f ./${DockerConstants.dockerBaseDir}/$dockerFile")
    args.foreach(p =>  sb.append(s" --build-arg ${p._1}=${p._2}"))
    sb.append(s" ${DockerConstants.dockerBaseDir}")
    val out = DevClusterProcess.process(sb.toString).!
    if (out != 0) {
      throw new Exception("Failure running docker command.")
    }
  }
}

object DockerBuildAction {
  def apply(dockerFile: String, dockerImage: String) = {
    val dba = new DockerBuildAction(dockerFile, dockerImage)
    dba.run
  }
} 
Example 20
Source File: ZipkinComponentServer.scala    From zipkin-mesos-framework   with Apache License 2.0
package net.elodina.mesos.zipkin.components

import java.io.File

import net.elodina.mesos.zipkin.http.HttpServer

import scala.sys.process.Process
import scala.sys.process.ProcessBuilder

class ZipkinComponentServer {

  var process: Process = null

  @volatile var shutdownInitiated = false

  def isStarted = Option(process).isDefined

  def start(taskConfig: TaskConfig, taskId: String) = {
    val jarMask = ZipkinComponent.getComponentFromTaskId(taskId) match {
      case "collector" => HttpServer.collectorMask
      case "query" => HttpServer.queryMask
      case "web" => HttpServer.webMask
      case _ => throw new IllegalArgumentException(s"Illegal component name found in task id: $taskId")
    }
    val distToLaunch = initJar(jarMask)
    process = configureProcess(taskConfig, distToLaunch).run()
    //TODO: consider logs redirect
  }

  def await(): Option[Int] = {
    if (isStarted) Some(process.exitValue()) else None
  }

  def acknowledgeShutdownStatus(): Boolean = {
    val oldStatus = shutdownInitiated
    if (shutdownInitiated) shutdownInitiated = false
    oldStatus
  }

  def stop(shutdownInitiated: Boolean) {
    if (isStarted) {
      this.shutdownInitiated = shutdownInitiated
      process.destroy()
    }
  }

  private def initJar(jarMask: String): File = {
    new File(".").listFiles().find(file => file.getName.matches(jarMask)) match {
      case None => throw new IllegalStateException("Corresponding jar not found")
      case Some(componentDist) => componentDist
    }
  }

  private def configureProcess(taskConfig: TaskConfig, distToLaunch: File): ProcessBuilder = {
    val configFileArg = taskConfig.configFile.map(Seq("-f", _))
    var command = Seq("java", "-jar", distToLaunch.getCanonicalPath)
    configFileArg.foreach(command ++= _)
    command ++= taskConfig.flags.map { case (k: String, v: String) => s"-$k=$v" }
    Process(command, Some(new File(".")), taskConfig.env.toList: _*)
  }
} 
Example 21
Source File: GitFetcher.scala    From sbt-git-versioning   with MIT License
package com.rallyhealth.sbt.versioning

import sbt.util.Logger

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future, TimeoutException}
import scala.util.control.NonFatal
import scala.sys.process.Process


// Note: the enclosing object declaration and the helpers it references (FetchResult,
// BufferingProcessLogger, tagResultRegex) are defined elsewhere in the original source
// file and were dropped when this example was extracted.
object GitFetcher {

  def fetchRemotes(remotes: Seq[String], timeout: Duration)(implicit logger: Logger): Seq[FetchResult] = {
    val outputLogger = new BufferingProcessLogger
    val processResult = Process("git remote") ! outputLogger

    processResult match {
      case 0 =>
        logger.debug("Fetching remote sources...")
        // remotes reported by git; keep only the ones we were asked to fetch
        val availableRemotes = outputLogger.stdout

        val remotesToFetch = remotes.filter(availableRemotes.contains)
        if (remotesToFetch.nonEmpty) {
          logger.info("Fetching tags from: " + remotesToFetch.mkString(", "))
          remotesToFetch.flatMap(remote => fetchTagsFromRemote(remote, timeout))
        } else {
          logger.debug("No tags to fetch")
          Seq.empty[FetchResult]
        }

      case exitCode =>
        logger.error(s"Fetching remotes failed enumerating remotes [git exitCode=$exitCode]")
        Seq.empty[FetchResult]
    }
  }

  private def fetchTagsFromRemote(remote: String, timeout: Duration)(implicit logger: Logger): Seq[FetchResult] = {

    val outputLogger = new BufferingProcessLogger
    val process = Process(s"git fetch $remote --tags").run(outputLogger)
    val resultFuture = Future {
      if (process.exitValue() == 0) {
        outputLogger.stderr.filter(_.contains("[new tag]")).flatMap {
          case tagResultRegex(tag) =>
            logger.debug(s"Fetched from remote=$remote tag=$tag")
            Some(FetchResult(remote, tag))
          case line =>
            logger.warn(s"Unable to parse git result=$line, skipping")
            None
        }
      } else {
        logger.error(s"Fetching remote=$remote failed [git exitCode=${process.exitValue()}]")
        Seq.empty[FetchResult]
      }
    }

    try {
      val result = Await.result(resultFuture, timeout)
      logger.debug(s"Successfully fetched $remote")
      result
    } catch {
      case _: TimeoutException =>
        process.destroy()
        logger.error(s"Fetching remote=$remote timed out [git exitCode=${process.exitValue()}]")
        Seq.empty
      case NonFatal(exc) =>
        logger.error(s"Fetching remote=$remote failed [git exitCode=${process.exitValue()}]")
        logger.trace(exc)
        Seq.empty
    }
  }
} 
Example 22
Source File: TestUtils.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0
package com.codacy.analysis.core.utils

import java.nio.file.attribute.PosixFilePermission
import java.nio.file.{Path, Paths}

import better.files.File
import com.codacy.plugins.api.results
import io.circe.Decoder
import org.specs2.concurrent.ExecutionEnv
import org.specs2.matcher.MatchResult

import scala.sys.process.Process

object TestUtils {

  implicit val categoryDecoder: Decoder[results.Pattern.Category.Value] =
    Decoder.decodeEnumeration(results.Pattern.Category)

  implicit val levelDecoder: Decoder[results.Result.Level.Value] =
    Decoder.decodeEnumeration(results.Result.Level)
  implicit val fileDecoder: Decoder[Path] = Decoder[String].map(Paths.get(_))
  implicit val executionEnv: ExecutionEnv = ExecutionEnv.fromGlobalExecutionContext

  def withClonedRepo[T](gitUrl: String, commitUUid: String)(block: (File, File) => MatchResult[T]): MatchResult[T] =
    (for {
      directory <- File.temporaryDirectory()
      file <- File.temporaryFile()
    } yield {
      directory
        .addPermission(PosixFilePermission.OWNER_READ)
        .addPermission(PosixFilePermission.GROUP_READ)
        .addPermission(PosixFilePermission.OTHERS_READ)
        .addPermission(PosixFilePermission.OWNER_EXECUTE)
        .addPermission(PosixFilePermission.GROUP_EXECUTE)
        .addPermission(PosixFilePermission.OTHERS_EXECUTE)
      Process(Seq("git", "clone", gitUrl, directory.pathAsString)).!
      Process(Seq("git", "reset", "--hard", commitUUid), directory.toJava).!
      block(file, directory)
    }).get()

  def withTemporaryGitRepo[T](fn: File => MatchResult[T]): MatchResult[T] = {
    (for {
      temporaryDirectory <- File.temporaryDirectory()
    } yield {
      Process(Seq("git", "init"), temporaryDirectory.toJava).!
      Process(Seq("git", "commit", "--allow-empty", "-m", "initial commit"), temporaryDirectory.toJava).!
      fn(temporaryDirectory)
    }).get

  }
} 
Example 23
Source File: HeadSandbox.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import java.io.File

import scala.concurrent._
import scala.concurrent.duration._
import scala.sys.process.{Process, ProcessLogger}
import scala.util.Success


object HeadSandbox {

  // The original SandboxLogger was truncated when this example was extracted. This is a
  // minimal sketch of its shape (an assumption, not the original implementation): a
  // ProcessLogger that completes a promise once the sandbox prints a startup message.
  private class SandboxLogger extends ProcessLogger {
    private val started = Promise[Unit]()

    def waitForStartup(timeout: FiniteDuration): Unit = Await.ready(started.future, timeout)

    override def out(s: => String): Unit = if (s.toLowerCase.contains("started")) started.trySuccess(())
    override def err(s: => String): Unit = ()
    override def buffer[T](f: => T): T = f
  }

  def runAsync(port: Int, darFile: File, scenario: String): Unit => Unit = {
    // Run the sandbox.
    val logger = new SandboxLogger
    val sandbox = Process(
      Seq("sbt", s"sandbox/run ${darFile.getAbsolutePath} --port $port --scenario $scenario"),
      new File("../../../ledger"))
      .run(logger)

    // Sbt takes a long time to compile and start up, longer than Navigator keeps trying to connect.
    // Block for a while until the sandbox shows signs of being started up.
    logger.waitForStartup(300.seconds)

    val shutdown = (_: Unit) => {
      sandbox.destroy()
    }

    sys addShutdownHook shutdown(())
    _ =>
      shutdown(())
  }
} 
Example 24
Source File: HeadDamli.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import java.io.File
import java.nio.file.Files

import scala.sys.error
import scala.sys.process.Process


object HeadDamlc {
  private val packageName = "Test"

  def run(damlPath: String): (File, Unit => Unit) = {
    val damlFile = new File(damlPath)

    val tempDirectory = Files.createTempDirectory("navigator-integration-test").toFile
    val darFile = new File(tempDirectory, s"$packageName.dar")

    tempDirectory.mkdirs()
    tempDirectory.deleteOnExit()
    val shutdown: Unit => Unit = _ => { tempDirectory.delete(); () }

    // DAML -> DAR
    val exitCode = Process(
      s"bazel run damlc -- package $damlPath $packageName --output ${darFile.getAbsolutePath}").!
    if (exitCode != 0) {
      shutdown(())
      error(s"Dar packager: error while running damlc package for $damlPath: exit code $exitCode")
    }

    (darFile, shutdown)
  }
} 
Example 25
Source File: RunProcess.scala    From zorechka-bot   with MIT License
package com.wix.zorechka.clients.process

import java.nio.file.Path

import zio.{Task, ZIO}

import scala.collection.mutable.ListBuffer
import scala.sys.process.{Process, ProcessLogger}

case class ClientOutput(value: List[String]) extends AnyVal

object RunProcess {
  def execCmd(command: List[String], workDir: Path, extraEnv: List[(String, String)] = List.empty): Task[ClientOutput] = ZIO.effect {
    val lb = ListBuffer.empty[String]
    val log = new ProcessLogger {
      override def out(s: => String): Unit = {
        println(s)
        lb.append(s)
      }
      override def err(s: => String): Unit = {
        println(s)
        lb.append(s)
      }
      override def buffer[T](f: => T): T = f
    }

    println(command.mkString(" "))
    val exitStatus = Process(command, Some(workDir.toFile), extraEnv: _*).!(log)
    if (exitStatus != 0 && exitStatus != 3)
      throw new IllegalStateException(s"Got status $exitStatus")
    ClientOutput(lb.result())
  }
} 
Example 26
Source File: package.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
package com.github.ehsanyou.sbt.docker.compose

import com.github.ehsanyou.sbt.docker.compose.DataTypes.Cwd
import com.github.ehsanyou.sbt.docker.compose.DataTypes.InvalidExitCodeException
import sbt.Def
import sbt._

import scala.concurrent.Future
import scala.sys.process.Process

package object helpers {

  implicit def stateToExtracted(state: State): Extracted = Project.extract(state)

  object sbtFutureTask {
    def apply[T](t: T): Future[Def.Initialize[Task[T]]] = Future.successful(Def task t)
    def empty: Future[Def.Initialize[Task[Unit]]] = apply(())
  }

  object sbtTask {
    def empty: Def.Initialize[Task[Unit]] = Def task (())
  }

  def redPrinter(str: String): Unit = println(scala.Console.RED + str + scala.Console.WHITE)
  def greenPrinter(str: String): Unit = println(scala.Console.GREEN + str + scala.Console.WHITE)
  def yellowPrinter(str: String): Unit = println(scala.Console.YELLOW + str + scala.Console.WHITE)

  def process[T](
    command: String
  )(
    onSuccess: => T
  )(
    implicit cwd: Cwd
  ): T = {
    if (Process(command, cwd.dir).! == 0) onSuccess
    else throw new InvalidExitCodeException(s"`$command` command returned non-zero exit code.")
  }

  def processNonBlocking(
    command: String
  )(
    implicit cwd: Cwd
  ): Process =
    Process(command, cwd.dir).run()
} 
Example 27
Source File: SampleRoutes.scala    From akka_streams_tutorial   with MIT License
package akkahttp

import java.io.File

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.sys.process.Process
import scala.util.{Failure, Success}


object SampleRoutes extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SampleRoutes")
  implicit val executionContext = system.dispatcher


  def getFromBrowsableDir: Route = {
    val dirToBrowse = File.separator + "tmp"

    // pathPrefix allows loading dirs and files recursively
    pathPrefix("entries") {
      getFromBrowseableDirectory(dirToBrowse)
    }
  }

  def parseFormData: Route = path("post") {
    formFields('color, 'age.as[Int]) { (color, age) =>
      complete(s"The color is '$color' and the age is $age")
    }
  }

  def routes: Route = {
    getFromBrowsableDir ~ parseFormData
  }

  val bindingFuture = Http().bindAndHandle(routes, "127.0.0.1", 8000)

  bindingFuture.onComplete {
    case Success(b) =>
      println("Server started, listening on: " + b.localAddress)
    case Failure(e) =>
      println(s"Server could not bind to... Exception message: ${e.getMessage}")
      system.terminate()
  }

  def browserClient() = {
    val os = System.getProperty("os.name").toLowerCase
    if (os == "mac os x") Process("open ./src/main/resources/SampleRoutes.html").!
  }

  browserClient()

  sys.addShutdownHook {
    println("About to shutdown...")
    val fut = bindingFuture.map(serverBinding => serverBinding.terminate(hardDeadline = 3.seconds))
    println("Waiting for connections to terminate...")
    val onceAllConnectionsTerminated = Await.result(fut, 10.seconds)
    println("Connections terminated")
    onceAllConnectionsTerminated.flatMap { _ => system.terminate()
    }
  }
} 
Example 28
Source File: package.scala    From sbt-reactive-app   with Apache License 2.0
package com.lightbend.rp.sbtreactiveapp

import java.io.File
import java.nio.file.Paths
import org.apache.tools.ant.filters.StringInputStream
import sbt.Logger
import scala.collection.immutable.Seq
import scala.sys.process.{ Process, ProcessLogger }

package object cmd {
  
  private[cmd] def run(
    cwd: File = Paths.get(".").toRealPath().toFile,
    env: Map[String, String] = Map.empty,
    input: Option[String] = None,
    logStdErr: Option[Logger] = None,
    logStdOut: Option[Logger] = None)(args: String*): (Int, Seq[String], Seq[String]) = {
    var outList = List.empty[String]
    var errList = List.empty[String]

    val stringLogger = ProcessLogger(
      { s =>
        outList = s :: outList

        logStdOut.foreach(_.info(s))
      },
      { s =>
        errList = s :: errList

        logStdErr.foreach(_.error(s))
      })

    val exitCode =
      input
        .map(new StringInputStream(_))
        .foldLeft(Process(args, cwd = cwd, env.toVector: _*))(_ #< _)
        .run(stringLogger)
        .exitValue()

    (exitCode, outList.reverse, errList.reverse)
  }

  private[cmd] def runSuccess(failMsg: String)(result: (Int, Seq[String], Seq[String])): Unit = {
    if (result._1 != 0) {
      sys.error(s"$failMsg [${result._1}]")
    }
  }
} 
Example 29
Source File: GitSpec.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0
package com.codacy.analysis.core.git

import better.files.File
import org.specs2.control.NoLanguageFeatures
import org.specs2.mutable.Specification
import com.codacy.analysis.core.utils.TestUtils._

import scala.sys.process.Process

class GitSpec extends Specification with NoLanguageFeatures {

  "Git" should {
    "create a repository" in {
      (for {
        temporaryDirectory <- File.temporaryDirectory()
      } yield {
        Process(Seq("git", "init"), temporaryDirectory.toJava).!

        Git.repository(temporaryDirectory) must beSuccessfulTry
      }).get
    }

    "get the current commit uuid" in {
      withTemporaryGitRepo(directory => {
        val expectedUuid = Process(Seq("git", "rev-parse", "HEAD"), directory.toJava).!!.trim
        Git.currentCommitUuid(directory) must beLike {
          case Some(commit) => commit.value must beEqualTo(expectedUuid)
        }
      })
    }

    "fail to create a repository" in {
      (for {
        temporaryDirectory <- File.temporaryDirectory()
      } yield {
        Git.repository(temporaryDirectory) must beFailedTry
      }).get
    }
  }

} 
Example 30
Source File: RepositorySpec.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0
package com.codacy.analysis.core.git

import better.files.File
import com.codacy.analysis.core.utils.TestUtils._
import org.specs2.control.NoLanguageFeatures
import org.specs2.mutable.Specification

import scala.sys.process.Process
import scala.util.Success

class RepositorySpec extends Specification with NoLanguageFeatures {

  "Repository" should {
    "get latest commit" in {

      "when it exists" in {
        (for {
          temporaryDirectory <- File.temporaryDirectory()
          temporaryFile <- File.temporaryFile(parent = Some(temporaryDirectory))
        } yield {
          Process(Seq("git", "init"), temporaryDirectory.toJava).!
          Process(Seq("git", "add", temporaryDirectory.relativize(temporaryFile).toString), temporaryDirectory.toJava).!
          Process(Seq("git", "commit", "-m", "tmp"), temporaryDirectory.toJava).!

          Git.repository(temporaryDirectory).flatMap(_.latestCommit) must beSuccessfulTry
        }).get
      }

      "when it doesn't exist" in {
        (for {
          temporaryDirectory <- File.temporaryDirectory()
        } yield {
          Process(Seq("git", "init"), temporaryDirectory.toJava).!

          Git.repository(temporaryDirectory).flatMap(_.latestCommit) must beFailedTry
        }).get
      }
    }

    "get all uncommitted changes" in {

      "changed files" in {
        withTemporaryGitRepo { directory =>
          val file = directory / "random_file.file"
          file.createFileIfNotExists(createParents = true)

          Process(Seq("git", "add", "."), directory.toJava).!
          Process(Seq("git", "commit", "-m", "added a new file!"), directory.toJava).!

          file.write("Random file contents")

          Git.repository(directory).flatMap(_.uncommitedFiles) must beLike {
            case Success(uncommited) =>
              uncommited must contain(exactly(relativePath(file, directory)))
          }
        }
      }

      "untracked files" in {
        "with an untracked folder that contains an untracked file" in {
          withTemporaryGitRepo { directory =>
            val deepFile = directory / "mainFolder" / "subFolder" / "deepFile.sc"
            deepFile.createFileIfNotExists(createParents = true)

            Git.repository(directory).flatMap(_.uncommitedFiles) must beLike {
              case Success(uncommited) =>
                uncommited must contain(exactly(relativePath(deepFile, directory)))
            }
          }
        }

        "with an untracked folder with no content" in {
          withTemporaryGitRepo { directory =>
            val noContentsFolder = directory / "mainFolder" / "noContents"
            noContentsFolder.createDirectoryIfNotExists(createParents = true)

            Git.repository(directory).flatMap(_.uncommitedFiles) must beLike {
              case Success(uncommited) =>
                uncommited must beEmpty
            }
          }
        }
      }
    }

  }

  private def relativePath(targetFile: File, directory: File): String = {
    targetFile.pathAsString.stripPrefix(s"${directory.pathAsString}/")
  }
} 
Example 31
Source File: CommitSpec.scala    From codacy-analysis-cli   with GNU Affero General Public License v3.0
package com.codacy.analysis.core.git

import better.files.File
import org.specs2.control.NoLanguageFeatures
import org.specs2.mutable.Specification
import com.codacy.analysis.core.utils.TestUtils._

import scala.sys.process.Process
import scala.util.Success

class CommitSpec extends Specification with NoLanguageFeatures {

  "Commit" should {
    "get all files" in {
      withTemporaryGitRepo { temporaryDirectory =>
        (for {
          tempFile1 <- File.temporaryFile(parent = Some(temporaryDirectory))
          tempFile2 <- File.temporaryFile(parent = Some(temporaryDirectory))
          tempFile3 <- File.temporaryFile(parent = Some(temporaryDirectory))
        } yield {

          def addFile(file: File) = {
            Process(Seq("git", "add", temporaryDirectory.relativize(file).toString), temporaryDirectory.toJava).!
          }
          addFile(tempFile1)
          addFile(tempFile2)
          addFile(tempFile3)
          Process(Seq("git", "commit", "-m", "tmp"), temporaryDirectory.toJava).!

          val expectedFiles =
            List(tempFile1, tempFile2, tempFile3).map(temporaryDirectory.relativize)
          Git.repository(temporaryDirectory).flatMap(_.latestCommit).flatMap(_.files) must beLike {
            case Success(fileSet) => fileSet must containTheSameElementsAs(expectedFiles)
          }
        }).get()
      }
    }
  }
} 
Example 32
Source File: OSType.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.control
import scala.sys.process.Process

//--------------------------------------
//
// OSType.scala
// Since: 2012/01/30 11:58
//
//--------------------------------------


object OS {
  def isWindows: Boolean = getType == OSType.Windows
  def isMac: Boolean     = getType == OSType.Mac
  def isLinux: Boolean   = getType == OSType.Linux
  lazy val isCygwin: Boolean = {
    Shell.findCommand("uname") match {
      case Some(uname) => Process(uname).!!.startsWith("CYGWIN")
      case None        => false
    }
  }

  // Assumption: the excerpt shows an empty Unit-returning stub here; restored as a Boolean predicate
  // treating macOS, Linux and Cygwin as Unix-like.
  def isUnix: Boolean = isMac || isLinux || isCygwin

  val getType: OSType = {
    val osName: String = System.getProperty("os.name", "unknown").toLowerCase
    if (osName.contains("win")) {
      OSType.Windows
    } else if (osName.contains("mac")) {
      OSType.Mac
    } else if (osName.contains("linux")) {
      OSType.Linux
    } else
      OSType.Other
  }
} 
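The OSType values returned by getType are referenced above but not shown in this excerpt. A minimal reconstruction of the missing definitions, assuming the usual sealed-trait encoding, would be:

// Reconstructed sketch: referenced above but not included in the scraped excerpt.
sealed trait OSType
object OSType {
  case object Windows extends OSType
  case object Mac     extends OSType
  case object Linux   extends OSType
  case object Other   extends OSType
}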
Example 33
Source File: FrontEnd.scala    From Cortex   with GNU Affero General Public License v3.0 5 votes vote down vote up
import sbt.Keys._
import sbt._
import scala.sys.process.Process
import Path.rebase

object FrontEnd extends AutoPlugin {

  object autoImport {
    val frontendFiles = taskKey[Seq[(File, String)]]("Front-end files")
  }

  import autoImport._

  override def trigger = allRequirements

  override def projectSettings = Seq[Setting[_]](
    frontendFiles := {
      val s = streams.value
      s.log.info("Building front-end ...")
      s.log.info("npm install")
      Process("npm" :: "install" :: Nil, baseDirectory.value / "www") ! s.log
      s.log.info("npm run build")
      Process("npm" :: "run" :: "build" :: Nil, baseDirectory.value / "www") ! s.log
      val dir = baseDirectory.value / "www" / "dist"
      dir.**(AllPassFilter) pair rebase(dir, "www")
    })
} 
Example 34
Source File: ProcessJobRunnerSrv.scala    From Cortex   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.thp.cortex.services

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import javax.inject.{Inject, Singleton}
import org.elastic4play.utils.RichFuture
import org.thp.cortex.models._
import play.api.Logger
import play.api.libs.json.Json

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.sys.process.{Process, ProcessLogger, _}
import scala.util.Try

@Singleton
class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) {

  lazy val logger = Logger(getClass)

  private val pythonPackageVersionRegex = "^Version: ([0-9]*)\\.([0-9]*)\\.([0-9]*)".r

  def checkCortexUtilsVersion(pythonVersion: String): Option[(Int, Int, Int)] =
    Try {
      (s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil)
        .lineStream
        .collectFirst {
          case pythonPackageVersionRegex(major, minor, patch) ⇒ (major.toInt, minor.toInt, patch.toInt)
        }
    }.getOrElse(None)

  def run(jobDirectory: Path, command: String, job: Job, timeout: Option[FiniteDuration])(implicit ec: ExecutionContext): Future[Unit] = {
    val baseDirectory = Paths.get(command).getParent.getParent
    val output        = StringBuilder.newBuilder
    logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}")
    val process = Process(Seq(command, jobDirectory.toString), baseDirectory.toFile)
      .run(ProcessLogger { s ⇒
        logger.info(s"  Job ${job.id}: $s")
        output ++= s
      })
    val execution = Future
      .apply {
        process.exitValue()
        ()
      }
      .map { _ ⇒
        val outputFile = jobDirectory.resolve("output").resolve("output.json")
        if (!Files.exists(outputFile) || Files.size(outputFile) == 0) {
          val report = Json.obj("success" → false, "errorMessage" → output.toString)
          Files.write(outputFile, report.toString.getBytes(StandardCharsets.UTF_8))
        }
        ()
      }
      .recoverWith {
        case error ⇒
          logger.error(s"Execution of command $command failed", error)
          Future.apply {
            val report = Json.obj("success" → false, "errorMessage" → s"${error.getMessage}\n$output")
            Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8))
            ()
          }
      }
    timeout.fold(execution)(t ⇒ execution.withTimeout(t, killProcess(process)))
  }

  def killProcess(process: Process): Unit = {
    logger.info("Timeout reached, killing process")
    process.destroy()
  }
} 
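Example 34 collects the analyzer's output through a ProcessLogger and destroys the process when a timeout fires. A stripped-down sketch of the same Process pattern, without the Cortex-specific pieces and assuming a Unix-like sleep command is available, might look like:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future, TimeoutException}
import scala.sys.process.{Process, ProcessLogger}

object TimeoutSketch {
  def main(args: Array[String]): Unit = {
    val output = new StringBuilder
    // Start the command asynchronously, buffering every stdout/stderr line.
    val process = Process(Seq("sleep", "10")).run(ProcessLogger(line => output ++= line))
    try Await.result(Future(process.exitValue()), 2.seconds)
    catch {
      case _: TimeoutException =>
        // Same idea as killProcess above: stop the external program once the deadline passes.
        process.destroy()
    }
  }
}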
Example 35
Source File: Main.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package org.preftester

import java.io.File
import java.nio.file.{Files, Paths}

import com.typesafe.config.{ConfigFactory, ConfigObject, ConfigParseOptions}
import org.perftester.results.renderer.TextRenderer
import org.perftester.results.{ResultReader, RunResult}

import scala.collection.JavaConverters._
import scala.sys.process.Process
import scala.util.{Random, Try}

object Main extends App {
  val baseDir = Paths.get(args.headOption.getOrElse("."))

  case class Configuration(
                            reference: String,
                            baseScalaVersion: String,
                            buildLocally: Boolean,
                            jvmOptions: String,
                            scalaOptions: String
                          ){
    val scalaVersion = if(buildLocally) s"$baseScalaVersion-$reference-SNAPSHOT" else reference
  }

  val config = ConfigFactory.parseFile(
    baseDir.resolve("benchmark.conf").toFile,
    ConfigParseOptions.defaults().setAllowMissing(false)
  )

  val benchmarks = config.getObject("benchmarks").asScala.map {
    case (name, obj: ConfigObject) =>
      def read(name: String, default: String) = Try(obj.toConfig.getString(name)).getOrElse(default)

      name -> Configuration(
        reference = read("reference", name),
        baseScalaVersion = read("baseScalaVersion", "2.12.4"),
        buildLocally = read("buildLocally", "false").toBoolean,
        jvmOptions = read("jvmOptions", ""),
        scalaOptions = read("scalaOptions", "")
      )
  }.toSeq

  val iterations = config.getInt("iterations")
  val N = config.getInt("N")
  val M = config.getInt("M")

  // For each iteration, run every benchmark once in a random order and accumulate its RunResult under its name.
  val results = (1 to iterations).foldLeft(Map.empty[String, Vector[RunResult]]){
    case (all, i) =>
      Random.shuffle(benchmarks).foldLeft(all){
        case (all, (name, benchmark)) =>
          val location = baseDir.resolve(benchmark.scalaVersion)
          val cmd = Seq(s"./run.sh", ".", N, M, benchmark.scalaOptions).map(_.toString)
          println(s"## Run $i for $name")
          val env = if(benchmark.jvmOptions.isEmpty) Nil else Seq("_JAVA_OPTIONS" -> benchmark.jvmOptions)
          val output = Process(cmd, location.toFile, env:_*).!!
          println(output)
          val resultsDir = location.resolve("output").resolve("profile.txt")
          if (Files.exists(resultsDir)){
            val result = ResultReader.readResults(name, resultsDir, N)
            val previous = all.getOrElse(name, Vector.empty)
            all + (name -> (previous :+ result))
          } else all

      }
  }
  results.foreach{ case (name, results) =>
    println(s"########## Result for $name ##########")
    TextRenderer.outputTextResults(iterations, results)
  }
} 
Example 36
Source File: OutputCategoryList.scala    From jigg   with Apache License 2.0 5 votes vote down vote up
package jigg.nlp.ccg



import java.io.FileWriter

import scala.collection.mutable.ArrayBuffer
import scala.sys.process.Process
import scala.collection.mutable.HashMap

import lexicon._

import breeze.config.CommandLineParser

object OutputCategoryList {

  case class Params(
    bank: Opts.BankInfo,
    dict: Opts.DictParams
  )

  case class CategoryInfo(sentence: GoldSuperTaggedSentence, position: Int, num: Int = 1) {
    def increment(): CategoryInfo = this.copy(num = num + 1)
    def replace(_sentence: GoldSuperTaggedSentence, _p: Int) =
      CategoryInfo(_sentence, _p, num + 1)
  }

  def main(args:Array[String]) = {

    val params = CommandLineParser.readIn[Params](args)

    val dict = new JapaneseDictionary(params.dict.categoryDictinoary)
    val bank = CCGBank.select(params.bank, dict)

    val trainSentences: Array[GoldSuperTaggedSentence] = bank.trainSentences

    val stats = new HashMap[Category, CategoryInfo]

    trainSentences foreach { sentence =>
      (0 until sentence.size) foreach { i =>
        val cat = sentence.cat(i)
        stats.get(cat) match {
          case Some(info) =>
            if (sentence.size > info.sentence.size)
              stats += ((cat, info.replace(sentence, i)))
            else
              stats += ((cat, info.increment()))
          case None => stats += ((cat, CategoryInfo(sentence, i)))
          case _ =>
        }
      }
    }
    def highlight(sentence: Sentence, i: Int) = {
      val tokens = sentence.wordSeq
      // tokens.take(i).mkString("") + s"\\x1b[1;31m{${tokens(i)}}\\x1b[0m" + tokens.drop(i+1).mkString("")
      tokens.slice(i-5, i).mkString("") + s"${tokens(i)}" + tokens.slice(i+1, i+6).mkString("")
    }

    var fw = new FileWriter("./category.lst")
    stats.toSeq.sortBy(_._2.num).reverse.foreach {
      case (cat, CategoryInfo(sentence, i, num)) =>
        fw.write("%s\t%s\t%s\t%s\n"
          .format(num, cat, sentence.pos(i), highlight(sentence, i)))
    }
    fw.flush
    fw.close

    val noFeatureCategories = new HashMap[String, CategoryInfo]
    stats foreach { case (cat, CategoryInfo(sentence, i, numWithFeat)) =>
      val noFeature = cat.toStringNoFeature
      noFeatureCategories.get(noFeature) match {
        case Some(exist) =>
          val newNum = numWithFeat + exist.num
          val newInfo = exist.copy(num = newNum)
          noFeatureCategories += (noFeature -> newInfo)
        case None =>
          noFeatureCategories += (noFeature -> CategoryInfo(sentence, i, numWithFeat))
        case _ =>
      }
    }

    fw = new FileWriter("./category.nofeature.lst")
    noFeatureCategories.toSeq.sortBy(_._2.num).reverse.foreach {
      case (cat, CategoryInfo(sentence, i, num)) =>
        fw.write("%s\t%s\t%s\t%s\n"
          .format(num, cat, sentence.pos(i), highlight(sentence, i)))
    }
    fw.flush
    fw.close
  }
} 
Example 37
Source File: CCGBankToCabochaFormat.scala    From jigg   with Apache License 2.0 5 votes vote down vote up
package jigg.nlp.ccg

// Imports were stripped from this excerpt; the ones below are a best guess at what the file relies on.
import java.io.File

import scala.sys.process.Process

import breeze.config.{CommandLineParser, Help}

import lexicon._

object CCGBankToCabochaFormat {

  case class Opts(
    @Help(text="Path to CCGBank file") ccgbank: File = new File(""),
    @Help(text="Path to output") output: File = new File(""),
    @Help(text="Cabocha command (path to cabocha)") cabocha: String = "cabocha"
  )

  type Tree = ParseTree[NodeLabel]

  def main(args:Array[String]) = {
    val opts = CommandLineParser.readIn[Opts](args)

    val dict = new JapaneseDictionary()
    val extractors = TreeExtractor(
      new JapaneseParseTreeConverter(dict),
      new CCGBankReader)

    val trees = extractors.readTrees(opts.ccgbank, -1, true)
    val rawString = trees map (extractors.treeConv.toSentenceFromLabelTree) map (_.wordSeq.mkString("")) mkString ("\n")
    val is = new java.io.ByteArrayInputStream(rawString.getBytes("UTF-8"))
    val out = (Process(s"${opts.cabocha} -f1") #< is).lineStream_!

    val os = jigg.util.IOUtil.openOut(opts.output.getPath)
    out foreach { line =>
      os.write(line + "\n")
    }
    os.flush
    os.close
  }
} 
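Example 37 pipes a string into an external command with #< and reads its output lazily with lineStream_!. A stripped-down sketch of that piping pattern, assuming a cat command is available on the PATH:

import java.io.ByteArrayInputStream
import scala.sys.process.Process

object PipeSketch {
  def main(args: Array[String]): Unit = {
    val input = new ByteArrayInputStream("hello\nworld\n".getBytes("UTF-8"))
    // #< wires the stream to the command's stdin; lineStream_! streams stdout and ignores the exit code.
    (Process("cat") #< input).lineStream_!.foreach(println)
  }
}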
Example 38
Source File: CGroupProcessLauncher.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.cluster.worker

import java.io.File
import scala.sys.process.Process

import com.typesafe.config.Config
import org.slf4j.{Logger, LoggerFactory}

import org.apache.gearpump.cluster.scheduler.Resource
import org.apache.gearpump.util.{ProcessLogRedirector, RichProcess}


class CGroupProcessLauncher(val config: Config) extends ExecutorProcessLauncher {
  private val APP_MASTER = -1
  private val cgroupManager: Option[CGroupManager] = CGroupManager.getInstance(config)
  private val LOG: Logger = LoggerFactory.getLogger(getClass)

  override def cleanProcess(appId: Int, executorId: Int): Unit = {
    if (executorId != APP_MASTER) {
      cgroupManager.foreach(_.shutDownExecutor(appId, executorId))
    }
  }

  override def createProcess(
      appId: Int, executorId: Int, resource: Resource, appConfig: Config, options: Array[String],
    classPath: Array[String], mainClass: String, arguments: Array[String]): RichProcess = {
    val cgroupCommand = if (executorId != APP_MASTER) {
      cgroupManager.map(_.startNewExecutor(appConfig, resource.slots, appId,
        executorId)).getOrElse(List.empty)
    } else List.empty
    LOG.info(s"Launch executor $executorId with CGroup ${cgroupCommand.mkString(" ")}, " +
      s"classpath: ${classPath.mkString(File.pathSeparator)}")

    val java = System.getProperty("java.home") + "/bin/java"
    val command = cgroupCommand ++ List(java) ++ options ++ List("-cp", classPath
      .mkString(File.pathSeparator), mainClass) ++ arguments
    LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")}; " +
      s"options: ${options.mkString(" ")}")
    val logger = new ProcessLogRedirector()
    val process = Process(command).run(logger)
    new RichProcess(process, logger)
  }
} 
Example 39
Source File: Util.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.util

import java.io.{BufferedReader, File, FileInputStream, InputStreamReader}
import java.net.{ServerSocket, URI}
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.sys.process.Process
import scala.util.{Failure, Success, Try}

import com.typesafe.config.{Config, ConfigFactory}

import org.apache.gearpump.cluster.AppJar
import org.apache.gearpump.jarstore.JarStoreClient
import org.apache.gearpump.transport.HostPort

object Util {
  val LOG = LogUtil.getLogger(getClass)
  private val defaultUri = new URI("file:///")
  private val appNamePattern = "^[a-zA-Z_][a-zA-Z0-9_]+$".r.pattern

  def validApplicationName(appName: String): Boolean = {
    appNamePattern.matcher(appName).matches()
  }

  def getCurrentClassPath: Array[String] = {
    val classpath = System.getProperty("java.class.path")
    val classpathList = classpath.split(File.pathSeparator)
    classpathList
  }

  def version: String = {
    val home = System.getProperty(Constants.GEARPUMP_HOME)
    val version = Try {
      val versionFile = new FileInputStream(new File(home, "VERSION"))
      val reader = new BufferedReader(new InputStreamReader(versionFile))
      val version = reader.readLine().replace("version:=", "")
      versionFile.close()
      version
    }
    version match {
      case Success(version) =>
        version
      case Failure(ex) =>
        LOG.error("failed to read VERSION file, " + ex.getMessage)
        "Unknown-Version"
    }
  }

  def startProcess(options: Array[String], classPath: Array[String], mainClass: String,
      arguments: Array[String]): RichProcess = {
    val java = System.getProperty("java.home") + "/bin/java"

    val command = List(java) ++ options ++
      List("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ arguments
    LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")} " +
      s"\n ${options.mkString(" ")}")
    val logger = new ProcessLogRedirector()
    val process = Process(command).run(logger)
    new RichProcess(process, logger)
  }

  
  def resolveJvmSetting(conf: Config): AppJvmSettings = {

    import org.apache.gearpump.util.Constants._

    val appMasterVMArgs = Try(conf.getString(GEARPUMP_APPMASTER_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption
    val executorVMArgs = Try(conf.getString(GEARPUMP_EXECUTOR_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption

    val appMasterClassPath = Try(
      conf.getString(GEARPUMP_APPMASTER_EXTRA_CLASSPATH)
        .split("[;:]").filter(_.nonEmpty)).toOption

    val executorClassPath = Try(
      conf.getString(GEARPUMP_EXECUTOR_EXTRA_CLASSPATH)
        .split(File.pathSeparator).filter(_.nonEmpty)).toOption

    AppJvmSettings(
      JvmSetting(appMasterVMArgs.getOrElse(Array.empty[String]),
        appMasterClassPath.getOrElse(Array.empty[String])),
      JvmSetting(executorVMArgs
        .getOrElse(Array.empty[String]), executorClassPath.getOrElse(Array.empty[String])))
  }

  def asSubDirOfGearpumpHome(dir: String): File = {
    new File(System.getProperty(Constants.GEARPUMP_HOME), dir)
  }
} 
Example 40
Source File: Config.scala    From scala-steward   with Apache License 2.0 5 votes vote down vote up
package org.scalasteward.core.application

import better.files._
import cats.effect.Sync
import org.http4s.Uri
import org.http4s.Uri.UserInfo
import org.scalasteward.core.application.Cli.EnvVar
import org.scalasteward.core.git.Author
import org.scalasteward.core.util
import org.scalasteward.core.vcs.data.AuthenticatedUser
import scala.concurrent.duration.FiniteDuration
import scala.sys.process.Process


final case class Config(
    workspace: File,
    reposFile: File,
    defaultRepoConfigFile: Option[File],
    gitAuthor: Author,
    vcsType: SupportedVCS,
    vcsApiHost: Uri,
    vcsLogin: String,
    gitAskPass: File,
    signCommits: Boolean,
    whitelistedDirectories: List[String],
    readOnlyDirectories: List[String],
    disableSandbox: Boolean,
    doNotFork: Boolean,
    ignoreOptsFiles: Boolean,
    envVars: List[EnvVar],
    processTimeout: FiniteDuration,
    scalafixMigrations: Option[File],
    groupMigrations: Option[File],
    cacheTtl: FiniteDuration,
    cacheMissDelay: FiniteDuration,
    bitbucketServerUseDefaultReviewers: Boolean
) {
  def vcsUser[F[_]](implicit F: Sync[F]): F[AuthenticatedUser] = {
    val urlWithUser = util.uri.withUserInfo.set(UserInfo(vcsLogin, None))(vcsApiHost).renderString
    val prompt = s"Password for '$urlWithUser': "
    F.delay {
      val password = Process(List(gitAskPass.pathAsString, prompt)).!!.trim
      AuthenticatedUser(vcsLogin, password)
    }
  }
}

object Config {
  def create[F[_]](args: Cli.Args)(implicit F: Sync[F]): F[Config] =
    F.delay {
      Config(
        workspace = args.workspace.toFile,
        reposFile = args.reposFile.toFile,
        defaultRepoConfigFile = args.defaultRepoConf.map(_.toFile),
        gitAuthor = Author(args.gitAuthorName, args.gitAuthorEmail),
        vcsType = args.vcsType,
        vcsApiHost = args.vcsApiHost,
        vcsLogin = args.vcsLogin,
        gitAskPass = args.gitAskPass.toFile,
        signCommits = args.signCommits,
        whitelistedDirectories = args.whitelist,
        readOnlyDirectories = args.readOnly,
        disableSandbox = args.disableSandbox,
        doNotFork = args.doNotFork,
        ignoreOptsFiles = args.ignoreOptsFiles,
        envVars = args.envVar,
        processTimeout = args.processTimeout,
        scalafixMigrations = args.scalafixMigrations.map(_.toFile),
        groupMigrations = args.groupMigrations.map(_.toFile),
        cacheTtl = args.cacheTtl,
        cacheMissDelay = args.cacheMissDelay,
        bitbucketServerUseDefaultReviewers = args.bitbucketServerUseDefaultReviewers
      )
    }
} 
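The vcsUser helper above shells out to a git-askpass style program and captures its trimmed stdout with !!. A minimal sketch of that capture pattern, using a hypothetical askpass.sh script that prints a secret:

import scala.sys.process.Process

object AskPassSketch {
  def main(args: Array[String]): Unit = {
    // askpass.sh is hypothetical: it is expected to print the password for the given prompt on stdout.
    val prompt   = "Password for 'https://example.com': "
    val password = Process(List("./askpass.sh", prompt)).!!.trim
    println(s"read ${password.length} characters")
  }
}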
Example 41
Source File: Runner.scala    From daml   with Apache License 2.0 3 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import com.typesafe.scalalogging.LazyLogging

import scala.sys.process.{Process, ProcessLogger}
import java.io.File

object Runner extends LazyLogging {

  class LazyProcessLogger(val prefix: String = "") extends ProcessLogger with LazyLogging {
    def out(s: => String): Unit = logger.info(prefix + s)
    def err(s: => String): Unit = logger.warn(prefix + s)
    def buffer[T](f: => T): T = f
  }

  def execute(
      command: Seq[String],
      log: Option[ProcessLogger] = None,
      cwd: Option[File] = None): Int = {
    logger.info(s"Executing `${command.mkString(" ")}`${cwd.map(f => s" in `$f`").getOrElse("")}")
    log.fold(Process(command, cwd).!)(l => Process(command, cwd).!(l))
  }

  def executeAsync(
      command: Seq[String],
      log: Option[ProcessLogger] = None,
      cwd: Option[File] = None): Process = {
    logger.info(s"Executing `${command.mkString(" ")}`${cwd.map(f => s" in `$f`").getOrElse("")}")
    val process = log.fold(Process(command, cwd).run())(l => Process(command, cwd).run(l))
    sys addShutdownHook {
      if (process.isAlive()) {
        process.destroy()
      }
    }
    process
  }
}
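A short usage sketch for the Runner above; the echo and sleep commands are illustrative only and assume a Unix-like environment:

import com.daml.navigator.test.runner.Runner

object RunnerUsageSketch {
  def main(args: Array[String]): Unit = {
    // Blocking call: returns the exit code, logging each output line with the given prefix.
    val exitCode = Runner.execute(Seq("echo", "hello"), Some(new Runner.LazyProcessLogger("[echo] ")))
    println(s"echo exited with $exitCode")

    // Non-blocking call: returns the scala.sys.process.Process handle; a shutdown hook destroys it if still alive.
    val p = Runner.executeAsync(Seq("sleep", "5"))
    p.destroy()
  }
}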