scala.tools.nsc.Settings Scala Examples

The following examples show how to use scala.tools.nsc.Settings. Each example is taken from an open-source project; the source file, project name, and license are noted above each listing.
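Most of the examples follow the same basic pattern: construct a Settings instance (optionally passing an error handler), enable options such as usejavacp and deprecation, point outputDirs at a real or in-memory directory, and hand the settings to a Global compiler or an IMain interpreter. Below is a minimal, self-contained sketch of that pattern, assuming a Scala 2 compiler on the classpath; the object name is chosen purely for illustration and does not come from any of the projects below.

import scala.tools.nsc.io.VirtualDirectory
import scala.tools.nsc.{Global, Settings}

object SettingsSketch {
  def main(args: Array[String]): Unit = {
    // Report configuration errors instead of throwing.
    val settings = new Settings(msg => System.err.println("Error: " + msg))

    // Compile against the JVM classpath of the current process.
    settings.usejavacp.value = true
    settings.deprecation.value = true

    // Write class files to an in-memory directory instead of the file system.
    settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None))

    // Hand the configured settings to a compiler instance and start a run.
    val compiler = new Global(settings)
    val run = new compiler.Run
    run.compileSources(Nil) // pass real SourceFiles here, e.g. BatchSourceFile instances
  }
}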
Example 1
Source File: SCTags.scala    From sctags   with Apache License 2.0
package sctags

import scala.tools.nsc.{Settings, Global}
import scala.tools.nsc.reporters.StoreReporter

import scala.collection.mutable.ListBuffer


import java.io.File
import java.io.PrintStream

object SCTags extends Parsing with TagGeneration
{

  import FileUtils._;

  var outputFile: String = "tags";
  var recurse = false;
  var etags = false

  def parseOpt(args:List[String]): List[String] =
    args match {
      case ("-f" |"-o")         :: file :: rest => outputFile = file; parseOpt(rest)
      case ("-R" |"--recurse" ) :: rest => recurse = true;            parseOpt(rest)
      case ("-e" |"--etags"   ) :: rest => etags = true;              parseOpt(rest)
      case files  => files
    }

  def error(str: String) = System.err.println("Error: " + str);
  val settings = new Settings(error);
  val reporter = new StoreReporter;
  val compiler = new Global(settings, reporter);


  def run(fnames: Seq[String]) {
    val files = new ListBuffer[File]
    fnames foreach { fname =>
      val file = new File(fname)
      if (file.isDirectory) {
        if (recurse)
          files ++= listFilesRecursive(file, {(f: File) => f.getName.endsWith(".scala")})
        else
          System.err.println("Skipping directory " + fname);
      } else {
        if (file.getName.endsWith(".scala"))
          files += file
        else
          System.err.println("Skipping file " + fname);
      }
    }

    if (files.nonEmpty) {
      val tags = files.map(f => (f.getPath, generateTags(parse(f))))
      val output = outputFile match {
        case "-" => Console.out
        case "tags" if etags =>  new PrintStream("TAGS")
        case x => new PrintStream(x)
      }

      if (etags) {
        ETags.generate(tags, output)
      } else {
        CTags.generate(tags, output)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val fnames = parseOpt(args.toList)
    run(fnames)
  }
} 
Example 2
Source File: UDFBuilder.scala    From sope   with Apache License 2.0
package com.sope.etl.register

import java.io.File
import java.net.URLClassLoader

import com.sope.etl.getObjectInstance
import com.sope.etl.transform.exception.YamlDataTransformException
import com.sope.etl.utils.JarUtils
import com.sope.utils.Logging
import org.apache.commons.io.FileUtils
import org.apache.spark.sql.expressions.UserDefinedFunction

import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.IMain

object UDFBuilder extends Logging {

  val DefaultClassLocation = "/tmp/sope/dynamic/"
  val DefaultJarLocation = "/tmp/sope/sope-dynamic-udf.jar"


  
  def buildDynamicUDFs(udfCodeMap: Map[String, String]): Map[String, UserDefinedFunction] = {
    val file = new java.io.File(UDFBuilder.DefaultClassLocation)
    FileUtils.deleteDirectory(file)
    file.mkdirs()
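    // evalUDF is defined elsewhere in this file (not shown in this excerpt); it presumably uses the imported IMain and Settings to compile the UDF code strings.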
    val udfMap = evalUDF(udfCodeMap)
    JarUtils.buildJar(DefaultClassLocation, DefaultJarLocation)
    udfMap
  }

} 
Example 3
Source File: ILoop.scala    From scio   with Apache License 2.0
package com.spotify.scio.repl.compat

import scala.tools.nsc.interpreter.shell
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.Settings

abstract class ILoop(command: CompilerCommand)
    extends shell.ILoop(shell.ShellConfig(command.settings)) {

  def initCommand(): Unit

  override def createInterpreter(interpreterSettings: Settings): Unit = {
    super.createInterpreter(interpreterSettings)
    initCommand()
    out.print(prompt)
    out.flush()
  }
} 
Example 4
Source File: InterpreterSpec.scala    From polynote   with Apache License 2.0
package polynote.testing

import java.io.File

import cats.data.StateT
import cats.syntax.traverse._
import cats.instances.list._
import org.scalatest.Suite
import polynote.config.PolynoteConfig
import polynote.kernel.environment.Config
import polynote.kernel.{Output, Result, ScalaCompiler}
import polynote.kernel.interpreter.{Interpreter, State}
import polynote.kernel.logging.Logging
import polynote.testing.kernel.MockEnv
import zio.{RIO, ZIO}
import zio.blocking.Blocking
import zio.clock.Clock
import zio.console.Console
import zio.random.Random
import zio.system.System
import zio.interop.catz._

import scala.reflect.internal.util.AbstractFileClassLoader
import scala.reflect.io.VirtualDirectory
import scala.tools.nsc.Settings
import scala.tools.nsc.io.AbstractFile

trait InterpreterSpec extends ZIOSpec {
  import runtime.{unsafeRun, unsafeRunSync}
  val classpath: List[File] = sys.props("java.class.path").split(File.pathSeparator).toList.map(new File(_))
  val settings: Settings = ScalaCompiler.defaultSettings(new Settings(), classpath)

  def outDir: AbstractFile = new VirtualDirectory("(memory)", None)
  settings.outputDirs.setSingleOutput(outDir)

  val classLoader: AbstractFileClassLoader = unsafeRun(ScalaCompiler.makeClassLoader(settings, Nil).provide(Config.of(PolynoteConfig())))
  val compiler: ScalaCompiler = ScalaCompiler(settings, classLoader).runIO()

  def interpreter: Interpreter

  lazy val initialState: State = unsafeRun(interpreter.init(State.Root).provideSomeLayer[Environment](MockEnv.layer(State.Root.id + 1)))
  def cellState: State = State.id(1, initialState)

  def assertOutput(code: String)(assertion: (Map[String, Any], Seq[Result]) => Unit): Unit =
    assertOutput(List(code))(assertion)

  def assertOutput(code: Seq[String])(assertion: (Map[String, Any], Seq[Result]) => Unit): Unit = {
    val (finalState, interpResults) = code.toList.map(interp).sequence.run(cellState).runIO()
    val terminalResults = interpResults.foldLeft((Map.empty[String, Any], List.empty[Result])) {
      case ((vars, results), next) =>
        val nextVars = vars ++ next.state.values.map(v => v.name -> v.value).toMap
        val nextOutputs = results ++ next.env.publishResult.toList.runIO()
        (nextVars, nextOutputs)
    }
    assertion.tupled(terminalResults)
  }

  case class InterpResult(state: State, env: MockEnv)

  type ITask[A] = RIO[Clock with Console with System with Random with Blocking with Logging, A]

  def interp(code: String): StateT[ITask, State, InterpResult] = StateT[ITask, State, InterpResult] {
    state => MockEnv(state.id).flatMap {
      env => interpreter.run(code, state).map {
        newState => State.id(newState.id + 1, newState) -> InterpResult(newState, env)
      }.provideSomeLayer[Environment](env.toCellEnv(classLoader))
    }
  }

  def interp1(code: String): InterpResult = unsafeRun {
    MockEnv(cellState.id).flatMap {
      env =>
        interpreter.run(code, cellState).provideSomeLayer(env.toCellEnv(getClass.getClassLoader)).map {
          state => InterpResult(state, env)
        }
    }
  }

  def stdOut(results: Seq[Result]): String = results.foldLeft("") {
    case (accum, Output("text/plain; rel=stdout", next)) => accum + next.mkString
    case (accum, _) => accum
  }

} 
Example 5
Source File: KernelReporter.scala    From polynote   with Apache License 2.0
package polynote.kernel.util

import cats.data.Ior
import polynote.kernel.{CompileErrors, KernelReport, Pos}

import scala.collection.mutable.ListBuffer
import scala.reflect.internal.util.Position
import scala.tools.nsc.Settings
import scala.tools.nsc.reporters.AbstractReporter

case class KernelReporter(settings: Settings) extends AbstractReporter {

  private var _reports = new ListBuffer[KernelReport]()

  def display(pos: Position, msg: String, severity: Severity): Unit = _reports.synchronized {
    _reports += KernelReport(new Pos(pos), msg, severity.id)
  }

  def displayPrompt(): Unit = ()

  override def reset(): Unit = {
    super.reset()
    _reports.clear()
  }

  def reports: List[KernelReport] = _reports.synchronized(_reports.toList)

  private def captureState = State(_reports, INFO.count, WARNING.count, ERROR.count)
  private def restoreState(state: State): Unit = {
    _reports = state.reports
    INFO.count = state.infos
    WARNING.count = state.warns
    ERROR.count = state.errs
  }

  def attempt[T](fn: => T): Either[Throwable, T] = _reports.synchronized {
    val state = captureState
    reset()

    try {
      val result = Right(fn)

      if (hasErrors)
        throw CompileErrors(_reports.filter(_.severity == ERROR.id).toList)

      result
    } catch {
      case err: Throwable =>
        Left(err)
    } finally {
      restoreState(state)
    }
  }

  def attemptIor[T](fn: => T): Ior[Throwable, T] = _reports.synchronized {
    val state = captureState
    reset()

    try {
      val result = Ior.right(fn)

      if (hasErrors)
        result.putLeft(CompileErrors(_reports.filter(_.severity == ERROR.id).toList))
      else
        result

    } catch {
      case err: Throwable =>
        Ior.Left(err)
    } finally {
      restoreState(state)
    }
  }

  private case class State(reports: ListBuffer[KernelReport], infos: Int, warns: Int, errs: Int)
} 
Example 6
Source File: SparkILoop.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.repl

import java.io.{BufferedReader, FileReader}

import Predef.{println => _, _}
import scala.util.Properties.{jdkHome, javaVersion, versionString, javaVmName}

import scala.tools.nsc.interpreter.{JPrintWriter, ILoop}
import scala.tools.nsc.Settings
import scala.tools.nsc.util.stringFromStream
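
// Note: this excerpt omits the enclosing declaration; in the original source, run is defined on the SparkILoop companion object.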


  def run(code: String, sets: Settings = new Settings): String = {
    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }

    stringFromStream { ostream =>
      Console.withOut(ostream) {
        val input = new BufferedReader(new StringReader(code))
        val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
        val repl = new SparkILoop(input, output)

        if (sets.classpath.isDefault)
          sets.classpath.value = sys.props("java.class.path")

        repl process sets
      }
    }
  }
  def run(lines: List[String]): String = run(lines.map(_ + "\n").mkString)
} 
Example 7
Source File: Main.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.repl

import java.io.File

import scala.tools.nsc.Settings

import org.apache.spark.util.Utils
import org.apache.spark._
import org.apache.spark.sql.SQLContext

object Main extends Logging {

  val conf = new SparkConf()
  val tmp = System.getProperty("java.io.tmpdir")
  val rootDir = conf.get("spark.repl.classdir", tmp)
  val outputDir = Utils.createTempDir(rootDir)
  val s = new Settings()
  s.processArguments(List("-Yrepl-class-based",
    "-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
    "-classpath", getAddedJars.mkString(File.pathSeparator)), true)
  // the creation of SecurityManager has to be lazy so SPARK_YARN_MODE is set if needed
  lazy val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
  var sparkContext: SparkContext = _
  var sqlContext: SQLContext = _
  var interp = new SparkILoop // this is a public var because tests reset it.

  def main(args: Array[String]) {
    if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
    // Start the classServer and store its URI in a spark system property
    // (which will be passed to executors so that they can connect to it)
    classServer.start()
    interp.process(s) // Repl starts and goes in loop of R.E.P.L
    classServer.stop()
    Option(sparkContext).map(_.stop)
  }

  def getAddedJars: Array[String] = {
    val envJars = sys.env.get("ADD_JARS")
    if (envJars.isDefined) {
      logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
    }
    val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
    val jars = propJars.orElse(envJars).getOrElse("")
    Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
  }

  def createSparkContext(): SparkContext = {
    val execUri = System.getenv("SPARK_EXECUTOR_URI")
    val jars = getAddedJars
    val conf = new SparkConf()
      .setMaster(getMaster)
      .setJars(jars)
      .set("spark.repl.class.uri", classServer.uri)
      .setIfMissing("spark.app.name", "Spark shell")
    logInfo("Spark class server started at " + classServer.uri)
    if (execUri != null) {
      conf.set("spark.executor.uri", execUri)
    }
    if (System.getenv("SPARK_HOME") != null) {
      conf.setSparkHome(System.getenv("SPARK_HOME"))
    }
    sparkContext = new SparkContext(conf)
    logInfo("Created spark context..")
    sparkContext
  }

  def createSQLContext(): SQLContext = {
    val name = "org.apache.spark.sql.hive.HiveContext"
    val loader = Utils.getContextOrSparkClassLoader
    try {
      sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
        .newInstance(sparkContext).asInstanceOf[SQLContext]
      logInfo("Created sql context (with Hive support)..")
    } catch {
      case _: java.lang.ClassNotFoundException | _: java.lang.NoClassDefFoundError =>
        sqlContext = new SQLContext(sparkContext)
        logInfo("Created sql context..")
    }
    sqlContext
  }

  private def getMaster: String = {
    val master = {
      val envMaster = sys.env.get("MASTER")
      val propMaster = sys.props.get("spark.master")
      propMaster.orElse(envMaster).getOrElse("local[*]")
    }
    master
  }
} 
Example 8
Source File: SparkCommandLine.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    // scalastyle:off println
    this(args, str => Console.println("Error: " + str))
    // scalastyle:on println
  }
} 
Example 9
Source File: NsdbCli.scala    From NSDb   with Apache License 2.0
package io.radicalbit.nsdb.cli

import io.radicalbit.nsdb.cli.console.NsdbILoop

import scala.tools.nsc.Settings
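
// Note: this excerpt omits the enclosing object declaration and the command-line args it receives.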


  case class Params(host: Option[String] = None, port: Option[Int] = None, db: String, width: Option[Int])

  val parser = new scopt.OptionParser[Params]("scopt") {
    head("scopt", "3.x")
    opt[String]("host") action { (x, c) =>
      c.copy(host = Some(x))
    } text "the remote host"
    opt[Int]("port") action { (x, c) =>
      c.copy(port = Some(x))
    } text "the remote port"
    opt[String]("database").required() action { (x, c) =>
      c.copy(db = x)
    } text "the database to select"
    opt[Int]("width") action { (x, c) =>
      c.copy(width = Some(x))
    } text "table max width"
  }

  parser.parse(args, Params(None, None, "root", None)) map { params =>
    val settings = new Settings
    settings.usejavacp.value = true
    settings.deprecation.value = true

    new NsdbILoop(params.host, params.port, params.db, params.width).process(settings)
  }
} 
Example 10
Source File: SparkCommandLine.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    // scalastyle:off println
    this(args, str => Console.println("Error: " + str))
    // scalastyle:on println
  }
} 
Example 11
Source File: SparkCommandLine.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    // scalastyle:off println
    this(args, str => Console.println("Error: " + str))
    // scalastyle:on println
  }
} 
Example 12
Source File: Main.scala    From iolap   with Apache License 2.0
package org.apache.spark.repl

import org.apache.spark.util.Utils
import org.apache.spark._
import org.apache.spark.sql.SQLContext

import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.SparkILoop

object Main extends Logging {

  val conf = new SparkConf()
  val tmp = System.getProperty("java.io.tmpdir")
  val rootDir = conf.get("spark.repl.classdir", tmp)
  val outputDir = Utils.createTempDir(rootDir)
  val s = new Settings()
  s.processArguments(List("-Yrepl-class-based",
    "-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
  val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
  var sparkContext: SparkContext = _
  var sqlContext: SQLContext = _
  var interp = new SparkILoop // this is a public var because tests reset it.

  def main(args: Array[String]) {
    if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
    // Start the classServer and store its URI in a spark system property
    // (which will be passed to executors so that they can connect to it)
    classServer.start()
    interp.process(s) // Repl starts and goes in loop of R.E.P.L
    classServer.stop()
    Option(sparkContext).map(_.stop)
  }


  def getAddedJars: Array[String] = {
    val envJars = sys.env.get("ADD_JARS")
    if (envJars.isDefined) {
      logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
    }
    val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
    val jars = propJars.orElse(envJars).getOrElse("")
    Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
  }

  def createSparkContext(): SparkContext = {
    val execUri = System.getenv("SPARK_EXECUTOR_URI")
    val jars = getAddedJars
    val conf = new SparkConf()
      .setMaster(getMaster)
      .setAppName("Spark shell")
      .setJars(jars)
      .set("spark.repl.class.uri", classServer.uri)
    logInfo("Spark class server started at " + classServer.uri)
    if (execUri != null) {
      conf.set("spark.executor.uri", execUri)
    }
    if (System.getenv("SPARK_HOME") != null) {
      conf.setSparkHome(System.getenv("SPARK_HOME"))
    }
    sparkContext = new SparkContext(conf)
    logInfo("Created spark context..")
    sparkContext
  }

  def createSQLContext(): SQLContext = {
    val name = "org.apache.spark.sql.hive.HiveContext"
    val loader = Utils.getContextOrSparkClassLoader
    try {
      sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
        .newInstance(sparkContext).asInstanceOf[SQLContext] 
      logInfo("Created sql context (with Hive support)..")
    }
    catch {
      case _: java.lang.ClassNotFoundException | _: java.lang.NoClassDefFoundError =>
        sqlContext = new SQLContext(sparkContext)
        logInfo("Created sql context..")
    }
    sqlContext
  }

  private def getMaster: String = {
    val master = {
      val envMaster = sys.env.get("MASTER")
      val propMaster = sys.props.get("spark.master")
      propMaster.orElse(envMaster).getOrElse("local[*]")
    }
    master
  }
} 
Example 13
Source File: SparkCommandLine.scala    From iolap   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    this(args, str => Console.println("Error: " + str))
  }
} 
Example 14
Source File: SparkCommandLine.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    // scalastyle:off println
    this(args, str => Console.println("Error: " + str))
    // scalastyle:on println
  }
} 
Example 15
Source File: AnalyzerTest.scala    From scala-commons   with MIT License
package com.avsystem.commons
package analyzer

import org.scalactic.source.Position
import org.scalatest.Assertions

import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.plugins.Plugin
import scala.tools.nsc.{Global, Settings}

trait AnalyzerTest { this: Assertions =>
  val settings = new Settings
  settings.usejavacp.value = true
  settings.pluginOptions.value ++= List("AVSystemAnalyzer:+_")

  val compiler: Global = new Global(settings) { global =>
    override protected def loadRoughPluginsList(): List[Plugin] =
      new AnalyzerPlugin(global) :: super.loadRoughPluginsList()
  }

  def compile(source: String): Unit = {
    compiler.reporter.reset()
    val run = new compiler.Run
    run.compileSources(List(new BatchSourceFile("test.scala", source)))
  }

  def assertErrors(source: String)(implicit pos: Position): Unit = {
    compile(source)
    assert(compiler.reporter.hasErrors)
  }

  def assertErrors(errors: Int, source: String)(implicit pos: Position): Unit = {
    compile(source)
    assert(compiler.reporter.errorCount == errors)
  }

  def assertNoErrors(source: String)(implicit pos: Position): Unit = {
    compile(source)
    assert(!compiler.reporter.hasErrors)
  }
} 
Example 16
Source File: Runtimes.scala    From AppCrawler   with Apache License 2.0
package com.testerhome.appcrawler

import java.io.File

import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader
import scala.tools.nsc.interpreter.IMain
import scala.tools.nsc.{Global, Settings}


class Runtimes(val outputDir:String="") extends CommonLog{
  private val settingsCompile=new Settings()

  if(outputDir.nonEmpty){
    val tempDir=new File(outputDir)
    if(tempDir.exists()==false){
      tempDir.mkdir()
    }
    settingsCompile.outputDirs.setSingleOutput(this.outputDir)
  }

  settingsCompile.deprecation.value = true // enable detailed deprecation warnings
  settingsCompile.unchecked.value = true // enable detailed unchecked warnings
  settingsCompile.usejavacp.value = true

  val global = new Global(settingsCompile)
  val run = new global.Run

  private val settingsEval=new Settings()
  settingsEval.deprecation.value = true // enable detailed deprecation warnings
  settingsEval.unchecked.value = true // enable detailed unchecked warnings
  settingsEval.usejavacp.value = true

  val interpreter = new IMain(settingsEval)

  def compile(fileNames:List[String]): Unit ={
    run.compile(fileNames)
  }

  def eval(code:String): Unit ={
    interpreter.interpret(code)
  }
  def reset(): Unit ={

  }



}

object Runtimes extends CommonLog{
  var instance=new Runtimes()
  var isLoaded=false
  def apply(): Unit ={

  }
  def eval(code:String): Unit ={
    if(isLoaded==false){
      log.info("first import")
      instance.eval("val driver=com.testerhome.appcrawler.AppCrawler.crawler.driver")
      instance.eval("def crawl(depth:Int)=com.testerhome.appcrawler.AppCrawler.crawler.crawl(depth)")
      isLoaded=true
    }
    log.info(code)
    instance.eval(code)
    log.info("eval finish")
  }

  def compile(fileNames:List[String]): Unit ={
    instance.compile(fileNames)
    isLoaded=false
  }
  def init(classDir:String=""): Unit ={
    instance=new Runtimes(classDir)
  }
  def reset(): Unit ={

  }
  def loadPlugins(pluginDir:String=""): List[Plugin] ={
    val pluginDirFile=new java.io.File(pluginDir)
    if(pluginDirFile.exists()==false){
      log.warn(s"no ${pluginDir} directory, skip")
      return Nil
    }
    val pluginFiles=pluginDirFile.list().filter(_.endsWith(".scala")).toList
    val pluginClassNames=pluginFiles.map(_.split(".scala").head)
    log.info(s"find plugins in ${pluginDir}")
    log.info(pluginFiles)
    log.info(pluginClassNames)
    val runtimes=new Runtimes(pluginDir)
    runtimes.compile(pluginFiles.map(pluginDirFile.getCanonicalPath+File.separator+_))
    val urls=Seq(pluginDirFile.toURI.toURL, getClass.getProtectionDomain.getCodeSource.getLocation)
    val loader=new URLClassLoader(urls, Thread.currentThread().getContextClassLoader)
    pluginClassNames.map(loader.loadClass(_).newInstance().asInstanceOf[Plugin])
  }
} 
Example 17
Source File: SparkCommandLine.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._
import org.apache.spark.annotation.DeveloperApi


@DeveloperApi
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {
  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    // scalastyle:off println
    this(args, str => Console.println("Error: " + str))
    // scalastyle:on println
  }
} 
Example 18
Source File: CompilerSetup.scala    From perf_tester   with Apache License 2.0
package benchmarks

import java.io.File
import java.nio.file.{Files, Path}

import benchmarks.Main.rootPath

import scala.reflect.internal.util.Position
import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.Reporter
import scala.util.Try
import collection.JavaConverters._

case class CompilerSetup(rootPath: Path, providedScalacOptions: List[String]) {
  val outputDir: Path = rootPath.resolve("output")
  val currentOutput: Path = outputDir.resolve("classes")
  val scalacOptions = providedScalacOptions ++
    Try(Files.readAllLines(rootPath.resolve("scalac.opts")).asScala.flatMap(_.split(" +"))).getOrElse(Nil)

  IO.cleanDir(outputDir)
  Files.createDirectories(currentOutput)


  val cpJars = IO.jarsIn(rootPath.resolve("cpJars"))

  val reporter: Reporter = new Reporter { // We are ignoring all compiler messages here
    override protected def info0(pos: Position, msg: String, severity: this.Severity, force: Boolean): Unit = {
      // println(s"[$severity] $pos: $msg") // Uncomment to get compilation messages
    }
  }

  val settings: Settings = new Settings( msg => throw new RuntimeException(s"[ERROR] $msg") )
  configure(settings)

  val global: Global = new Global(settings, reporter)

  def configure(settings: Settings): Unit = {
    settings.outputDirs.setSingleOutput(currentOutput.toAbsolutePath.toString)
    settings.classpath.append(cpJars.mkString(File.pathSeparator))
    settings.processArguments(scalacOptions, processAll = true)
  }
} 
Example 19
Source File: IntegrationTest.scala    From scala-sculpt   with Apache License 2.0
// Copyright (C) 2015-2020 Lightbend Inc. <http://lightbend.com>

package com.lightbend.tools.sculpt

import scala.tools.nsc.{ Settings, Global }
import scala.tools.nsc.io.VirtualDirectory
import scala.reflect.internal.util.BatchSourceFile

object Scaffold {

  val classes: String = {
    // this will be e.g. "2.11" or "2.12"
    val majorScalaVersion = {
      val v = scala.util.Properties.versionNumberString
      if (v matches ".*-(pre-\\w+|M\\d+|RC\\d+)") {
        v
      } else {
        v.split('.').take(2).mkString(".")
      }
    }
    val relative = s"./target/scala-$majorScalaVersion/classes"
    val file = new java.io.File(relative)
    assert(file.exists)
    file.getAbsolutePath
  }

  def defaultSettings: Settings = {
    val settings = new Settings
    settings.processArgumentString(
      s"-usejavacp -Xplugin:$classes -Xplugin-require:sculpt")
    settings.outputDirs.setSingleOutput(
      new VirtualDirectory("(memory)", None))
    settings
  }

  def analyze(code: String, classMode: Boolean = false): String = {
    val out = java.io.File.createTempFile("sculpt", "json", null)
    val modeSetting =
      if (classMode)
        " -P:sculpt:mode=class"
      else
        ""
    val settings = defaultSettings
    settings.processArgumentString(s"-P:sculpt:out=$out$modeSetting")
    val sources = List(new BatchSourceFile("<test>", code))
    val compiler = new Global(settings)
    (new compiler.Run).compileSources(sources)
    scala.io.Source.fromFile(out).mkString
  }

}

class IntegrationTest extends munit.FunSuite {
  def check(s: Sample): Unit = {
    assert(s.json == Scaffold.analyze(s.source))
    assert(s.classJson == Scaffold.analyze(s.source, classMode = true))
  }
  for (sample <- Samples.samples)
    test(sample.name) {
      check(sample)
    }
} 
Example 20
Source File: SparkILoop.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.repl

import java.io.BufferedReader

import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
import scala.tools.nsc.util.stringFromStream
import scala.util.Properties.{javaVersion, javaVmName, versionString}
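
// Note: this excerpt omits the enclosing declaration; in the original source, run is defined on the SparkILoop companion object.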


  def run(code: String, sets: Settings = new Settings): String = {
    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }

    stringFromStream { ostream =>
      Console.withOut(ostream) {
        val input = new BufferedReader(new StringReader(code))
        val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
        val repl = new SparkILoop(input, output)

        if (sets.classpath.isDefault) {
          sets.classpath.value = sys.props("java.class.path")
        }
        repl process sets
      }
    }
  }
  def run(lines: List[String]): String = run(lines.map(_ + "\n").mkString)
}