java.util.Scanner Scala Examples

The following examples show how to use java.util.Scanner in Scala, drawn from open-source projects. Each example's header names its source file, the project it comes from, and that project's license.
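Several of the examples below share one core idiom: a Scanner whose delimiter is set to \A (the start-of-input anchor, which can never match a second time) returns the entire remaining stream as a single token. A minimal sketch of the idiom:

import java.io.InputStream
import java.util.Scanner

// Read an entire InputStream as one string: with the delimiter set to "\\A",
// the scanner's next token is the whole remaining stream.
def slurp(in: InputStream): String = {
  val scanner = new Scanner(in, "UTF-8").useDelimiter("\\A")
  if (scanner.hasNext) scanner.next() else ""
}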
Example 1
Source File: Licence.scala    From slide-desktop   with GNU General Public License v2.0
package gui

import java.awt.{BorderLayout, Insets}
import java.io.InputStream
import java.util.Scanner
import javax.swing.{JFrame, JScrollPane, JTextArea, ScrollPaneConstants}

object Licence extends JFrame {
    val istream: InputStream = getClass.getResourceAsStream("res/licence-gpl.txt")
    val licenseText: String = new Scanner(istream, "UTF-8").useDelimiter("\\A").next

    this.setTitle("Licence")
    this.setBounds(100, 100, 640, 800)

    val textField: JTextArea = new JTextArea
    textField.setEditable(false)
    textField.setMargin(new Insets(10, 10, 10, 10))
    textField.setAlignmentX(0)
    textField.setText(licenseText)
    textField.setCaretPosition(0)

    // Wrap the text area in a scroll pane and add that to the frame; adding the
    // text area directly as well would only reparent it into the scroll pane.
    val scroll: JScrollPane = new JScrollPane(textField,
        ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER)
    this.getContentPane.add(scroll, BorderLayout.CENTER)

    def showLicense(): Unit = this.setVisible(true)
} 
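Example 1 reads the resource once at object initialization and never closes the scanner or its stream, which is harmless there. A closing variant is straightforward; a sketch, assuming Scala 2.13+ for scala.util.Using:

import java.util.Scanner
import scala.util.Using

// Read a classpath resource fully, closing the scanner (and with it the
// underlying stream); None if the resource is missing or empty.
def readResource(path: String): Option[String] =
  Option(getClass.getResourceAsStream(path)).flatMap { stream =>
    Using.resource(new Scanner(stream, "UTF-8").useDelimiter("\\A")) { scanner =>
      if (scanner.hasNext) Some(scanner.next()) else None
    }
  }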
Example 2
Source File: PLYReadWriteTests.scala    From scalismo-faces   with Apache License 2.0
package scalismo.faces.io

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, OutputStreamWriter}
import java.nio.ByteOrder
import java.util.Scanner

import scalismo.faces.FacesTestSuite
import scalismo.faces.io.ply._

class PLYReadWriteTests extends FacesTestSuite {

  describe("Write-read cycles to string, big- and little endian") {

    def testRWEndianCycle[A:StringWriter:StringReader:EndianWriter:EndianReader](toWrite: IndexedSeq[A], bo: ByteOrder): Unit = {
      val N = toWrite.size
      val os = new ByteArrayOutputStream()
      val writer = new SequenceWriter[A]
      writer.write(toWrite, os, bo)

      val ba = os.toByteArray

      val is = new ByteArrayInputStream(ba)
      val reader = new FixedLengthSequenceReader[A]
      val read = reader.read(N, is, bo)

      read.zip(toWrite).foreach { p =>
        p._1 shouldBe p._2
      }
    }

    def testRWStringCycle[A:StringWriter:StringReader:EndianWriter:EndianReader](toWrite: IndexedSeq[A]): Unit = {
      val N = toWrite.size
      val os = new ByteArrayOutputStream()
      val osw = new OutputStreamWriter(os)
      val writer = new SequenceWriter[A]
      writer.write(toWrite, osw)
      osw.flush()

      val is = new ByteArrayInputStream(os.toByteArray)
      val scanner = new Scanner(is)
      val reader = new FixedLengthSequenceReader[A]
      val read = reader.read(N, scanner)

      read.zip(toWrite).foreach { p =>
        p._1 shouldBe p._2
      }
    }

    def testAllThreeCycles[A:StringWriter:StringReader:EndianWriter:EndianReader](toWrite: IndexedSeq[A]): Unit = {
      testRWStringCycle(toWrite)
      testRWEndianCycle(toWrite, ByteOrder.BIG_ENDIAN)
      testRWEndianCycle(toWrite, ByteOrder.LITTLE_ENDIAN)
    }

    it("should result in the same sequence of bytes") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toByte
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of char") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toChar
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of short") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toShort
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of int") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toInt
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of long") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toLong
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of float") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255).toFloat
      testAllThreeCycles(toWrite)
    }
    it("should result in the same sequence of double") {
      val toWrite = for (i <- 0 until 20) yield (randomDouble * 255)
      testAllThreeCycles(toWrite)
    }

  }

} 
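The StringReader, StringWriter, EndianReader and EndianWriter context bounds come from scalismo.faces.io.ply and are not shown here. Purely to illustrate the kind of Scanner-based ASCII reading the string cycle exercises, a hypothetical type class of the same flavor might look like:

import java.util.Scanner

// Hypothetical sketch only; the real type classes live in scalismo.faces.io.ply
// and may differ in shape.
trait AsciiReader[A] { def read(scanner: Scanner): A }

object AsciiReader {
  implicit val intReader: AsciiReader[Int] = new AsciiReader[Int] {
    def read(scanner: Scanner): Int = scanner.nextInt()
  }
  implicit val doubleReader: AsciiReader[Double] = new AsciiReader[Double] {
    def read(scanner: Scanner): Double = scanner.nextDouble()
  }
}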
Example 3
Source File: MergeStrategySpec.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package daf.filesystem

import java.io.{ Closeable, InputStream }
import java.util.Scanner

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{ FSDataInputStream, FSDataOutputStream, FileSystem, Path }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }

import scala.collection.convert.decorateAsScala._
import scala.util.{ Random, Try }

class MergeStrategySpec extends WordSpec with Matchers with BeforeAndAfterAll {

  private implicit val fileSystem = FileSystem.getLocal(new Configuration)

  private val numFiles = 10

  private val baseDir = "test-dir".asHadoop

  private val workingDir = baseDir / f"merge-strategy-spec-${Random.nextInt(10000)}%05d"

  private def safely[A <: Closeable, U](f: A => U) = { stream: A =>
    val attempt = Try { f(stream) }
    stream.close()
    attempt
  }

  private def readFile(path: Path) = safely[FSDataInputStream, Seq[String]] { _.scanner.asScala.toSeq } apply fileSystem.open(path)

  private def readFiles = Try {
    fileSystem.listStatus(workingDir).toSeq.flatMap { status => readFile(status.getPath).get }
  }

  private def openFiles = Try {
    fileSystem.listStatus(workingDir).toSeq.map { status => fileSystem.open(status.getPath) }
  }

  private def createFile(fileName: String) = safely[FSDataOutputStream, Unit] { stream =>
    Random.alphanumeric.grouped(200).take(10).map { randomSplits(_) }.foreach { row =>
      stream.writeUTF { row.mkString("", ",", "\n") }
    }
  } apply fileSystem.create { workingDir / fileName }

  private def randomSplits(chars: Stream[Char], strings: Seq[String] = Seq.empty): Seq[String] = chars.splitAt { Random.nextInt(10) + 5 } match {
    case (head, tail) if tail.isEmpty => head.drop(1).mkString +: strings
    case (head, tail)                 => randomSplits(tail, head.mkString +: strings)
  }

  private def createWorkingDir = Try { fileSystem.mkdirs(workingDir) }

  private def createFiles = Try {
    0 until numFiles foreach { index => createFile(s"test-file-$index").get } // this is relatively nasty, and should be handled in a `traverse`
  }

  private def prepareData = for {
    _ <- createWorkingDir
    _ <- createFiles
  } yield ()

  private def purgeData = Try { fileSystem.delete(workingDir, true) }

  override def beforeAll() = prepareData.get

  override def afterAll() = purgeData.get

  "MergeStrategies info" when {

    "given compressed format files" must {

      "throw an exception" in {
        an[IllegalArgumentException] must be thrownBy MergeStrategies.find { FileInfo(workingDir / "test-file-0", 0, FileDataFormats.raw, FileCompressionFormats.gzip) }
      }
    }

    "given data as csv" must {

      "drop one line and merge the rest" in {
        safely[InputStream, Seq[String]] { new Scanner(_).asScala.toList }.andThen { attempt =>
          for {
            merged   <- attempt
            expected <- readFiles
          } merged.size should be { expected.size - numFiles + 1 }
        } apply MergeStrategies.csv.merge { openFiles.get }
      }
    }

    "given data as json" must {

      "just merge the files into one" in {
        safely[InputStream, Seq[String]] { new Scanner(_).asScala.toList }.andThen { attempt =>
          for {
            merged   <- attempt
            expected <- readFiles
          } merged.size should be { expected.size }
        } apply MergeStrategies.json.merge { openFiles.get }
      }

    }
  }
} 
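The readFile helper combines two things: a `scanner` enrichment defined elsewhere in daf.filesystem, and the fact that java.util.Scanner implements java.util.Iterator[String] over whitespace-delimited tokens, which is what asScala lifts into a Scala Iterator. A hypothetical sketch of that enrichment:

import java.io.InputStream
import java.util.Scanner

// Hypothetical sketch; the real enrichment lives in daf.filesystem and may
// differ. Once an InputStream is exposed as a Scanner, .asScala yields its tokens.
implicit class ScannerSyntax(val in: InputStream) extends AnyVal {
  def scanner: Scanner = new Scanner(in)
}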
Example 4
Source File: BarChartPainter.scala    From XSQL   with Apache License 2.0
package org.apache.spark.painter

import java.io.File
import java.util.Scanner

import org.jfree.chart.{ChartFactory, ChartUtils}
import org.jfree.chart.plot.PlotOrientation
import org.jfree.data.category.DefaultCategoryDataset

import org.apache.spark.util.Utils

class BarChartPainter(dataPath: String, picturePath: String)
  extends Painter(dataPath, picturePath) {

  def createDataset(): DefaultCategoryDataset = {
    fw.flush()
    fw.close()
    val dataset = new DefaultCategoryDataset
    val scanner = new Scanner(new File(dataPath))
    while (scanner.hasNext()) {
      val cols = scanner.next().split(",")
      dataset.addValue(Utils.byteStringAsMb(cols(1) + "b"), "peak", cols(0))
      dataset.addValue(Utils.byteStringAsMb(cols(2) + "b"), "majority", cols(0))
    }
    dataset
  }

  def paint(
      width: Int,
      height: Int,
      chartTitle: String,
      categoryAxisLabel: String,
      valueAxisLabel: String,
      yLB: Double,
      yUB: Double): Unit = {
    val barChart = ChartFactory.createBarChart(
      chartTitle,
      categoryAxisLabel,
      valueAxisLabel,
      createDataset,
      PlotOrientation.VERTICAL,
      true,
      false,
      false)
    barChart.getCategoryPlot.getRangeAxis.setRange(yLB, yUB)
    ChartUtils.saveChartAsJPEG(new File(picturePath), barChart, width, height)
  }

  override def paint(
      width: Int,
      height: Int,
      chartTitle: String,
      categoryAxisLabel: String,
      valueAxisLabel: String): Unit = {}
} 
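Scanner.next() returns whitespace-delimited tokens, so createDataset works only because each comma-separated record is written without embedded spaces. A line-oriented sketch of the same read loop (not the project's code), which also closes the scanner when done:

import java.io.File
import java.util.Scanner

// Read one CSV record per line; robust to spaces inside fields.
def readRecords(dataPath: String): Seq[Array[String]] = {
  val scanner = new Scanner(new File(dataPath))
  try {
    val rows = scala.collection.mutable.Buffer.empty[Array[String]]
    while (scanner.hasNextLine) rows += scanner.nextLine().split(",")
    rows.toList
  } finally scanner.close()
}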
Example 5
Source File: TimeSeriesChartPainter.scala    From XSQL   with Apache License 2.0
package org.apache.spark.painter

import java.io.File
import java.util.Scanner

import org.jfree.chart.{ChartFactory, ChartUtils}
import org.jfree.data.time.{FixedMillisecond, TimeSeries, TimeSeriesCollection}
import org.jfree.data.xy.XYDataset

class TimeSeriesChartPainter(dataPath: String, picturePath: String)
  extends Painter(dataPath, picturePath) {

  def createDataset(): XYDataset = {
    fw.flush()
    fw.close()
    val dataset = new TimeSeriesCollection
    val timeSeries = new TimeSeries("default")
    val scanner = new Scanner(new File(dataPath))
    while (scanner.hasNext()) {
      val cols = scanner.next().split(",")
      timeSeries.addOrUpdate(new FixedMillisecond(cols(1).toLong), cols(0).toLong)
    }
    dataset.addSeries(timeSeries)
    dataset
  }

  def paint(
      width: Int,
      height: Int,
      chartTitle: String,
      categoryAxisLabel: String,
      valueAxisLabel: String): Unit = {
    val lineChart = ChartFactory.createTimeSeriesChart(
      chartTitle,
      categoryAxisLabel,
      valueAxisLabel,
      createDataset,
      false,
      false,
      false)
    ChartUtils.saveChartAsJPEG(new File(picturePath), lineChart, width, height)
  }
} 
Example 6
Source File: LineChartPainter.scala    From XSQL   with Apache License 2.0
package org.apache.spark.painter

import java.io.File
import java.util.Scanner

import org.jfree.chart.{ChartFactory, ChartUtils}
import org.jfree.chart.plot.PlotOrientation
import org.jfree.data.category.DefaultCategoryDataset

class LineChartPainter(dataPath: String, picturePath: String)
  extends Painter(dataPath, picturePath) {

  def createDataset(): DefaultCategoryDataset = {
    fw.flush()
    fw.close()
    val dataset = new DefaultCategoryDataset
    val scanner = new Scanner(new File(dataPath))
    while (scanner.hasNext()) {
      val cols = scanner.next().split(",")
      dataset.addValue(cols(0).toLong, "default", cols(1))
    }
    dataset
  }

  def paint(
      width: Int,
      height: Int,
      chartTitle: String,
      categoryAxisLabel: String,
      valueAxisLabel: String): Unit = {
    val lineChart = ChartFactory.createLineChart(
      chartTitle,
      categoryAxisLabel,
      valueAxisLabel,
      createDataset,
      PlotOrientation.VERTICAL,
      false,
      false,
      false)
    ChartUtils.saveChartAsJPEG(new File(picturePath), lineChart, width, height)
  }
} 
Example 7
Source File: EnrichTruckData.scala    From trucking-iot   with Apache License 2.0
package com.orendainx.trucking.nifi.processors

import java.io.{InputStream, OutputStream}
import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicReference
import java.util.Scanner

import com.orendainx.trucking.commons.models.{EnrichedTruckData, TruckData}
import com.orendainx.trucking.enrichment.WeatherAPI
import org.apache.nifi.annotation.behavior._
import org.apache.nifi.annotation.documentation.{CapabilityDescription, Tags}
import org.apache.nifi.components.PropertyDescriptor
import org.apache.nifi.logging.ComponentLog
import org.apache.nifi.processor.io.InputStreamCallback
import org.apache.nifi.processor.io.OutputStreamCallback
import org.apache.nifi.processor._

import scala.collection.JavaConverters._


@Tags(Array("trucking", "data", "event", "enrich", "iot"))
@CapabilityDescription("Enriches simulated truck sensor data. Find the master project and its code, documentation and corresponding tutorials at: https://github.com/orendain/trucking-iot")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@TriggerSerially
@WritesAttributes(Array(
  new WritesAttribute(attribute = "dataType", description = "The class name of the resulting enriched data type.")
))
class EnrichTruckData extends AbstractProcessor {

  private var log: ComponentLog = _
  private val RelSuccess = new Relationship.Builder().name("success").description("All generated data is routed to this relationship.").build

  override def init(context: ProcessorInitializationContext): Unit = {
    log = context.getLogger
  }

  override def onTrigger(context: ProcessContext, session: ProcessSession): Unit = {

    var flowFile = session.get
    log.debug(s"Flowfile received: $flowFile")

    // Convert the entire stream of bytes from the flow file into a string
    val content = new AtomicReference[String]
    session.read(flowFile, new InputStreamCallback {
      override def process(inputStream: InputStream) = {
        val scanner = new Scanner(inputStream).useDelimiter("\\A")
        val result = if (scanner.hasNext()) scanner.next() else ""
        log.debug(s"Parsed content: $result")
        content.set(result)
      }
    })

    // Form a TruckData object from the content, then create an EnrichedTruckData
    // object by making the appropriate calls to WeatherAPI
    val truckData = TruckData.fromCSV(content.get())
    val enrichedTruckData = EnrichedTruckData(truckData, WeatherAPI.default.getFog(truckData.eventType),
      WeatherAPI.default.getRain(truckData.eventType), WeatherAPI.default.getWind(truckData.eventType))

    log.debug(s"EnrichedData generated: $enrichedTruckData")

    // Add the new data type as a flow file attribute
    flowFile = session.putAttribute(flowFile, "dataType", enrichedTruckData.getClass.getSimpleName)

    // Replace the flow file, writing in the new content
    flowFile = session.write(flowFile, new OutputStreamCallback {
      override def process(outputStream: OutputStream) =
        outputStream.write(enrichedTruckData.toCSV.getBytes(StandardCharsets.UTF_8))
    })

    // Record a ROUTE provenance event for this flow file (for NiFi's data
    // lineage view), then transfer it to the success relationship
    session.getProvenanceReporter.route(flowFile, RelSuccess)
    session.transfer(flowFile, RelSuccess)
    session.commit()
  }

  // Define properties and relationships
  override def getSupportedPropertyDescriptors: java.util.List[PropertyDescriptor] = List.empty[PropertyDescriptor].asJava

  override def getRelationships: java.util.Set[Relationship] = Set(RelSuccess).asJava
} 
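Unlike Example 1, the read callback guards with hasNext, so an empty flow file yields "" instead of a NoSuchElementException. With Scala 2.12+, the anonymous InputStreamCallback can also be written as a function literal via SAM conversion; a sketch of the same read call, with session, flowFile and content as in the example above:

// SAM-converted equivalent of the session.read callback (sketch, Scala 2.12+).
session.read(flowFile, (inputStream: InputStream) => {
  val scanner = new Scanner(inputStream).useDelimiter("\\A")
  content.set(if (scanner.hasNext()) scanner.next() else "")
})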
Example 8
Source File: Io.scala    From sbt-flaky   with Apache License 2.0
package flaky

import java.io._
import java.net.{HttpURLConnection, URL}
import java.util.Scanner

import sbt.{File, Logger}

import scala.language.postfixOps
import scala.util.{Failure, Success, Try}

object Io {

  def writeToFile(file: File, content: String): Unit = {
    new PrintWriter(file) {
      write(content)
      close()
    }
  }

  def writeToFile(file: File, content: Array[Byte]): Unit = {
    new FileOutputStream(file) {
      write(content)
      close()
    }
  }

  def writeToFile(file: File, is: InputStream): Unit = {
    val array: Array[Byte] = Stream.continually(is.read).takeWhile(_ != -1).map(_.toByte).toArray
    writeToFile(file, array)
  }


  def sendToSlack(webHook: String, jsonMsg: String, log: Logger, backupFile: File): Unit = {
    log.info("Sending report to slack")
    log.debug("Dumping slack msg to file")
    new PrintWriter(backupFile) {
      write(jsonMsg)
      close()
    }

    val send: Try[Unit] = Try {
      val url = new URL(webHook)
      val urlConnection = url.openConnection().asInstanceOf[HttpURLConnection]
      // Indicate that we want to write to the HTTP request body
      urlConnection.setDoOutput(true)
      urlConnection.setRequestMethod("POST")

      // Writing the post data to the HTTP request body
      log.debug(jsonMsg)
      val httpRequestBodyWriter = new BufferedWriter(new OutputStreamWriter(urlConnection.getOutputStream))
      httpRequestBodyWriter.write(jsonMsg)
      httpRequestBodyWriter.close()

      val scanner = new Scanner(urlConnection.getInputStream)
      log.debug("Response from SLACK:")
      while (scanner.hasNextLine) {
        log.debug(s"Response from SLACK: ${scanner.nextLine()}")
      }
      scanner.close()
    }
    send match {
      case Success(_) => log.info("Notification successfully sent to Slack")
      case Failure(e) => log.error(s"Can't send message to slack: ${e.getMessage}")
    }

  }
} 
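One caveat in sendToSlack: HttpURLConnection.getInputStream throws on HTTP error codes, so a failed webhook call surfaces as the Failure branch without a response body. A defensive sketch that checks the status first and falls back to the error stream:

import java.net.HttpURLConnection
import java.util.Scanner

// Read the response body whether the call succeeded or failed (sketch).
def readResponse(conn: HttpURLConnection): String = {
  val stream = if (conn.getResponseCode >= 400) conn.getErrorStream else conn.getInputStream
  val scanner = new Scanner(stream).useDelimiter("\\A")
  try { if (scanner.hasNext) scanner.next() else "" } finally scanner.close()
}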
Example 9
Source File: SafeThread.scala    From eidos   with Apache License 2.0
package org.clulab.wm.wmexchanger.utils

import java.util.Scanner

import org.apache.kafka.common.errors.InterruptException
import org.slf4j.Logger

abstract class SafeThread(logger: Logger) extends Thread {

  def runSafely(): Unit

  override def run(): Unit = {
    try {
      runSafely()
    }
    catch {
      case exception: InterruptException =>
        // This usually happens during consumer.poll().
        logger.info("Kafka interruption") // This is expected.
      case exception: InterruptedException =>
        logger.info("Java interruption") // This is expected.
      case exception: Throwable =>
        logger.error("Consumer interruption", exception)
    }
    finally {
      // This seems to be the only way to "cancel" the scanner.nextLine.
      System.exit(0)
    }
  }

  def waitSafely(duration: Long): Unit = SafeThread.waitSafely(this, logger, duration)

  start
}

object SafeThread {

  def waitSafely(thread: Thread, logger: Logger, duration: Long): Unit = {
    try {
      println("Press ENTER to exit...")
      new Scanner(System.in).nextLine()
      logger.info("User interruption")
      ThreadUtils.stop(thread, duration)
      logger.info("Exiting...")
    }
    catch {
      case _: Throwable => logger.info("Exiting...")
    }
  }
} 
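A usage sketch (hypothetical): a worker that loops until the user presses ENTER, since waitSafely blocks on Scanner.nextLine over stdin, or until the stop timeout elapses. Note that SafeThread starts itself in its constructor.

import org.slf4j.{Logger, LoggerFactory}

// Hypothetical usage of SafeThread; names and durations are illustrative.
object Worker extends App {
  private val logger: Logger = LoggerFactory.getLogger(getClass)
  private val thread = new SafeThread(logger) {
    override def runSafely(): Unit = while (true) Thread.sleep(1000)
  }
  thread.waitSafely(duration = 5000)
}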
Example 10
Source File: LineReader.scala    From eidos   with Apache License 2.0
package org.clulab.wm.eidos.utils

import java.io.File

abstract class LineReader {
  def readLine(): String
}

class CliReader(prompt: String, parentProperty: String, child: String) extends LineReader {
  import jline.console.ConsoleReader
  import jline.console.history.FileHistory

  val reader = new ConsoleReader()
  val history = new FileHistory(new File(System.getProperty(parentProperty), child))

  reader.setPrompt(prompt)
  reader.setHistory(history)
  sys addShutdownHook {
    reader.getTerminal.restore()
    reader.shutdown()
    history.flush() // flush file before exiting
  }

  override def readLine(): String = reader.readLine
}

class IdeReader(protected val prompt: String) extends LineReader {
  import java.util.Scanner

  protected val reader = new Scanner(System.in)

  override def readLine(): String = {
    print(prompt)
    Console.flush()
    reader.nextLine
  }
} 
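The Scanner-based IdeReader exists because JLine's ConsoleReader needs a real terminal, which IDE run configurations usually do not provide. A selection sketch (the prompt, property and history names are illustrative):

// Pick a reader depending on whether a real console is attached (sketch).
val reader: LineReader =
  if (System.console() != null) new CliReader("(eidos)> ", "user.home", ".eidos_history")
  else new IdeReader("(eidos)> ")
println(reader.readLine())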
Example 11
Source File: ThirdPartyResource.scala    From scalismo-ui   with GNU General Public License v3.0
package scalismo.ui.resources.thirdparty

import java.util.Scanner

import scalismo.ui.resources.thirdparty.breeze.Breeze
import scalismo.ui.resources.thirdparty.elusive.Elusive
import scalismo.ui.resources.thirdparty.entypo.Entypo
import scalismo.ui.resources.thirdparty.fontawesome.FontAwesome
import scalismo.ui.resources.thirdparty.interpreterpane.InterpreterPane
import scalismo.ui.resources.thirdparty.javagraphics.JavaGraphics
import scalismo.ui.resources.thirdparty.jhdf.Jhdf
import scalismo.ui.resources.thirdparty.jiconfont.JIconFont
import scalismo.ui.resources.thirdparty.jogl.Jogl
import scalismo.ui.resources.thirdparty.niftijio.NiftiJio
import scalismo.ui.resources.thirdparty.spire.Spire
import scalismo.ui.resources.thirdparty.spray.Spray
import scalismo.ui.resources.thirdparty.vtk.Vtk

object ThirdPartyResource {
  val All: List[ThirdPartyResource] = List(Vtk,
                                           Jogl,
                                           Jhdf,
                                           Breeze,
                                           NiftiJio,
                                           Spire,
                                           Spray,
                                           InterpreterPane,
                                           JavaGraphics,
                                           JIconFont,
                                           FontAwesome,
                                           Elusive,
                                           Entypo).sortBy(_.name.toLowerCase)
}

abstract class ThirdPartyResource {
  def name: String

  def authors: String

  def homepage: Option[String]

  def licenseText: Option[String] = readLicense()

  def licenseName: String

  def readLicense(): Option[String] = readResource("license.txt")

  def readResource(resourceName: String): Option[String] = {
    val resource = Option(this.getClass.getResourceAsStream(resourceName))
    resource.flatMap { stream =>
      val scanner = new Scanner(stream, "UTF-8").useDelimiter("\\A")
      if (scanner.hasNext) {
        Some(scanner.next())
      } else {
        None
      }
    }
  }
} 
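Unlike Example 1, readResource guards with hasNext, so an empty or missing license resource yields None rather than an exception. A usage sketch with a purely illustrative subclass:

// Hypothetical subclass, only to show how licenseText resolves through
// readResource("license.txt") on the classpath next to the class.
object Demo extends ThirdPartyResource {
  override def name: String = "demo"
  override def authors: String = "n/a"
  override def homepage: Option[String] = None
  override def licenseName: String = "MIT"
}

val licenseOrNone: Option[String] = Demo.licenseText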
Example 12
Source File: Confirm.scala    From coursier   with Apache License 2.0
package coursier.cli.setup

import java.io.{InputStream, PrintStream}
import java.util.{Locale, Scanner}

import coursier.util.Task
import dataclass.data

import scala.annotation.tailrec

trait Confirm {
  def confirm(message: String, default: Boolean): Task[Boolean]
}

object Confirm {

  @data class ConsoleInput(
    in: InputStream = System.in,
    out: PrintStream = System.err,
    locale: Locale = Locale.getDefault,
    @since
    indent: Int = 0
  ) extends Confirm {
    private val marginOpt = if (indent > 0) Some(" " * indent) else None
    def confirm(message: String, default: Boolean): Task[Boolean] =
      Task.delay {

        val choice =
          if (default) "[Y/n]"
          else "[y/N]"

        val message0 = marginOpt match {
          case None => message
          case Some(margin) => message.linesIterator.map(margin + _).mkString(System.lineSeparator())
        }
        out.print(s"$message0 $choice ")

        @tailrec
        def loop(): Boolean = {
          val scanner = new Scanner(in)
          val resp = scanner.nextLine()
          val resp0 = resp
            .filter(!_.isSpaceChar)
            .toLowerCase(locale)
            .distinct

          resp0 match {
            case "y" => true
            case "n" => false
            case "" => default
            case _ =>
              out.print(s"Please answer Y or N. $choice ")
              loop()
          }
        }

        loop()
      }
  }

  @data class YesToAll(
    out: PrintStream = System.err
  ) extends Confirm {
    def confirm(message: String, default: Boolean): Task[Boolean] =
      Task.delay {
        out.println(message + " [Y/n] Y")
        true
      }
  }

  def default: Confirm =
    ConsoleInput()

} 
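A usage sketch; actually running the returned Task goes through coursier's own task runner, which is elided here:

// Build (but do not run) a console confirmation that defaults to "yes" (sketch).
val confirm: Confirm = Confirm.default
val task: Task[Boolean] = confirm.confirm("Proceed with setup?", default = true)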
Example 13
Source File: SparkOperationsDocGenerator.scala    From seahorse   with Apache License 2.0
package ai.deepsense.docgen

import java.util.Scanner

object SparkOperationsDocGenerator
  extends DocPageCreator
  with SparkOperationsExtractor
  with RedirectCreator
  with LinkPrinter {

  val sparkVersion = org.apache.spark.SPARK_VERSION
  val scalaDocPrefix = s"https://spark.apache.org/docs/$sparkVersion/api/scala/index.html#"

  // scalastyle:off println
  def main(args: Array[String]): Unit = {

    val sc = new Scanner(System.in)

    println("==========================")
    println("= Seahorse doc generator =")
    println("==========================")
    println
    println("What do you want to do?")
    println("[P]rint links to new Spark operations")
    println("[C]reate documentation pages and redirects for Spark operations")
    print("> ")

    sc.nextLine().toLowerCase match {
      case "p" =>
        println("Do you want to print [A]ll links or only links to [N]ew operations?")
        print("> ")
        sc.nextLine().toLowerCase match {
          case "a" => printLinks(true)
          case "n" => printLinks(false)
          case _ => wrongInputExit()
        }
      case "c" =>
        println("Do you want to [R]ecreate all pages and redirects or [U]pdate for new operations?")
        print("> ")
        sc.nextLine().toLowerCase match {
          case "r" => createDocPagesAndRedirects(true)
          case "u" => createDocPagesAndRedirects(false)
          case _ => wrongInputExit()
        }
      case _ => wrongInputExit()
    }
  }

  private def wrongInputExit(): Unit = {
    println("Unexpected input. Exiting...")
    System.exit(1)
  }

  private def printLinks(printAll: Boolean): Unit = {
    val sparkOperationsByCategory = mapByCategory(sparkOperations())
    printOperationSiteLinks(sparkOperationsByCategory, printAll)
    printOperationMenuLinks(sparkOperationsByCategory, printAll)
  }

  private def createDocPagesAndRedirects(forceUpdate: Boolean): Unit = {
    val sparkOps = sparkOperations()
    val redirectCount = createRedirects(sparkOps, forceUpdate)
    val pageCount = createDocPages(sparkOps, forceUpdate)

    if(redirectCount == 0) {
      println("No redirects updated.")
    } else {
      println(s"Updated $redirectCount redirects.")
    }
    if(pageCount == 0) {
      println("No pages updated.")
    } else {
      println(s"Updated $pageCount pages.")
    }
  }
  // scalastyle:on println
} 
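The repeated prompt-read-match blocks could be factored into a small helper; a sketch, not part of the original source:

import java.util.Scanner

// Print a prompt, then read and normalize one command from the scanner (sketch).
def ask(sc: Scanner, prompt: String): String = {
  println(prompt)
  print("> ")
  sc.nextLine().trim.toLowerCase
}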
Example 14
Source File: Input.scala    From ScalphaGoZero   with Apache License 2.0
package org.deeplearning4j.scalphagozero.input

import java.util.Scanner
import org.deeplearning4j.scalphagozero.board.Move

// The class header and field definitions were elided in the original listing;
// the declarations below are a minimal reconstruction so the snippet compiles.
// The validator is a hypothetical stand-in for the project's own input validator.
class Input {

  private val scanner = new Scanner(System.in)

  private object validator {
    def invalidNum(answer: String, min: Double, max: Double): Boolean =
      scala.util.Try(answer.toDouble).toOption.forall(d => d < min || d > max)
  }

  def getNumber(default: Double, min: Double, max: Double): Double = {
    var answer = scanner.nextLine()
    if (answer.isEmpty) default
    else {
      while (validator.invalidNum(answer, min, max)) {
        println(s"Invalid number '$answer'. Enter a number between $min and $max, inclusive.")
        answer = scanner.nextLine()
      }
      answer.toDouble
    }
  }
}
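A usage sketch with illustrative values:

// Prompt for a value between 0 and 10, defaulting to 6.5 on empty input (sketch).
val input = new Input()
val komi: Double = input.getNumber(6.5, 0.0, 10.0)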
Example 15
Source File: SparkOperationsDocGenerator.scala    From seahorse-workflow-executor   with Apache License 2.0

Apart from its package name (io.deepsense.docgen instead of ai.deepsense.docgen), this example is identical to Example 13, so the code is not repeated here.