java.io.InputStreamReader Scala Examples

The following examples show how to use java.io.InputStreamReader in Scala. Each snippet is drawn from an open-source project; the source file, project, and license are noted above each example.
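As a primer for the patterns below: InputStreamReader bridges a byte-oriented InputStream to characters and should be given an explicit charset (otherwise the platform default is used); wrapping it in a BufferedReader makes line-oriented reading efficient. Here is a minimal, self-contained sketch of that pattern; the resource name "example.txt" is a hypothetical placeholder.

import java.io.{BufferedReader, InputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

object ReadLinesSketch {
  // Read all lines from a stream, decoding bytes as UTF-8.
  def readAllLines(is: InputStream): List[String] = {
    val reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))
    try Iterator.continually(reader.readLine()).takeWhile(_ != null).toList
    finally reader.close() // closing the reader also closes the wrapped stream
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical classpath resource; substitute any real InputStream.
    val is = getClass.getClassLoader.getResourceAsStream("example.txt")
    if (is != null) readAllLines(is).foreach(println)
  }
}

Most of the project examples that follow are variations of this wrap-read-close sequence, applied to sockets, HDFS streams, process output, and classpath resources.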
Example 1
Source File: BuildInfo.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.buildinfo

import java.io.{BufferedReader, InputStreamReader}

import scala.collection.JavaConverters.asScalaIteratorConverter

object BuildInfo {
  val Version: String =
    Option(getClass.getClassLoader.getResourceAsStream("MVN_VERSION")).fold {
      "{component version not found on classpath}"
    } { is =>
      try {
        val reader = new BufferedReader(new InputStreamReader(is))
        reader.lines.iterator.asScala.mkString.trim
      } finally {
        is.close()
      }
    }
} 
Example 2
Source File: ProxyServer.scala    From devbox   with Apache License 2.0
package cmdproxy

import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.net.InetAddress
import java.net.ServerSocket
import java.net.Socket

import scala.util.Using

import devbox.logger.FileLogger
import os.RelPath
import ujson.ParseException
import upickle.default.{macroRW, ReadWriter}

case class Request(workingDir: String, cmd: Seq[String])
object Request {
  implicit val rw: ReadWriter[Request] = macroRW
}


// Enclosing class reconstructed from usage below; the constructor signature is an assumption.
class ProxyServer(dirMapping: Seq[(os.Path, os.RelPath)],
                  logger: FileLogger,
                  port: Int = ProxyServer.DEFAULT_PORT) {

  private val socket = new ServerSocket(port, 0, InetAddress.getLoopbackAddress)

  val localDir: Map[os.RelPath, os.Path] = dirMapping.map(_.swap).toMap

  def start(): Unit = {
    logger.info(s"Starting command proxy server, listening at ${socket.getInetAddress}:${socket.getLocalPort}")
    (new Thread("Git Proxy Thread") {
      override def run(): Unit = {
        while (!socket.isClosed) {
          Using(socket.accept()) { handleConnection } recover {
            // The specific SocketException case must come first; after a general
            // Exception case it would be unreachable.
            case e: java.net.SocketException if e.getMessage == "Socket closed" =>
              logger.error(s"Git proxy socket closed")
            case e: Exception =>
              logger.error(s"Error handling request ${e.getMessage}")
          }
        }
      }
    }).start()

  }

  def handleConnection(conn: Socket): Unit = try {
    logger.info(s"Accepting connection from ${conn.getInetAddress}")
    val in = new BufferedReader(new InputStreamReader(conn.getInputStream, ProxyServer.CHARSET_NAME))
    val out = new PrintWriter(new OutputStreamWriter(conn.getOutputStream, ProxyServer.CHARSET_NAME))

    upickle.default.read[Request](in.readLine()) match {
      case Request(dir, args) =>
        val workingDir = localDir
          .collect{case (remote, local) if RelPath(dir).startsWith(remote) =>
            local / RelPath(dir).relativeTo(remote)
          }
          .head

        // Be cautious here: only execute "git" commands
        if (args.headOption.contains("git")) {
          logger.info(s"Executing `${args.mkString(" ")}` in $workingDir")

          val proc = os.proc(args).call(
            workingDir,
            mergeErrIntoOut = true,
            stdout = os.ProcessOutput.Readlines(str =>
              out.println(upickle.default.write(Left[String, Int](str)))
            ),
            check = false,
            timeout = 10000
          )

          out.println(upickle.default.write(Right[String, Int](proc.exitCode)))
        } else {
          val msg = s"Not executing non-git commend: `${args.mkString(" ")}`."
          logger.info(msg)
          out.println(upickle.default.write(Right[String, Int](1)))
        }

        out.flush()
    }
  } catch {
    case e: ParseException => logger.error(s"Error parsing incoming json request: ${e.getMessage}")
  }
}

object ProxyServer {
  val DEFAULT_PORT = 20280
  val CHARSET_NAME = "UTF-8"
} 
Example 3
Source File: CustomReceiver.scala    From drizzle-spark   with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself if isStopped() returns false
  }

  // Create a socket connection and receive data until the receiver is stopped
  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 4
Source File: ExampleFileTests.scala    From circe-yaml   with Apache License 2.0
package io.circe.yaml

import java.io.{ File, InputStreamReader }

import org.scalatest.freespec.AnyFreeSpec
import scala.io.Source

class ExampleFileTests extends AnyFreeSpec {

  "yaml test files" - {

    val testFiles = new File(getClass.getClassLoader.getResource("test-yamls").getPath).listFiles
      .filter(_.getName.endsWith(".yml"))
      .map { file =>
        file.getName -> file.getName.replaceFirst("yml$", "json")
      }

    testFiles.foreach {
      case (yamlFile, jsonFile) =>
        yamlFile in {
          val jsonStream = getClass.getClassLoader.getResourceAsStream(s"test-yamls/$jsonFile")
          val json = Source.fromInputStream(jsonStream).mkString
          jsonStream.close()
          val parsedJson = io.circe.jawn.parse(json)
          def yamlStream = getClass.getClassLoader.getResourceAsStream(s"test-yamls/$yamlFile")
          def yamlReader = new InputStreamReader(yamlStream)
          val yaml = Source.fromInputStream(yamlStream).mkString
          val parsedYamlString = parser.parse(yaml)
          val parsedStreamString = parser.parseDocuments(yaml)
          val parsedYamlReader = parser.parse(yamlReader)
          val parsedStreamReader = parser.parseDocuments(yamlReader)
          assert(parsedJson == parsedYamlString)
          assert(parsedJson == parsedStreamString.head)
          assert(parsedJson == parsedYamlReader)
          assert(parsedJson == parsedStreamReader.head)
        }
    }
  }
} 
Example 5
Source File: Console.scala    From slide-desktop   with GNU General Public License v2.0
package gui

import java.awt.{BorderLayout, Insets}
import java.io.{BufferedReader, InputStreamReader}
import javax.swing._

import connections.usb.Adb

class Console extends JFrame {
    private val consoleTextField: JTextArea = new JTextArea()

    {
        this.setTitle("Output")
        this.setBounds(100, 100, 400, 200)

        consoleTextField.setEditable(false)
        consoleTextField.setMargin(new Insets(10, 10, 10, 10))
        consoleTextField.setAlignmentX(0)
        this.getContentPane.add(consoleTextField, BorderLayout.CENTER)
        consoleTextField.setCaretPosition(0)

        val scroll: JScrollPane = new JScrollPane(consoleTextField,
            ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER)
        this.getContentPane.add(scroll)
    }

    def append(text: String): Unit = {
        if (consoleTextField.getText == "")
            consoleTextField.append(text)
        else
            consoleTextField.append("\n" + text)
    }

    def showConsole(): Unit = this.setVisible(true)

    def runProcess(pr: Process): Unit = {

        var consoleOut: String = null
        var stdInput: BufferedReader = null
        var stdError: BufferedReader = null
        if (pr != null) {
            stdInput = new BufferedReader(new InputStreamReader(pr.getInputStream))
            stdError = new BufferedReader(new InputStreamReader(pr.getErrorStream))
            while ( {
                consoleOut = stdInput.readLine()
                consoleOut != null
            }) {
                this.append(consoleOut)
            }

            var errorOut: String = null
            while ( {
                errorOut = stdError.readLine()
                errorOut != null
            }) {
                this.append(errorOut)
            }
        }

        showConsole()
    }

    def runAdbProcess(pr: Process): Unit = {
        var deviceAvailable: Boolean = false

        var consoleOut: String = null
        var stdInput: BufferedReader = null
        var stdError: BufferedReader = null
        if (Adb.isAdbFilePresent && pr != null) {
            stdInput = new BufferedReader(new InputStreamReader(pr.getInputStream))
            stdError = new BufferedReader(new InputStreamReader(pr.getErrorStream))

            while ( {
                consoleOut = stdInput.readLine()
                consoleOut != null
            }) {
                if (consoleOut.contains("\tdevice")) {
                    deviceAvailable = true
                }
                this.append(consoleOut)
            }

            var errorOut: String = null
            while ( {
                errorOut = stdError.readLine()
                errorOut != null
            }) {
                this.append(errorOut)
            }
        } else {
            this.append("Error: ADB is not installed")
        }

        showConsole()
    }
} 
Example 6
Source File: PostUrl.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.bundle.http

import java.io.{BufferedReader, InputStreamReader}
import java.net.URI

import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.sql.SparkSession


class PostUrl extends ConfigurableStop{
  override val authorEmail: String = "[email protected]"
  override val inportList: List[String] = List(Port.DefaultPort)
  override val outportList: List[String] = List(Port.DefaultPort)
  override val description: String = "Send a post request to the specified http"

  var url : String= _
  var jsonPath : String = _


  override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val spark = pec.get[SparkSession]()

    //read  json from hdfs
    val conf = new Configuration()
    val fs = FileSystem.get(URI.create(jsonPath),conf)
    val stream: FSDataInputStream = fs.open(new Path(jsonPath))
    val bufferReader = new BufferedReader(new InputStreamReader(stream))
    var lineTxt = bufferReader.readLine()
    val buffer = new StringBuffer()
    while (lineTxt != null) {
      buffer.append(lineTxt)
      lineTxt = bufferReader.readLine()
    }
    bufferReader.close()

    // post
    val client = HttpClients.createDefault()

    val post = new HttpPost(url)
    post.addHeader("content-Type","application/json")
    post.setEntity(new StringEntity(buffer.toString, "UTF-8"))
    val response = client.execute(post)
    val entity = response.getEntity
    val str = EntityUtils.toString(entity,"UTF-8")
    println("Code is " + str)

  }


  override def setProperties(map: Map[String, Any]): Unit = {
    url = MapUtil.get(map,key="url").asInstanceOf[String]
    jsonPath = MapUtil.get(map,key="jsonPath").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor : List[PropertyDescriptor] = List()
    val url = new PropertyDescriptor()
      .name("url")
      .displayName("Url")
      .defaultValue("")
      .description("http request address")
      .required(true)
      .example("http://master:8002/flow/start")

    val jsonPath = new PropertyDescriptor()
      .name("jsonPath")
      .displayName("JsonPath")
      .defaultValue("")
      .description("json parameter path for post request")
      .required(true)
        .example("hdfs://master:9000/work/flow.json")

    descriptor = url :: descriptor
    descriptor = jsonPath :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = {
    ImageUtil.getImage("icon/http/PostUrl.png")
  }

  override def getGroup(): List[String] = {
    List(StopGroup.HttpGroup.toString)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

} 
Example 7
Source File: Pathway.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.bundle.microorganism

import java.io.{BufferedReader, InputStreamReader, OutputStreamWriter}

import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.json.JSONObject


class Pathway extends ConfigurableStop{
  override val authorEmail: String = "[email protected]"
  override val description: String = "Parse Pathway data"
  override val inportList: List[String] =List(Port.DefaultPort.toString)
  override val outportList: List[String] = List(Port.DefaultPort.toString)


  var cachePath:String = _
  def setProperties(map: Map[String, Any]): Unit = {
    cachePath=MapUtil.get(map,key="cachePath").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor : List[PropertyDescriptor] = List()
    val cachePath = new PropertyDescriptor().name("cachePath").displayName("cachePath").description("Temporary Cache File Path")
      .defaultValue("/pathway").required(true)
    descriptor = cachePath :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = {
    ImageUtil.getImage("icon/microorganism/Pathway.png")
  }

  override def getGroup(): List[String] = {
    List(StopGroup.MicroorganismGroup)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

  override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val inDf: DataFrame = in.read()
    var pathStr: String =inDf.take(1)(0).get(0).asInstanceOf[String]

    val configuration: Configuration = new Configuration()
    val pathARR: Array[String] = pathStr.split("\\/")
    var hdfsUrl:String=""
    for (x <- (0 until 3)){
      hdfsUrl+=(pathARR(x) +"/")
    }
    configuration.set("fs.defaultFS",hdfsUrl)
    var fs: FileSystem = FileSystem.get(configuration)


    val hdfsPathTemporary = hdfsUrl+cachePath+"/pathwayCache/pathwayCache.json"
    val path: Path = new Path(hdfsPathTemporary)
    if(fs.exists(path)){
      fs.delete(path, true)
    }
    fs.create(path).close()
    val hdfsWriter: OutputStreamWriter = new OutputStreamWriter(fs.append(path))

    var fdis: FSDataInputStream = null
    var br: BufferedReader = null
    var doc: JSONObject = null
    var hasAnotherSequence:Boolean = true

    inDf.collect().foreach(row => {
      pathStr = row.get(0).asInstanceOf[String]

      fdis = fs.open(new Path(pathStr))
      br = new BufferedReader(new InputStreamReader(fdis))
      hasAnotherSequence = true // reset per input file; otherwise only the first file is parsed
      var count = 0
      while (hasAnotherSequence) {
          count += 1
          doc = new JSONObject
          hasAnotherSequence = util.KeggPathway.process(br, doc)

          doc.write(hdfsWriter)
          hdfsWriter.write("\n")
        }
      br.close()
      fdis.close()
    })
    hdfsWriter.close()

    val df: DataFrame = pec.get[SparkSession]().read.json(hdfsPathTemporary)
    df.schema.printTreeString()
    println(df.count)

    out.write(df)

  }
} 
Example 8
Source File: GetUrlTest.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.bundle.http

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.net.{HttpURLConnection, InetAddress, URL, URLConnection}

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class GetUrlTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/http/getUrl.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 9
Source File: SparkSqlUtils.scala    From HadoopLearning   with MIT License
package com.c503.utils

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.nio.file.Path

import com.google.common.io.Resources
import org.apache.log4j.{Level, Logger}
import org.apache.mesos.Protos.Resource
import org.apache.spark.sql.SparkSession

import scala.io.Source


object SparkSqlUtils {

  // Helper assumed from the call below: resolves a classpath resource to a file path.
  def getPathByName(name: String): String =
    Resources.getResource(name).getPath

  def readSqlByPath(sqlPath: String) = {
    val buf = new StringBuilder
    val path = this.getPathByName(sqlPath)
    val file = Source.fromFile(path)
    for (line <- file.getLines) {
      buf ++= line + "\n"
    }
    file.close
    buf.toString()
  }


} 
Example 10
Source File: ProcessInterpreter.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.engine.Interpreter

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.util.concurrent.TimeUnit

import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engine.spark.common.{LineBufferedStream, Starting, State, _}
import com.webank.wedatasphere.linkis.scheduler.executer.{ErrorExecuteResponse, ExecuteResponse, SuccessExecuteResponse}
import org.apache.commons.io.IOUtils
import org.json4s._

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}


abstract class ProcessInterpreter(process: Process) extends Interpreter with Logging {

  implicit val executor: ExecutionContext = ExecutionContext.global

  protected[this] var _state: State = Starting()

  protected[this] val stdin = new PrintWriter(process.getOutputStream)
  protected[this] val stdout = new BufferedReader(new InputStreamReader(process.getInputStream()), 1)
  protected[this] val errOut = new LineBufferedStream(process.getErrorStream())

  override def state: State = _state

  override def execute(code: String): ExecuteResponse = {
    if(code == "sc.cancelAllJobs" || code == "sc.cancelAllJobs()") {
      sendExecuteRequest(code)
    }
    _state match {
      case (Dead() | ShuttingDown() | Error() | Success()) =>
        throw new IllegalStateException("interpreter is not running")
      case Idle() =>
        require(state == Idle())
        code match {
          case "SHUTDOWN" =>
            sendShutdownRequest()
            close()
            ErrorExecuteResponse("shutdown",new Exception("shutdown"))
          case _ =>
            _state = Busy()
            sendExecuteRequest(code) match {
              case Some(rep) =>
                _state = Idle()
               // ExecuteComplete(rep)
                SuccessExecuteResponse()
              case None =>
                _state = Error()
                val errorMsg = errOut.lines.mkString(", ")
                throw new Exception(errorMsg)
            }
        }
      case _ => throw new IllegalStateException(s"interpreter is in ${_state} state, cannot do query.")
    }
  }

  Future {
    val exitCode = process.waitFor()
    if (exitCode != 0) {
      errOut.lines.foreach(println)
      println(getClass.getSimpleName+" has stopped with exit code " + process.exitValue)
      _state = Error()
    } else {
      println(getClass.getSimpleName+" has finished.")
      _state = Success()
    }
  }

  protected def waitUntilReady(): Unit

  protected def sendExecuteRequest(request: String): Option[JValue]

  protected def sendShutdownRequest(): Unit = {}


  override def close(): Unit = {
    val future = Future {
      _state match {
        case (Dead() | ShuttingDown() | Success()) =>
          Future.successful()
        case _ =>
          sendShutdownRequest()
      }
    }
    _state = Dead()
    IOUtils.closeQuietly(stdin)
    IOUtils.closeQuietly(stdout)
    errOut.close

    // Give ourselves 10 seconds to tear down the process.
    Utils.tryFinally(Await.result(future, Duration(10, TimeUnit.SECONDS))){
      process.destroy()}
  }

} 
Example 11
Source File: ModelSource.scala    From spark-ml-serving   with Apache License 2.0
package io.hydrosphere.spark_ml_serving.common

import java.io.{InputStreamReader, BufferedReader}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{Path, FileSystem}

case class ModelSource(
  root: String,
  fs: FileSystem
) {

  def readFile(path: String): String = {
    val fsPath = filePath(path)
    val reader = new BufferedReader(new InputStreamReader(fs.open(fsPath)))

    val builder      = new StringBuilder()
    var line: String = null
    while ({ line = reader.readLine(); line != null }) {
      builder.append(line + "\n")
    }
    builder.mkString
  }

  def findFile(dir: String, recursive: Boolean, f: String => Boolean): Option[Path] = {
    val dirPath = filePath(dir)
    if (fs.exists(dirPath) & fs.isDirectory(dirPath)) {
      val iter = fs.listFiles(dirPath, recursive)
      while (iter.hasNext) {
        val st = iter.next()
        if (st.isFile && f(st.getPath.getName)) return Some(st.getPath)
      }
      None
    } else {
      None
    }
  }

  def filePath(path: String): Path = {
    new Path(s"$root/$path")
  }

}

object ModelSource {

  def local(path: String): ModelSource = {
    ModelSource(path, FileSystem.getLocal(new Configuration()))
  }

  def hadoop(path: String, conf: Configuration): ModelSource = {
    val fs = FileSystem.get(conf)
    ModelSource(path, fs)
  }

} 
Example 12
Source File: RpcMainchainNodeApi.scala    From Sidechains-SDK   with MIT License
package com.horizen.mainchain.api

import java.io.{BufferedReader, InputStreamReader}

import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.horizen.SidechainSettings
import com.horizen.serialization.ApplicationJsonSerializer
import com.horizen.utils.BytesUtils

class RpcMainchainNodeApi(val sidechainSettings: SidechainSettings)
  extends MainchainNodeApi
{

  private lazy val isOsWindows = {
    val osname = System.getProperty("os.name", "generic").toLowerCase()
    osname.contains("win")
  }

  private val clientPath = sidechainSettings.websocket.zencliCommandLine + " " +
    (sidechainSettings.genesisData.mcNetwork match {
      case "regtest" => "-regtest "
      case "testnet" => "-testnet "
      case _ => ""
    })

  private def callRpc(params: String) : String = {
    System.out.println(clientPath + " " + params)
    val process = Runtime.getRuntime.exec(clientPath + " " + params)

    val stdInput: BufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream))

    val stdError: BufferedReader = new BufferedReader(new InputStreamReader(process.getErrorStream))

    val error = stdError.readLine()
    if(error != null)
      throw new IllegalStateException("Error: " + error)

    stdInput.readLine
  }

  private def encloseJsonParameter(parameter: String): String = {
    if (isOsWindows)
      "\"" + parameter.replace("\"", "\\\"") + "\""
    else
      "'" + parameter + "'"
  }

  private def encloseStringParameter(parameter: String): String = {
    "\"" + parameter + "\""
  }

  override def getSidechainInfo: SidechainInfoResponse = {
    val objectMapper = new ObjectMapper()
    val response = callRpc("getscinfo")

    objectMapper.readValue(response, classOf[SidechainInfoResponse])
  }

  override def sendCertificate(certificateRequest: SendCertificateRequest): SendCertificateResponse = {
    val serializer = ApplicationJsonSerializer.getInstance() // TODO: maybe it's better to construct object mapper from scratch
    serializer.setDefaultConfiguration()
    val objectMapper = serializer.getObjectMapper
    objectMapper.disable(SerializationFeature.INDENT_OUTPUT)

    val response = callRpc("send_certificate "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.sidechainId)) + " "
      + certificateRequest.epochNumber + " "
      + certificateRequest.quality + " "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.endEpochBlockHash)) + " "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.proofBytes)) + " "
      + encloseJsonParameter(objectMapper.writeValueAsString(certificateRequest.backwardTransfers)) + " "
      + certificateRequest.fee
      )

    SendCertificateResponse(BytesUtils.fromHexString(response))
  }
} 
Example 13
Source File: LocalizedString.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.utils

import java.text.MessageFormat
import java.util.{ResourceBundle, Locale}


trait LocalizedString {

  def raw(msg: String)(implicit locale: Locale=Locale.getDefault, context:String="messages"): String = {
    val bundle = ResourceBundle.getBundle(context, locale, UTF8BundleControl)
    bundle.getString(msg)
  }

  def apply(msg: String, args: Any*)(locale: Locale=Locale.getDefault, context:String="messages"): String = {
    new MessageFormat(raw(msg)(locale, context), locale).format(args.map(_.asInstanceOf[java.lang.Object]).toArray)
  }
}


object LocalizedString extends LocalizedString


// @see https://gist.github.com/alaz/1388917
// @see http://stackoverflow.com/questions/4659929/how-to-use-utf-8-in-resource-properties-with-resourcebundle
private[utils] object UTF8BundleControl extends ResourceBundle.Control {

  val Format = "properties.utf8"

  override def getFormats(baseName: String): java.util.List[String] = {
    import collection.JavaConverters._

    Seq(Format).asJava
  }

  override def getFallbackLocale(baseName: String, locale: Locale) =
    if (locale == Locale.getDefault) null
    else Locale.getDefault

  override def newBundle(baseName: String, locale: Locale, fmt: String, loader: ClassLoader, reload: Boolean): ResourceBundle = {
    import java.util.PropertyResourceBundle
    import java.io.InputStreamReader

    // The below is an approximate copy of the default Java implementation
    def resourceName = toResourceName(toBundleName(baseName, locale), "properties")

    def stream =
      if (reload) {
        for {url <- Option(loader getResource resourceName)
             connection <- Option(url.openConnection)}
          yield {
            connection.setUseCaches(false)
            connection.getInputStream
          }
      } else
        Option(loader getResourceAsStream resourceName)

    (for {format <- Option(fmt) if format == Format
          is <- stream}
      yield new PropertyResourceBundle(new InputStreamReader(is, "UTF-8"))).orNull
  }
} 
Example 14
Source File: InternalHttpClient.scala    From wookiee   with Apache License 2.0
package com.webtrends.harness.http

import java.io.{BufferedReader, InputStreamReader}
import java.net.{HttpURLConnection, URLConnection}
import java.util.zip.{GZIPInputStream, InflaterInputStream}


// Enclosing object reconstructed; the HTTP request helpers from the original file are omitted.
object InternalHttpClient {

  case class HttpResponseData(
                               statusLine: String,
                               content: String,
                               headers: collection.mutable.Map[String, String]) {

    private val startIndex = statusLine.indexOf(" ") + 1
    val status = statusLine.substring(startIndex, startIndex + 3)

    override def toString: String = {
      val sb = new StringBuilder
      sb.append(statusLine + "\n")
      headers.foreach(m => sb.append(m._1 + "=" + m._2 + "\n"))
      sb.append("\n" + content + "\n")
      sb.toString()
    }

  }
} 
Example 15
Source File: CustomReceiver.scala    From Learning-Spark-SQL   with MIT License
import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself if isStopped() returns false
  }

  // Create a socket connection and receive data until the receiver is stopped
  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     println("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     println("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     println("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
} 
Example 16
Source File: CreateJacksonParser.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.io.{ByteArrayInputStream, InputStream, InputStreamReader}
import java.nio.channels.Channels
import java.nio.charset.Charset

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.hadoop.io.Text
import sun.nio.cs.StreamDecoder

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.unsafe.types.UTF8String

private[sql] object CreateJacksonParser extends Serializable {
  def string(jsonFactory: JsonFactory, record: String): JsonParser = {
    jsonFactory.createParser(record)
  }

  def utf8String(jsonFactory: JsonFactory, record: UTF8String): JsonParser = {
    val bb = record.getByteBuffer
    assert(bb.hasArray)

    val bain = new ByteArrayInputStream(
      bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())

    jsonFactory.createParser(new InputStreamReader(bain, "UTF-8"))
  }

  def text(jsonFactory: JsonFactory, record: Text): JsonParser = {
    jsonFactory.createParser(record.getBytes, 0, record.getLength)
  }

  // Jackson parsers can be ranked according to their performance:
  // 1. Array based with actual encoding UTF-8 in the array. This is the fastest parser
  //    but it doesn't allow to set encoding explicitly. Actual encoding is detected automatically
  //    by checking leading bytes of the array.
  // 2. InputStream based with actual encoding UTF-8 in the stream. Encoding is detected
  //    automatically by analyzing first bytes of the input stream.
  // 3. Reader based parser. This is the slowest parser used here but it allows to create
  //    a reader with specific encoding.
  // The method creates a reader for an array with given encoding and sets size of internal
  // decoding buffer according to size of input array.
  private def getStreamDecoder(enc: String, in: Array[Byte], length: Int): StreamDecoder = {
    val bais = new ByteArrayInputStream(in, 0, length)
    val byteChannel = Channels.newChannel(bais)
    val decodingBufferSize = Math.min(length, 8192)
    val decoder = Charset.forName(enc).newDecoder()

    StreamDecoder.forDecoder(byteChannel, decoder, decodingBufferSize)
  }

  def text(enc: String, jsonFactory: JsonFactory, record: Text): JsonParser = {
    val sd = getStreamDecoder(enc, record.getBytes, record.getLength)
    jsonFactory.createParser(sd)
  }

  def inputStream(jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(is)
  }

  def inputStream(enc: String, jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(new InputStreamReader(is, enc))
  }

  def internalRow(jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val ba = row.getBinary(0)

    jsonFactory.createParser(ba, 0, ba.length)
  }

  def internalRow(enc: String, jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val binary = row.getBinary(0)
    val sd = getStreamDecoder(enc, binary, binary.length)

    jsonFactory.createParser(sd)
  }
} 
Example 17
Source File: StreamMetadata.scala    From XSQL   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets
import java.util.ConcurrentModificationException

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileAlreadyExistsException, FSDataInputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.streaming.CheckpointFileManager.CancellableFSDataOutputStream
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String)

// Companion reconstructed so the snippet compiles; json4s Serialization requires implicit formats.
object StreamMetadata extends Logging {
  private implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: CancellableFSDataOutputStream = null
    try {
      val fileManager = CheckpointFileManager.create(metadataFile.getParent, hadoopConf)
      output = fileManager.createAtomic(metadataFile, overwriteIfPossible = false)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case e: FileAlreadyExistsException =>
        if (output != null) {
          output.cancel()
        }
        throw new ConcurrentModificationException(
          s"Multiple streaming queries are concurrently using $metadataFile", e)
      case e: Throwable =>
        if (output != null) {
          output.cancel()
        }
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    }
  }
} 
Example 18
Source File: DockerfileParser.scala    From rug   with GNU General Public License v3.0
package com.atomist.rug.kind.docker

import _root_.java.util
import java.io.InputStreamReader
import javax.script.{Invocable, ScriptEngineManager}

import com.atomist.util.Utils.withCloseable
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.springframework.core.io.ClassPathResource

import scala.collection.JavaConverters._

object DockerfileParser {

  val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  val consoleJs =
    """
      |console = {
      |   log: print,
      |   warn: print,
      |   error: print
      |};
    """.stripMargin
  def parse(content: String): Dockerfile = {
    val param = Option(content).getOrElse("")
    val content1 = param.replace("\r\n", "\n").replace("\r", "\n")
    withCloseable(new ClassPathResource("docker/parser.js").getInputStream)(is => {
      withCloseable(new InputStreamReader(is))(reader => {
        try {
          val engine = new ScriptEngineManager(null).getEngineByName("nashorn")
          engine.eval(consoleJs)
          engine.eval(reader)
          val invocable = engine.asInstanceOf[Invocable]
          val result = invocable.invokeFunction("parse", content1, Map("includeComments" -> "true").asJava)
          val lines = result match {
            case map: util.Map[AnyRef @unchecked, AnyRef @unchecked] =>
              map.asScala.values.map(c => mapper.convertValue(c, classOf[DockerfileLine])).toSeq
            case _ => throw new IllegalArgumentException("Failed to parse content")
          }
          new Dockerfile(lines)
        } catch {
          case e: Exception =>
            throw DockerfileException("Failed to parse Dockerfile", e)
        }
      })
    })
  }
} 
Example 19
Source File: GitHubJavaParserExtractor.scala    From rug   with GNU General Public License v3.0
package com.atomist.rug.kind.java

import java.io.InputStreamReader
import java.util.{List => JList}

import com.atomist.source.{ArtifactSourceException, FileArtifact}
import com.github.javaparser.ast.CompilationUnit
import com.github.javaparser.{JavaParser, ParseException}
import com.typesafe.scalalogging.LazyLogging

case class FileAndCompilationUnit(file: FileArtifact, compilationUnit: CompilationUnit)

import com.atomist.util.Utils.withCloseable

import scala.collection.JavaConverters._


object GitHubJavaParserExtractor extends Function[JList[FileArtifact], Seq[FileAndCompilationUnit]] with LazyLogging {

  override def apply(javaFiles: JList[FileArtifact]): Seq[FileAndCompilationUnit] = {
    javaFiles.asScala.map(f => {
      logger.debug(s"Looking at Java artifact $f using $this")
      withCloseable(f.inputStream())(is =>
        withCloseable(new InputStreamReader(is))(reader => {
          try {
            FileAndCompilationUnit(f, JavaParser.parse(reader))
          } catch {
            case pex: ParseException =>
              throw new ArtifactSourceException(s"Parsing error in ${f.path},content was\n${f.content}", pex)
          }
        })
      )
    })
  }
} 
Example 20
Source File: AmqpXPathCheckMaterializer.scala    From gatling-amqp-plugin   with Apache License 2.0
package ru.tinkoff.gatling.amqp.checks

import java.io.{ByteArrayInputStream, InputStreamReader}

import io.gatling.commons.validation.{safely, _}
import io.gatling.core.check.xpath.{Dom, XPathCheckType, XmlParsers}
import io.gatling.core.check.{CheckMaterializer, Preparer}
import org.xml.sax.InputSource
import ru.tinkoff.gatling.amqp.AmqpCheck
import ru.tinkoff.gatling.amqp.request.AmqpProtocolMessage

class AmqpXPathCheckMaterializer(xmlParsers: XmlParsers)
    extends CheckMaterializer[XPathCheckType, AmqpCheck, AmqpProtocolMessage, Option[Dom]](identity) {
  private val ErrorMapper = "Could not parse response into a DOM Document: " + _

  override protected def preparer: Preparer[AmqpProtocolMessage, Option[Dom]] =
    message =>
      safely(ErrorMapper) {
        message match {
          case AmqpProtocolMessage(_, payload, _) =>
            val in = new ByteArrayInputStream(payload)
            Some(xmlParsers.parse(new InputSource(new InputStreamReader(in)))).success
          case _ => "Unsupported message type".failure
        }
      }
} 
Example 21
Source File: UpdateChecker.scala    From kotlin-plugin   with MIT License
package kotlin

import java.io.{InputStreamReader, BufferedReader, StringWriter}

import argonaut._, Argonaut._

import scala.concurrent.Future
import scala.util.{Failure, Success}

object UpdateChecker {
  import scala.concurrent.ExecutionContext.Implicits.global
  type Result = (Set[String],String)
  type Callback[A] = Either[Throwable,Result] => A

  def apply[A](user: String, repo: String, name: String)(result: Callback[A]): Unit = {
    val bintray = new java.net.URL(
      s"https://api.bintray.com/packages/$user/$repo/$name")
    Future {
      val uc = bintray.openConnection()
      val in = new BufferedReader(new InputStreamReader(uc.getInputStream, "utf-8"))
      try {
        val sw = new StringWriter
        val buf = Array.ofDim[Char](8192)
        Stream.continually(in.read(buf, 0, 8192)) takeWhile (
          _ != -1) foreach (sw.write(buf, 0, _))
        sw.toString
      } finally {
        in.close()
      }
    } onComplete {
      case Success(json) =>
        val decoded = json.decode[PackageInfo]
        val res: Either[Throwable, Result] = decoded match {
          case Left(Left(str)) =>
            Left(new IllegalArgumentException(str))
          case Left(Right(cursorHistory)) =>
            Left(new IllegalArgumentException(cursorHistory._1))
          case Right(packageInfo) =>
            Right(packageInfo.versions.toSet -> packageInfo.version)
        }
        result(res)
      case Failure(t) => result(Left(t))
    }
  }

  implicit def PackageInfoCodecJson: CodecJson[PackageInfo] = casecodec3(
    PackageInfo.apply, PackageInfo.unapply)("name", "latest_version", "versions")

  case class PackageInfo(name: String, version: String, versions: List[String])
} 
Example 22
Source File: BufferedSource.scala    From perf_tester   with Apache License 2.0
package scala.io

import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
import Source.DefaultBufSize
import scala.collection.{ Iterator, AbstractIterator }


// Class header reconstructed from the Scala standard library source; the reader-caching
// members (charReader, decachedReader, iter, etc.) are omitted from this excerpt.
class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val codec: Codec)
  extends Source {

  override def mkString = {
    // Speed up slurping of whole data set in the simplest cases.
    val allReader = decachedReader
    val sb = new StringBuilder
    val buf = new Array[Char](bufferSize)
    var n = 0
    while (n != -1) {
      n = allReader.read(buf)
      if (n>0) sb.appendAll(buf, 0, n)
    }
    sb.result
  }
} 
Example 23
Source File: AkkaBuild.scala    From perf_tester   with Apache License 2.0
package akka

import java.io.{ FileInputStream, InputStreamReader }
import java.util.Properties

import sbt.Keys._
import sbt._
import scala.collection.breakOut

object AkkaBuild {

  val enableMiMa = true

  lazy val buildSettings = Dependencies.Versions ++ Seq(
    organization := "com.typesafe.akka",
    version := "2.5-SNAPSHOT")

  private def allWarnings: Boolean = System.getProperty("akka.allwarnings", "false").toBoolean

  lazy val defaultSettings = 
    Seq[Setting[_]](
      // compile options
      scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"),
      scalacOptions in Compile ++= (if (allWarnings) Seq("-deprecation") else Nil),
      // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings
      javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"),
      javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil),
      javacOptions in doc ++= Seq(),

      crossVersion := CrossVersion.binary,

      ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet,

      licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
      homepage := Some(url("http://akka.io/")),

      apiURL := Some(url(s"http://doc.akka.io/api/akka/${version.value}"))
  )

  def loadSystemProperties(fileName: String): Unit = {
    import scala.collection.JavaConverters._
    val file = new File(fileName)
    if (file.exists()) {
      println("Loading system properties from file `" + fileName + "`")
      val in = new InputStreamReader(new FileInputStream(file), "UTF-8")
      val props = new Properties
      props.load(in)
      in.close()
      sys.props ++ props.asScala
    }
  }

  def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version)
} 
Example 24
Source File: VaultHelper.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.plugin.helper

import java.io.{BufferedReader, InputStreamReader}

import akka.event.slf4j.SLF4JLogging
import org.apache.http.client.HttpClient
import org.apache.http.client.methods.{HttpPost, HttpUriRequest}
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.HttpClientBuilder

import scala.util.parsing.json.JSON

object VaultHelper extends SLF4JLogging {

  lazy val client: HttpClient = HttpClientBuilder.create().build()
  lazy val jsonTemplate: String = "{ \"token\" : \"_replace_\" }"

  def getTemporalToken(vaultHost: String, token: String): String = {
    val requestUrl = s"$vaultHost/v1/sys/wrapping/wrap"

    log.debug(s"Requesting temporal token: $requestUrl")

    val post = new HttpPost(requestUrl)

    post.addHeader("X-Vault-Token", token)
    post.addHeader("X-Vault-Wrap-TTL", "2000s")
    post.setEntity(new StringEntity(jsonTemplate.replace("_replace_", token)))

    getContentFromResponse(post, "wrap_info")("token").asInstanceOf[String]
  }

  private def getContentFromResponse(uriRequest: HttpUriRequest,
                                     parentField: String): Map[String, Any] = {
    val response = client.execute(uriRequest)
    val rd = new BufferedReader(new InputStreamReader(response.getEntity.getContent))
    val json = JSON.parseFull(
      Stream.continually(rd.readLine()).takeWhile(_ != null).mkString).get.asInstanceOf[Map[String, Any]]

    log.debug(s"getFrom Vault ${json.mkString("\n")}")
    if (response.getStatusLine.getStatusCode != 200) {
      val errors = json("errors").asInstanceOf[List[String]].mkString("\n")
      throw new RuntimeException(errors)
    } else json(parentField).asInstanceOf[Map[String, Any]]
  }
} 
Example 25
Source File: Util.scala    From incubator-retired-gearpump   with Apache License 2.0
package org.apache.gearpump.util

import java.io.{BufferedReader, File, FileInputStream, InputStreamReader}
import java.net.{ServerSocket, URI}
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.sys.process.Process
import scala.util.{Failure, Success, Try}

import com.typesafe.config.{Config, ConfigFactory}

import org.apache.gearpump.cluster.AppJar
import org.apache.gearpump.jarstore.JarStoreClient
import org.apache.gearpump.transport.HostPort

object Util {
  val LOG = LogUtil.getLogger(getClass)
  private val defaultUri = new URI("file:///")
  private val appNamePattern = "^[a-zA-Z_][a-zA-Z0-9_]+$".r.pattern

  def validApplicationName(appName: String): Boolean = {
    appNamePattern.matcher(appName).matches()
  }

  def getCurrentClassPath: Array[String] = {
    val classpath = System.getProperty("java.class.path")
    val classpathList = classpath.split(File.pathSeparator)
    classpathList
  }

  def version: String = {
    val home = System.getProperty(Constants.GEARPUMP_HOME)
    val version = Try {
      val versionFile = new FileInputStream(new File(home, "VERSION"))
      val reader = new BufferedReader(new InputStreamReader(versionFile))
      val version = reader.readLine().replace("version:=", "")
      versionFile.close()
      version
    }
    version match {
      case Success(version) =>
        version
      case Failure(ex) =>
        LOG.error("failed to read VERSION file, " + ex.getMessage)
        "Unknown-Version"
    }
  }

  def startProcess(options: Array[String], classPath: Array[String], mainClass: String,
      arguments: Array[String]): RichProcess = {
    val java = System.getProperty("java.home") + "/bin/java"

    val command = List(java) ++ options ++
      List("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ arguments
    LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")} " +
      s"\n ${options.mkString(" ")}")
    val logger = new ProcessLogRedirector()
    val process = Process(command).run(logger)
    new RichProcess(process, logger)
  }

  
  def resolveJvmSetting(conf: Config): AppJvmSettings = {

    import org.apache.gearpump.util.Constants._

    val appMasterVMArgs = Try(conf.getString(GEARPUMP_APPMASTER_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption
    val executorVMArgs = Try(conf.getString(GEARPUMP_EXECUTOR_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption

    val appMasterClassPath = Try(
      conf.getString(GEARPUMP_APPMASTER_EXTRA_CLASSPATH)
        .split("[;:]").filter(_.nonEmpty)).toOption

    val executorClassPath = Try(
      conf.getString(GEARPUMP_EXECUTOR_EXTRA_CLASSPATH)
        .split(File.pathSeparator).filter(_.nonEmpty)).toOption

    AppJvmSettings(
      JvmSetting(appMasterVMArgs.getOrElse(Array.empty[String]),
        appMasterClassPath.getOrElse(Array.empty[String])),
      JvmSetting(executorVMArgs
        .getOrElse(Array.empty[String]), executorClassPath.getOrElse(Array.empty[String])))
  }

  def asSubDirOfGearpumpHome(dir: String): File = {
    new File(System.getProperty(Constants.GEARPUMP_HOME), dir)

  }
} 
Example 26
Source File: ExtractNodes.scala    From tensorframes   with Apache License 2.0
package org.tensorframes.dsl

import java.io.{BufferedReader, InputStreamReader, File}
import java.nio.file.Files
import java.nio.charset.StandardCharsets
import org.tensorframes.Logging
import org.scalatest.Matchers

import scala.collection.JavaConverters._

object ExtractNodes extends Matchers with Logging {

  def executeCommand(py: String): Map[String, String] = {
    val content =
      s"""
         |from __future__ import print_function
         |import tensorflow as tf
         |
         |$py
         |g = tf.get_default_graph().as_graph_def()
         |for n in g.node:
         |    print(">>>>>", str(n.name), "<<<<<<")
         |    print(n)
       """.stripMargin
    val f = File.createTempFile("pythonTest", ".py")
    logTrace(s"Created temp file ${f.getAbsolutePath}")
    Files.write(f.toPath, content.getBytes(StandardCharsets.UTF_8))
    // Using the standard python installation in the PATH. It needs to have TensorFlow installed.
    val p = new ProcessBuilder("python", f.getAbsolutePath).start()
    val s = p.getInputStream
    val isr = new InputStreamReader(s)
    val br = new BufferedReader(isr)
    var res: String = ""
    var str: String = ""
    while(str != null) {
      str = br.readLine()
      if (str != null) {
        res = res + "\n" + str
      }
    }

    p.waitFor()
    assert(p.exitValue() === 0, (p.exitValue(),
      {
        println(content)
        s"===========\n$content\n==========="
      }))
    res.split(">>>>>").map(_.trim).filterNot(_.isEmpty).map { b =>
      val zs = b.split("\n")
      val node = zs.head.dropRight(7)
      val rest = zs.tail
      node -> rest.mkString("\n")
    } .toMap
  }

  def compareOutput(py: String, nodes: Operation*): Unit = {
    val g = TestUtilities.buildGraph(nodes.head, nodes.tail:_*)
    val m1 = g.getNodeList.asScala.map { n =>
      n.getName -> n.toString.trim
    } .toMap
    val pym = executeCommand(py)
    logTrace(s"m1 = '$m1'")
    logTrace(s"pym = '$pym'")
    assert((m1.keySet -- pym.keySet).isEmpty, {
      val diff = (m1.keySet -- pym.keySet).toSeq.sorted
      s"Found extra nodes in scala: $diff"
    })
    assert((pym.keySet -- m1.keySet).isEmpty, {
      val diff = (pym.keySet -- m1.keySet).toSeq.sorted
      s"Found extra nodes in python: $diff"
    })
    for (k <- m1.keySet) {
      assert(m1(k) === pym(k),
        s"scala=${m1(k)}\npython=${pym(k)}")
    }
  }
} 
Example 27
Source File: Utils.scala    From incubator-livy   with Apache License 2.0
package org.apache.livy

import java.io.{Closeable, File, InputStreamReader}
import java.net.URL
import java.nio.charset.StandardCharsets.UTF_8
import java.security.SecureRandom
import java.util.Properties

import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.concurrent.TimeoutException
import scala.concurrent.duration.Duration

import org.apache.commons.codec.binary.Base64

object Utils {
  def getPropertiesFromFile(file: File): Map[String, String] = {
    loadProperties(file.toURI().toURL())
  }

  def loadProperties(url: URL): Map[String, String] = {
    val inReader = new InputStreamReader(url.openStream(), UTF_8)
    try {
      val properties = new Properties()
      properties.load(inReader)
      properties.stringPropertyNames().asScala.map { k =>
        (k, properties.getProperty(k).trim())
      }.toMap
    } finally {
      inReader.close()
    }
  }

  
  def isProcessAlive(process: Process): Boolean = {
    try {
      process.exitValue()
      false
    } catch {
      case _: IllegalThreadStateException =>
        true
    }
  }

  def startDaemonThread(name: String)(f: => Unit): Thread = {
    val thread = new Thread(name) {
      override def run(): Unit = f
    }
    thread.setDaemon(true)
    thread.start()
    thread
  }

  def usingResource[A <: Closeable, B](resource: A)(f: A => B): B = {
    try {
      f(resource)
    } finally {
      resource.close()
    }
  }

  def createSecret(secretBitLength: Int): String = {
    val rnd = new SecureRandom()
    val secretBytes = new Array[Byte](secretBitLength / java.lang.Byte.SIZE)
    rnd.nextBytes(secretBytes)

    Base64.encodeBase64String(secretBytes)
  }
} 
Example 28
Source File: Build.scala    From lagom   with Apache License 2.0
import java.net.HttpURLConnection
import java.io.BufferedReader
import java.io.InputStreamReader

import sbt.IO
import sbt.File

object DevModeBuild {

  def waitForReloads(file: File, count: Int): Unit = {
    waitFor[Int](
      IO.readLines(file).count(_.nonEmpty),
      _ == count,
      actual => s"Expected $count reloads, but only got $actual"
    )
  }

  def waitFor[T](check: => T, assertion: T => Boolean, error: T => String): Unit = {
    var checks = 0
    var actual = check
    while (!assertion(actual) && checks < 10) {
      Thread.sleep(1000)
      actual = check
      checks += 1
    }
    if (!assertion(actual)) {
      throw new RuntimeException(error(actual))
    }
  }

} 
Example 29
Source File: CustomReceiver.scala    From sparkoscope   with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself if isStopped() returns false
  }

  // Create a socket connection and receive data until the receiver is stopped
  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
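Wiring the receiver into a job mirrors the upstream Spark example; a driver sketch (the object name is this sketch's own, and host/port come from the command line):

object CustomReceiverDriver {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("CustomReceiver")
    val ssc = new StreamingContext(sparkConf, Seconds(1))

    // receiverStream plugs any Receiver[T] implementation into the DStream API.
    val lines = ssc.receiverStream(new CustomReceiver(args(0), args(1).toInt))
    val wordCounts = lines.flatMap(_.split(" ")).map(x => (x, 1)).reduceByKey(_ + _)
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}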
Example 30
Source File: StreamMetadata.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


case class StreamMetadata(id: String)

object StreamMetadata extends Logging {
  implicit val format = Serialization.formats(NoTypeHints)

  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = FileSystem.get(hadoopConf)
      output = fs.create(metadataFile)
      // write as UTF-8 rather than the platform default charset
      val writer = new OutputStreamWriter(output, StandardCharsets.UTF_8)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
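
  // Only the write half survived in this snippet; a read counterpart in the
  // same style (a sketch modelled on the upstream Spark implementation,
  // reusing the implicit formats above) decodes back through an InputStreamReader.
  def read(metadataFile: Path, hadoopConf: Configuration): Option[StreamMetadata] = {
    val fs = FileSystem.get(hadoopConf)
    if (fs.exists(metadataFile)) {
      var input: FSDataInputStream = null
      try {
        input = fs.open(metadataFile)
        val reader = new InputStreamReader(input, StandardCharsets.UTF_8)
        Some(Serialization.read[StreamMetadata](reader))
      } catch {
        case NonFatal(e) =>
          logError(s"Error reading stream metadata from $metadataFile", e)
          throw e
      } finally {
        IOUtils.closeQuietly(input)
      }
    } else None
  }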
} 
Example 31
Source File: TestCrLf.scala    From eidos   with Apache License 2.0
package org.clulab.wm.eidos.system

import java.io.File
import java.io.InputStreamReader

import org.clulab.wm.eidos.test.TestUtils._
import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.Sourcer

class TestCrLf extends Test {
  
  behavior of "resources"

  def test(file: File): Unit = {
    val path = file.getCanonicalPath
    val buffer = new Array[Char](1024)

    it should "not have any CrLf line endings in " + path in {
      val inputReader = new InputStreamReader(
        FileUtils.newBufferedInputStream(file),
        Sourcer.utf8
      )
      val hasCrLf = inputReader.autoClose { inputReader =>
        var hasCrLf = false
        var endedWithCr = false

        var readCount = inputReader.read(buffer)
        while (!hasCrLf && readCount > 0) {
          hasCrLf |= (endedWithCr && buffer(0) == '\n')
          hasCrLf |= buffer.take(readCount).containsSlice("\r\n") // scan only the chars just read, not stale buffer contents
          endedWithCr = buffer(readCount - 1) == '\r'
          readCount = inputReader.read(buffer)
        }
        hasCrLf
      }

      hasCrLf should be (false)
    }
  }
  
  // https://groups.google.com/forum/#!topic/scala-user/WrmYHHzcJPw  
  type Operation = File => Unit

  val wantedSuffixes: Seq[String] = Seq(".conf", ".yml", ".tsv", ".kb", ".txt")
  val unwantedSuffixes: Seq[String] = Seq.empty

  def fileMatches(file: File): Boolean = {
    val canonicalPath = file.getCanonicalPath.replace('\\', '/')

    wantedSuffixes.exists(suffix => canonicalPath.endsWith(suffix)) &&
    !unwantedSuffixes.exists(suffix => canonicalPath.endsWith(suffix))
  }

  def directoryMatches(file: File): Boolean = true
  
  def doOperation(path: String)(operation: Operation): Unit = {
    for (files <- Option(new File(path).listFiles); file <- files) {
        if (file.isFile && fileMatches(file))
          operation(file)
        if (file.isDirectory && directoryMatches(file))
          doOperation(file.getAbsolutePath)(operation)
    }
  }
  
  doOperation(new File("./src/main/resources").getCanonicalPath)(test)
} 
Example 32
Source File: BlockingIoExample.scala    From netty-in-action-scala   with Apache License 2.0
package nia.chapter1.scaladsl

import java.io.{ BufferedReader, IOException, InputStreamReader, PrintWriter }
import java.net.ServerSocket


object BlockingIoExample {

  // #snip
  @throws[IOException]
  def serve(portNumber: Int): Unit = {
    // Create a new ServerSocket to listen for connection requests on the given port
    val serverSocket = new ServerSocket(portNumber)
    // The call to accept() blocks until a connection is established
    val clientSocket = serverSocket.accept
    // These stream objects are derived from the socket's stream objects
    val in = new BufferedReader(new InputStreamReader(clientSocket.getInputStream))
    val out = new PrintWriter(clientSocket.getOutputStream, true)
    var request: String = in.readLine
    var response: String = null
    // Start the processing loop
    while (request ne null) {
      if ("Done" != request) {
        // Pass the request to the server's processing method
        response = processRequest(request)
        // Send the server's response to the client
        out.println(response)
        // Continue with the processing loop
      }
      }
      request = in.readLine
    }
    // #snip
  }
  private def processRequest(request: String): String = "Processed"
} 
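A matching client sketch for the blocking server above (hypothetical host and port; each call blocks just like the server side):

object BlockingClientSketch {
  def main(args: Array[String]): Unit = {
    val socket = new java.net.Socket("localhost", 8080) // hypothetical endpoint
    try {
      val out = new PrintWriter(socket.getOutputStream, true)
      val in = new BufferedReader(new InputStreamReader(socket.getInputStream))
      out.println("hello")   // blocks until the bytes are handed off
      println(in.readLine()) // blocks until the server writes a full line
      out.println("Done")    // the server loop skips processing for "Done"
    } finally {
      socket.close()
    }
  }
}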
Example 33
Source File: PreprocessSusy.scala    From DynaML   with Apache License 2.0
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

case class BufferedReaderIterator(reader: BufferedReader) extends Iterator[String] {
  override def hasNext: Boolean = reader.ready // parameterless, to match Iterator.hasNext
  override def next() = reader.readLine()
}

object GzFileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new GZIPInputStream(
          new FileInputStream(file)), encoding))
  }
}

object PreprocessSusy {
  def apply(args: String = "") = {
    val iterator:BufferedReader = GzFileIterator(new java.io.File(args+"SUSY.csv.gz"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"susy.csv")
    val writert = CSVWriter.open(args+"susytest.csv")

    println("Outputting train and test csv files ...")
    while(line != null && line != "\n") { // was ||, which is always true and fails with an NPE once readLine returns null

      val row = line.split(',').reverse
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 1.0) row(i).toDouble else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 34
Source File: PreprocessForestCover.scala    From DynaML   with Apache License 2.0
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

object FileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new FileInputStream(file), encoding))
  }
}

object PreprocessForestCover {
  def apply(args: String = "") = {
    val iterator:BufferedReader = FileIterator(new java.io.File(args+"covtype.data"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"cover.csv")
    val writert = CSVWriter.open(args+"covertest.csv")

    println("Outputting train and test csv files ...")
    while(line != null) {

      val row = line.split(',')
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 2.0) 1.0 else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 35
Source File: index_ceeaus_all.scala    From attic-nlp4l   with Apache License 2.0
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core.analysis.AnalyzerBuilder
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus-all"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = file.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = "all"
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  val body_set = if(ja) Set(Field("body_ja", body)) else Set(Field("body_en", body), Field("body_ws", body))
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat)) ++ body_set
  )
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS", "PLAIN").children()
// write English docs
c.filter(e => e.name.indexOf("cjejus")<0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, false)))
// write Japanese docs
c.filter(e => e.name.indexOf("cjejus")>=0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, true)))
writer.close

// search test
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
})

// search test for ch4
val results2 = searcher.search(query=new TermQuery(new Term("body_ws", "still,")), rows=10)

results2.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 36
Source File: index_ceeaus.scala    From attic-nlp4l   with Apache License 2.0
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = file.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = if(file.indexOf("smk") >= 0) "smk" else "ptj"   // smoking or part time job
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat),
    Field(if(ja) "body_ja" else "body_en", body)
  ))
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS").children()
// write English docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")<0 && e.name.indexOf("PLAIN")<0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, false)))
)
// write Japanese docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")>=0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, true)))
)
writer.close

// search
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 37
Source File: PullExample.scala    From Hands-On-Data-Analysis-with-Scala   with MIT License
package handson.example.extract

import java.io.{BufferedReader, InputStreamReader}
import java.util.function.Consumer

import scala.collection.mutable.ListBuffer


class DataConsumer extends Consumer[String] {
  val buf = ListBuffer[String]()
  override def accept(t: String): Unit = {
    buf += t
  }
}
object PullExample {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    val dataConsumer = new DataConsumer
    reader.lines().forEach(dataConsumer)
    dataConsumer.buf.toList.take(5).foreach(println)
  }

} 
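Since Scala 2.12 a function literal also satisfies java.util.function.Consumer, and the Java Stream can be walked as a Scala iterator, so the helper class is optional; an equivalent sketch:

import java.io.{BufferedReader, InputStreamReader}

import scala.collection.JavaConverters._

object PullExampleAlt {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    try {
      // No Consumer subclass needed: iterate the Java Stream as a Scala iterator.
      reader.lines().iterator().asScala.take(5).foreach(println)
    } finally {
      reader.close()
    }
  }
}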
Example 38
Source File: SaddlExample.scala    From Hands-On-Data-Analysis-with-Scala   with MIT License
package handson.example.saddle

import java.io.{BufferedReader, InputStreamReader}

import org.saddle.io._

class SaddleCsvSource(url: String) extends CsvSource {
  val reader = new BufferedReader(new InputStreamReader(new java.net.URL(url).openStream()))
  override def readLine: String = {
    reader.readLine()
  }
}

object SaddlExample {
  def main(args: Array[String]): Unit = {
    val file = new SaddleCsvSource("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD")
    val frameOrig = CsvParser.parse(file)
    // Get the header
    val head = frameOrig.rowSlice(0,1).rowAt(0)
    // Remove header row and attach the header back as column names
    val frame = frameOrig.rowSlice(1, frameOrig.numRows).mapColIndex(i => head.at(i).get)
    // Get random sample of 2% of dataset
    val sample = frame.rfilter(_ => scala.util.Random.nextDouble() < 0.02)
    sample.print()
  }
} 
Example 39
Source File: CsvParserExample.scala    From Hands-On-Data-Analysis-with-Scala   with MIT License
package handson.example.csv

import java.io.{BufferedReader, InputStreamReader}
import java.util.function.Consumer

import org.apache.commons.csv.{CSVFormat, CSVPrinter, CSVRecord}

import scala.collection.mutable.ListBuffer


class DataConsumer extends Consumer[CSVRecord] {
  val buf = ListBuffer[CSVRecord]()
  override def accept(t: CSVRecord): Unit = {
    buf += t
  }
}

object CsvParserExample {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    val csvParser = CSVFormat.RFC4180.withFirstRecordAsHeader().parse(reader)
    val dataConsumer = new DataConsumer
    csvParser.forEach(dataConsumer)
    val allRecords = dataConsumer.buf.toList
    allRecords.take(3).foreach(println)

    val csvPrinter = new CSVPrinter(System.out, CSVFormat.RFC4180.withHeader("fname", "lname", "age"))
    csvPrinter.printRecord("Jon", "Doe", "21")
    csvPrinter.printRecord("James", "Bond", "39")
    csvPrinter.flush()

  }

} 
Example 40
Source File: Markdown.scala    From docspell   with GNU General Public License v3.0
package docspell.convert.flexmark

import java.io.{InputStream, InputStreamReader}
import java.nio.charset.Charset
import java.util

import scala.util.Try

import cats.effect.Sync
import cats.implicits._
import fs2.Stream

import docspell.common._

import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension
import com.vladsch.flexmark.ext.tables.TablesExtension
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.parser.Parser
import com.vladsch.flexmark.util.data.{DataKey, MutableDataSet}

object Markdown {

  def toHtml(
      is: InputStream,
      cfg: MarkdownConfig,
      cs: Charset
  ): Either[Throwable, String] = {
    val p = createParser()
    val r = createRenderer()
    Try {
      val reader = new InputStreamReader(is, cs)
      val doc    = p.parseReader(reader)
      wrapHtml(r.render(doc), cfg)
    }.toEither
  }

  def toHtml(md: String, cfg: MarkdownConfig): String = {
    val p   = createParser()
    val r   = createRenderer()
    val doc = p.parse(md)
    wrapHtml(r.render(doc), cfg)
  }

  def toHtml[F[_]: Sync](
      data: Stream[F, Byte],
      cfg: MarkdownConfig,
      cs: Charset
  ): F[String] =
    data.through(Binary.decode(cs)).compile.foldMonoid.map(str => toHtml(str, cfg))

  private def wrapHtml(body: String, cfg: MarkdownConfig): String =
    s"""<!DOCTYPE html>
       |<html>
       |<head>
       |<meta charset="utf-8"/>
       |<style>
       |${cfg.internalCss}
       |</style>
       |</head>
       |<body>
       |$body
       |</body>
       |</html>
       |""".stripMargin

  private def createParser(): Parser = {
    val opts = new MutableDataSet()
    opts.set(
      Parser.EXTENSIONS.asInstanceOf[DataKey[util.Collection[_]]],
      util.Arrays.asList(TablesExtension.create(), StrikethroughExtension.create())
    );

    Parser.builder(opts).build()
  }

  private def createRenderer(): HtmlRenderer = {
    val opts = new MutableDataSet()
    HtmlRenderer.builder(opts).build()
  }
} 
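A usage sketch for the stream-based overload above; the MarkdownConfig constructor shown here is an assumption (only its internalCss field is used by wrapHtml):

object MarkdownSketch {
  def main(args: Array[String]): Unit = {
    val cfg = MarkdownConfig("body { font-family: sans-serif; }") // assumed shape
    val in = new java.io.ByteArrayInputStream(
      "# Title\n\nSome *emphasised* text".getBytes(java.nio.charset.StandardCharsets.UTF_8))
    Markdown.toHtml(in, cfg, java.nio.charset.StandardCharsets.UTF_8) match {
      case Right(html) => println(html)
      case Left(err)   => println(s"markdown failed: ${err.getMessage}")
    }
  }
}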
Example 41
Source File: DataFinder.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.behavioral.template

import java.io.{InputStreamReader, ByteArrayInputStream}

import com.github.tototoshi.csv.CSVReader
import com.ivan.nikolov.behavioral.template.model.Person
import org.json4s.{StringInput, DefaultFormats}
import org.json4s.jackson.JsonMethods

abstract class DataFinder[T, Y] {

  def find(f: T => Option[Y]): Option[Y] =
    try {
      val data = readData()
      val parsed = parse(data)
      f(parsed)
    } finally {
      cleanup()
    }

  def readData(): Array[Byte]

  def parse(data: Array[Byte]): T

  def cleanup()
}

class JsonDataFinder extends DataFinder[List[Person], Person] {
  implicit val formats = DefaultFormats

  override def readData(): Array[Byte] = {
    val stream = this.getClass.getResourceAsStream("people.json")
    Stream.continually(stream.read).takeWhile(_ != -1).map(_.toByte).toArray
  }

  override def cleanup(): Unit = {
    System.out.println("Reading json: nothing to do.")
  }

  override def parse(data: Array[Byte]): List[Person] =
    JsonMethods.parse(StringInput(new String(data, "UTF-8"))).extract[List[Person]]
}

class CSVDataFinder extends DataFinder[List[Person], Person] {
  override def readData(): Array[Byte] = {
    val stream = this.getClass.getResourceAsStream("people.csv")
    Stream.continually(stream.read).takeWhile(_ != -1).map(_.toByte).toArray
  }

  override def cleanup(): Unit = {
    System.out.println("Reading csv: nothing to do.")
  }

  override def parse(data: Array[Byte]): List[Person] =
    CSVReader.open(new InputStreamReader(new ByteArrayInputStream(data))).all().map {
      case List(name, age, address) =>
        Person(name, age.toInt, address)
    }
}


object DataFinderExample {
  def main(args: Array[String]): Unit = {
    val jsonDataFinder: DataFinder[List[Person], Person] = new JsonDataFinder
    val csvDataFinder: DataFinder[List[Person], Person] = new CSVDataFinder

    System.out.println(s"Find a person with name Ivan in the json: ${jsonDataFinder.find(_.find(_.name == "Ivan"))}")
    System.out.println(s"Find a person with name James in the json: ${jsonDataFinder.find(_.find(_.name == "James"))}")

    System.out.println(s"Find a person with name Maria in the csv: ${csvDataFinder.find(_.find(_.name == "Maria"))}")
    System.out.println(s"Find a person with name Alice in the csv: ${csvDataFinder.find(_.find(_.name == "Alice"))}")
  }
} 
Example 42
Source File: Parser.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.behavioral.strategy

import java.io.InputStreamReader

import com.github.tototoshi.csv.CSVReader
import com.ivan.nikolov.behavioral.strategy.model.Person

import org.json4s._
import org.json4s.jackson.JsonMethods

trait Parser[T] {
  def parse(file: String): List[T]
}

class CSVParser extends Parser[Person] {
  override def parse(file: String): List[Person] =
    CSVReader.open(new InputStreamReader(this.getClass.getResourceAsStream(file))).all().map {
      case List(name, age, address) =>
        Person(name, age.toInt, address)
    }
}

class JsonParser extends Parser[Person] {
  implicit val formats = DefaultFormats
  
  override def parse(file: String): List[Person] =
    JsonMethods.parse(StreamInput(this.getClass.getResourceAsStream(file))).extract[List[Person]]
}

object Parser {
  def apply(filename: String): Parser[Person] =
    filename match {
      case f if f.endsWith(".json") => new JsonParser
      case f if f.endsWith(".csv") => new CSVParser
      case f => throw new RuntimeException(s"Unknown format: $f")
    }
}

class PersonApplication[T](parser: Parser[T]) {
  
  def write(file: String): Unit = {
    System.out.println(s"Got the following data ${parser.parse(file)}")
  }
}

object ParserExample {
  def main(args: Array[String]): Unit = {
    val csvPeople = Parser("people.csv")
    val jsonPeople = Parser("people.json")
    
    val applicationCsv = new PersonApplication(csvPeople)
    val applicationJson = new PersonApplication(jsonPeople)
    
    System.out.println("Using the csv: ")
    applicationCsv.write("people.csv")
    
    System.out.println("Using the json: ")
    applicationJson.write("people.json")
  }
} 
Example 43
Source File: ParsingStrategy.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.behavioral.strategy

import java.io.InputStreamReader

import com.github.tototoshi.csv.CSVReader
import com.ivan.nikolov.behavioral.strategy.model.Person
import org.json4s.{StreamInput, DefaultFormats}
import org.json4s.jackson.JsonMethods

class Application[T](strategy: (String) => List[T]) {
  def write(file: String): Unit = {
    System.out.println(s"Got the following data ${strategy(file)}")
  }
}

object StrategyFactory {
  implicit val formats = DefaultFormats
  
  def apply(filename: String): (String) => List[Person] =
    filename match {
      case f if f.endsWith(".json") => parseJson
      case f if f.endsWith(".csv") => parseCsv
      case f => throw new RuntimeException(s"Unknown format: $f")
    }
  
  def parseJson(file: String): List[Person] =
    JsonMethods.parse(StreamInput(this.getClass.getResourceAsStream(file))).extract[List[Person]]
  
  def parseCsv(file: String): List[Person] =
    CSVReader.open(new InputStreamReader(this.getClass.getResourceAsStream(file))).all().map {
      case List(name, age, address) =>
        Person(name, age.toInt, address)
    }
}

object StrategyExample {
  def main(args: Array[String]): Unit = {
    val applicationCsv = new Application[Person](StrategyFactory("people.csv"))
    val applicationJson = new Application[Person](StrategyFactory("people.json"))

    System.out.println("Using the csv: ")
    applicationCsv.write("people.csv")

    System.out.println("Using the json: ")
    applicationJson.write("people.json")
  }
} 
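Because the strategy is just a plain (String) => List[Person] value, the factory can also be bypassed; a sketch:

object StrategyDirectExample {
  def main(args: Array[String]): Unit = {
    // Pass one of the parsing functions directly...
    val jsonApp = new Application[Person](StrategyFactory.parseJson)
    jsonApp.write("people.json")

    // ...or stub the strategy out for a test, with no new class required.
    val stubApp = new Application[Person](_ => List(Person("Test", 1, "Nowhere")))
    stubApp.write("ignored.txt")
  }
}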
Example 44
Source File: FileReader.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.structural.proxy

import java.io.{BufferedReader, InputStreamReader}
import scala.collection.JavaConverters._

trait FileReader {
  def readFileContents(): String
}

class FileReaderReal(filename: String) extends FileReader {
  val contents = {
    val stream = this.getClass.getResourceAsStream(filename) 
    val reader = new BufferedReader(
      new InputStreamReader(
        stream
      )
    )
    try {
      reader.lines().iterator().asScala.mkString(System.getProperty("line.separator"))
    } finally {
      reader.close()
      stream.close()
    }
  }
  
  System.out.println(s"Finished reading the actual file: $filename")
  
  override def readFileContents(): String = contents
}

class FileReaderProxy(filename: String) extends FileReader {
  private var fileReader: FileReaderReal = null
  
  override def readFileContents(): String = {
    if (fileReader == null) {
      fileReader = new FileReaderReal(filename)
    }
    fileReader.readFileContents()
  }
}

object ProxyExample {
  def main(args: Array[String]): Unit = {
    val fileMap = Map(
      "file1.txt" -> new FileReaderProxy("file1.txt"),
      "file2.txt" -> new FileReaderProxy("file2.txt"),
      "file3.txt" -> new FileReaderProxy("file3.txt"),
      "file4.txt" -> new FileReaderReal("file1.txt")
    )
    System.out.println("Created the map. You should have seen file1.txt read because it wasn't used in a proxy.")
    System.out.println(s"Reading file1.txt from the proxy: ${fileMap("file1.txt").readFileContents()}")
    System.out.println(s"Reading file3.txt from the proxy: ${fileMap("file3.txt").readFileContents()}")
  }
} 
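In idiomatic Scala the null-guarded var in FileReaderProxy is usually replaced with a lazy val, which keeps the construct-on-first-use behaviour and adds thread-safe initialization; a sketch:

class FileReaderProxyLazy(filename: String) extends FileReader {
  // lazy val defers constructing FileReaderReal (and therefore reading the
  // file in its constructor) until the first access, with synchronized init.
  private lazy val fileReader = new FileReaderReal(filename)

  override def readFileContents(): String = fileReader.readFileContents()
}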
Example 45
package com.ivan.nikolov.structural.decorator

import java.io.{BufferedInputStream, InputStreamReader, BufferedReader, ByteArrayOutputStream}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

trait CapitalizedInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

trait CompressingInputReaderTrait extends InputReader with LazyLogging {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

trait Base64EncoderInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object StackableTraitsExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object StackableTraitsBigExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait with Base64EncoderInputReaderTrait with CompressingInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
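With stackable traits the super calls run right-to-left through the class linearization, so in StackableTraitsBigExample each line is upper-cased first, then Base64-encoded, then compressed; reordering the mix-ins reorders the pipeline, e.g. inside the same try block:

// Same traits, different order: each line is now compressed first,
// then Base64-encoded, then upper-cased.
val reader = new AdvancedInputReader(stream) with CompressingInputReaderTrait with Base64EncoderInputReaderTrait with CapitalizedInputReaderTrait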
Example 46
Source File: InputReaderDecorator.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.structural.decorator

import java.io.{InputStreamReader, BufferedInputStream, ByteArrayOutputStream, BufferedReader}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

abstract class InputReaderDecorator(inputReader: InputReader) extends InputReader {
  override def readLines(): Stream[String] = inputReader.readLines()
}

class CapitalizedInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

class CompressingInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) with LazyLogging {
  override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

class Base64EncoderInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object DecoratorExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CapitalizedInputReader(new AdvancedInputReader(stream))
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object DecoratorExampleBig {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CompressingInputReader(
        new Base64EncoderInputReader(
          new CapitalizedInputReader(
            new AdvancedInputReader(stream)
          )
        )
      )
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
Example 47
Source File: IO.scala    From scala-commons   with MIT License
package com.avsystem.commons
package testutil

import java.io.{BufferedReader, File, FileWriter, InputStreamReader}

import scala.annotation.tailrec

object IO {
  def readTestResource(path: String): String = {
    val reader = new BufferedReader(new InputStreamReader(getClass.getResourceAsStream(path)))
    val buf = new Array[Char](2048)
    @tailrec def loop(sb: JStringBuilder): String = {
      reader.read(buf) match {
        case -1 => sb.toString
        case count => loop(sb.append(buf, 0, count))
      }
    }
    try loop(new JStringBuilder) finally reader.close()
  }

  def writeTestResource(path: String, data: String): Unit = {
    // assuming working dir used by intellij
    val writer = new FileWriter(s"src/test/resources$path".replaceAllLiterally("/", File.separator))
    try writer.write(data) finally writer.close()
  }
} 
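A hypothetical round trip with these helpers, with illustrative resource paths:

// Read a checked-in fixture and write a transformed copy back under
// src/test/resources (per the working-directory assumption noted above).
val expected = IO.readTestResource("/fixtures/expected.txt")
IO.writeTestResource("/fixtures/actual.txt", expected.trim)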
Example 61
Source File: Util.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.verifier

import java.io.{BufferedReader, InputStreamReader}

package object util {
  
  def runCommand(cmd: String, args: scala.Seq[String],
                 logger: String => Unit = _ => ()): StdOutErrAndTime = {
    logger(s"""Executing: ${cmd} ${args.mkString(" ")}""")
    val pbargs: java.util.List[String] = {
      val lst = new java.util.ArrayList[String]()
      (cmd +: args).foreach(lst.add)
      lst
    }
    val builder = new ProcessBuilder(pbargs)
    val startTime: Long = System.nanoTime()
    val p = builder.start()

    val outReader = new BufferedReader(new InputStreamReader(p.getInputStream))
    val outStr = {
      Iterator.continually(outReader.readLine()).takeWhile(_ != null).mkString("\n")
    }

    val errReader = new BufferedReader(new InputStreamReader(p.getErrorStream))
    val errStr = {
      Iterator.continually(errReader.readLine()).takeWhile(_ != null).mkString("\n")
    }

    val r = p.waitFor()
    val endTime: Long = System.nanoTime()

    if (r != 0) {
      throw new RuntimeException(
        s"""Command failed with code ${r}: ${cmd} ${args.mkString(" ")}"""
          + s"\nStandard error:\n" + errStr)
    }
    StdOutErrAndTime(outStr, errStr, endTime - startTime)
  }
} 
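runCommand returns a StdOutErrAndTime, whose definition is not part of this excerpt. Judging from how it is constructed above, it is presumably a plain case class along these lines:

// Assumed shape, inferred from StdOutErrAndTime(outStr, errStr, endTime - startTime).
case class StdOutErrAndTime(stdOut: String, stdErr: String, nanos: Long)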
Example 62
Source File: ChangesReceiver.scala    From bahir   with Apache License 2.0 5 votes vote down vote up
package org.apache.bahir.cloudant.internal

import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeUnit

import com.google.gson.JsonParser
import okhttp3._

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver

import org.apache.bahir.cloudant.CloudantChangesConfig
import org.apache.bahir.cloudant.common._

class ChangesReceiver(config: CloudantChangesConfig)
// enclosing object, inferred from the file name and the logError call below
object StreamMetadata extends Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Cloudant Receiver") {
      override def run() { receive() }
    }.start()
  }

  private def receive(): Unit = {
    val okHttpClient: OkHttpClient = new OkHttpClient.Builder()
        .connectTimeout(5, TimeUnit.SECONDS)
        .readTimeout(60, TimeUnit.SECONDS)
        .build
    val url = config.getChangesReceiverUrl.toString

    val builder = new Request.Builder().url(url)
    if (config.username != null) {
      val credential = Credentials.basic(config.username, config.password)
      builder.header("Authorization", credential)
    }
    if(config.getSelector != null) {
      val jsonType = MediaType.parse("application/json; charset=utf-8")
      val selector = "{\"selector\":" + config.getSelector + "}"
      val selectorBody = RequestBody.create(jsonType, selector)
      builder.post(selectorBody)
    }

    val request = builder.build
    val response = okHttpClient.newCall(request).execute
    val status_code = response.code

    if (status_code == 200) {
      val changesInputStream = response.body.byteStream
      if (changesInputStream != null) {
        val bufferedReader = new BufferedReader(new InputStreamReader(changesInputStream))
        // `while ((x = read()) != null)` is a Java idiom that does not work in
        // Scala: the assignment evaluates to Unit, so the test never sees the row.
        var json = ChangesRowScanner.readRowFromReader(bufferedReader)
        while (!isStopped() && json != null) {
          if (!json.getDoc.has("_deleted")) {
            store(json.getDoc.toString)
          }
          json = ChangesRowScanner.readRowFromReader(bufferedReader)
        }
      }
    } else {
      val responseAsJson = new JsonParser().parse(response.body.string)
      val errorMsg = "Error retrieving _changes feed data from database " + "'" +
        config.getDbname + "' with response code " + status_code + ": " + responseAsJson.toString
      reportError(errorMsg, new CloudantException(errorMsg))
      CloudantChangesConfig.receiverErrorMsg = errorMsg
    }
  }

  override def onStop(): Unit = {
  }
} 
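One pitfall worth noting in this receiver: the Java idiom while ((json = readRow(reader)) != null) does not terminate in Scala, because an assignment evaluates to Unit rather than the assigned value, so the loop condition never inspects the row. The read loop in receive() therefore reads explicitly before each test; an equivalent formulation uses Iterator.continually:

// Sketch of the same read loop, reusing the names from the example above.
Iterator.continually(ChangesRowScanner.readRowFromReader(bufferedReader))
  .takeWhile(row => row != null && !isStopped())
  .filterNot(row => row.getDoc.has("_deleted"))
  .foreach(row => store(row.getDoc.toString))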
Example 63
Source File: CloudantReceiver.scala    From bahir   with Apache License 2.0 5 votes vote down vote up
package org.apache.bahir.cloudant

import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeUnit

import okhttp3._

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver

import org.apache.bahir.cloudant.common._

class CloudantReceiver(sparkConf: SparkConf, cloudantParams: Map[String, String])
    extends Receiver[String](StorageLevel.MEMORY_AND_DISK) {
  // CloudantChangesConfig requires `_changes` endpoint option
  lazy val config: CloudantChangesConfig = {
    JsonStoreConfigManager.getConfig(sparkConf, cloudantParams
      + ("cloudant.endpoint" -> JsonStoreConfigManager.CHANGES_INDEX)
    ).asInstanceOf[CloudantChangesConfig]
  }

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Cloudant Receiver") {
      override def run() { receive() }
    }.start()
  }

  private def receive(): Unit = {
    val okHttpClient: OkHttpClient = new OkHttpClient.Builder()
      .connectTimeout(5, TimeUnit.SECONDS)
      .readTimeout(60, TimeUnit.SECONDS)
      .build
    val url = config.getChangesReceiverUrl.toString

    val builder = new Request.Builder().url(url)
    if (config.username != null) {
      val credential = Credentials.basic(config.username, config.password)
      builder.header("Authorization", credential)
    }
    if(config.getSelector != null) {
      val jsonType = MediaType.parse("application/json; charset=utf-8")
      val selector = "{\"selector\":" + config.getSelector + "}"
      val selectorBody = RequestBody.create(jsonType, selector)
      builder.post(selectorBody)
    }

    val request = builder.build
    val response = okHttpClient.newCall(request).execute
    val status_code = response.code

    if (status_code == 200) {
      val changesInputStream = response.body.byteStream
      if (changesInputStream != null) {
        val bufferedReader = new BufferedReader(new InputStreamReader(changesInputStream))
        // `while ((x = read()) != null)` is a Java idiom that does not work in
        // Scala: the assignment evaluates to Unit, so the test never sees the row.
        var json = ChangesRowScanner.readRowFromReader(bufferedReader)
        while (!isStopped() && json != null) {
          if (!json.getDoc.has("_deleted")) {
            store(json.getDoc.toString)
          }
          json = ChangesRowScanner.readRowFromReader(bufferedReader)
        }
      }
    } else {
      val errorMsg = "Error retrieving _changes feed " + config.getDbname + ": " + status_code
      reportError(errorMsg, new CloudantException(errorMsg))
    }
  }

  def onStop(): Unit = {
  }
} 
Example 64
Source File: Conf.scala    From CkoocNLP   with Apache License 2.0 5 votes vote down vote up
package config

import java.io.{File, FileInputStream, InputStreamReader}
import java.util.Properties

import scala.collection.mutable

// enclosing object, inferred from the file name Conf.scala
object Conf {
  def loadConf(filePath: String): mutable.LinkedHashMap[String, String] = {
    val kvMap = mutable.LinkedHashMap[String, String]()

    val properties = new Properties()
    properties.load(new InputStreamReader(new FileInputStream(filePath), "UTF-8"))
    val propertyNameArray = properties.stringPropertyNames().toArray(new Array[String](0))

    val fileName = new File(filePath).getName

    println(s"============ 加载配置文件 $fileName ================")
    for (propertyName <- propertyNameArray) {
      val property = properties.getProperty(propertyName).replaceAll("\"", "").trim
      println(propertyName + ": " + property)
      kvMap.put(propertyName, property)
    }
    println("==========================================================")

    kvMap
  }
} 
Example 65
Source File: IoTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.util

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.fulcrumgenomics.testing.UnitSpec
import htsjdk.samtools.util.BlockCompressedInputStream


class IoTest extends UnitSpec {

  Seq(".bgz", ".bgzip").foreach { ext =>
    it should s"round trip data to a bgzipped file with extension ${ext}" in {
      val text = "This is a stupid little text fragment for compression. Yay compression!"
      val data = Seq.fill(10)(text)
      val f = makeTempFile("test.", ext)
      Io.writeLines(f, data)

      val stream = new BufferedInputStream(new FileInputStream(f.toFile))
      BlockCompressedInputStream.isValidFile(stream) shouldBe true
      val reread = Io.readLines(f).toIndexedSeq

      reread shouldBe data
    }
  }
} 
Example 66
Source File: Version.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.cli.publish.version

import java.io.{BufferedReader, File, InputStreamReader}

import caseapp._

object Version extends CaseApp[Options] {

  private def output(cmd: Seq[String], dir: File): String = {
    // adapted from https://stackoverflow.com/a/16714180/3714539
    val b = new ProcessBuilder(cmd: _*).directory(dir)
    val p = b.start()
    val reader = new BufferedReader(new InputStreamReader(p.getInputStream))
    val builder = new StringBuilder
    var line: String = null
    while ({
      line = reader.readLine()
      line != null
    }) {
      builder.append(line)
      builder.append(System.getProperty("line.separator"))
    }
    val retCode = p.waitFor()
    if (retCode == 0)
      builder.toString
    else
      throw new Exception(s"Command ${cmd.mkString(" ")} exited with code $retCode")
  }

  def version(dir: File): Either[String, String] = {

    val tag = output(Seq("git", "describe", "--tags", "--match", "v[0-9]*", "--abbrev=0"), dir).trim

    if (tag.isEmpty)
      Left("No git tag like v[0-9]* found")
    else {
      val dist = output(Seq("git", "rev-list", "--count", s"$tag...HEAD"), dir).trim.toInt // can throw…

      if (dist == 0)
        Right(tag)
      else {

        val previousVersion = tag.stripPrefix("v")

        // Tweak coursier.core.Version.Tokenizer to help here?

        val versionOpt =
          if (previousVersion.forall(c => c == '.' || c.isDigit)) {
            val l = previousVersion.split('.')
            Some((l.init :+ (l.last.toInt + 1).toString).mkString(".") + "-SNAPSHOT")
          } else {
            val idx = previousVersion.indexOf("-M")
            if (idx < 0)
              None
            else {
              Some(previousVersion.take(idx) + "-SNAPSHOT")
            }
          }

        versionOpt.toRight {
          s"Don't know how to handle version $previousVersion"
        }
      }
    }
  }

  def run(options: Options, remainingArgs: RemainingArgs): Unit = {

    val dir = remainingArgs.all match {
      case Seq() => new File(".")
      case Seq(path) => new File(path)
      case other =>
        Console.err.println(s"Too many arguments specified: ${other.mkString(" ")}\nExpected 0 or 1 argument.")
        sys.exit(1)
    }

    version(dir) match {
      case Left(msg) =>
        Console.err.println(msg)
        sys.exit(1)
      case Right(v) =>

        if (options.isSnapshot) {
          val retCode =
            if (v.endsWith("-SNAPSHOT"))
              0
            else
              1
          if (!options.quiet)
            Console.err.println(v)
          sys.exit(retCode)
        } else
          println(v)
    }
  }
} 
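Concretely: if the latest matching tag is v1.2.3 and HEAD has commits past it, the computed version is 1.2.4-SNAPSHOT; for a milestone tag such as v2.0.0-M4 it is 2.0.0-SNAPSHOT; and if HEAD sits exactly on the tag, the tag itself (v1.2.3) is returned unchanged.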
Example 67
Source File: HadoopFileSystemLogStore.scala    From delta   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.delta.storage

import java.io.{BufferedReader, FileNotFoundException, InputStreamReader}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.FileAlreadyExistsException
import java.util.UUID

import scala.collection.JavaConverters._

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession


  protected def writeWithRename(
      path: Path, actions: Iterator[String], overwrite: Boolean = false): Unit = {
    val fs = path.getFileSystem(getHadoopConfiguration)

    if (!fs.exists(path.getParent)) {
      throw new FileNotFoundException(s"No such file or directory: ${path.getParent}")
    }
    if (overwrite) {
      val stream = fs.create(path, true)
      try {
        actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
      } finally {
        stream.close()
      }
    } else {
      if (fs.exists(path)) {
        throw new FileAlreadyExistsException(path.toString)
      }
      val tempPath = createTempPath(path)
      var streamClosed = false // This flag is to avoid double close
      var renameDone = false // This flag is to save the delete operation in most of cases.
      val stream = fs.create(tempPath)
      try {
        actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
        stream.close()
        streamClosed = true
        try {
          if (fs.rename(tempPath, path)) {
            renameDone = true
          } else {
            if (fs.exists(path)) {
              throw new FileAlreadyExistsException(path.toString)
            } else {
              throw new IllegalStateException(s"Cannot rename $tempPath to $path")
            }
          }
        } catch {
          case _: org.apache.hadoop.fs.FileAlreadyExistsException =>
            throw new FileAlreadyExistsException(path.toString)
        }
      } finally {
        if (!streamClosed) {
          stream.close()
        }
        if (!renameDone) {
          fs.delete(tempPath, false)
        }
      }
    }
  }

  protected def createTempPath(path: Path): Path = {
    new Path(path.getParent, s".${path.getName}.${UUID.randomUUID}.tmp")
  }

  override def invalidateCache(): Unit = {}
} 
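This excerpt shows only the write path; the BufferedReader and InputStreamReader imports belong to the read path, which is omitted. A minimal sketch of what the matching read method looks like, assuming the surrounding class provides getHadoopConfiguration (the actual Delta source may differ):

def read(path: Path): Seq[String] = {
  val fs = path.getFileSystem(getHadoopConfiguration)
  val stream = fs.open(path)
  try {
    // Read all lines eagerly so the stream can be closed right away.
    val reader = new BufferedReader(new InputStreamReader(stream, UTF_8))
    IOUtils.readLines(reader).asScala.map(_.trim)
  } finally {
    stream.close()
  }
}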
Example 68
Source File: StreamReadingThread.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, IOException, InputStream, InputStreamReader}

import scala.util.control.NonFatal

class StreamReadingThread(in: InputStream, appender: (String) => Unit) extends Thread {
  override def run(): Unit = {
    try {
      val reader = new BufferedReader(new InputStreamReader(in))
      var str = ""
      while (str != null) {
        str = reader.readLine()
        Option(str).foreach(appender)
      }
    } catch {
      case _: InterruptedException =>
        // ok
      case ex: IOException if isStreamClosed(ex) =>
        // ok
      case NonFatal(t) =>
        t.printStackTrace(System.err)
    }
  }

  private def isStreamClosed(ex: IOException) = ex.getMessage.toLowerCase == "stream closed"
} 
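A typical use is draining a child process's output on a background thread, for example:

// Hypothetical usage: forward a subprocess's stdout, line by line, to println.
val proc = new ProcessBuilder("echo", "hello").start()
val drainer = new StreamReadingThread(proc.getInputStream, line => println(line))
drainer.start()
proc.waitFor()
drainer.join()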
Example 69
Source File: MultiThreadingTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn
import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeoutException
import java.util.concurrent.atomic.AtomicInteger

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.{SharedInstanceActorTesting, UnitTest}
import jdk.nashorn.api.scripting.NashornScriptEngineFactory
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.exceptions.TestFailedException
import org.slf4s.Logging

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future, Promise}

trait MultiThreadingTestFixture extends UnitTest with Logging with SharedInstanceActorTesting with VirtualMachineLauncher with ScalaFutures with FairAmountOfPatience with Eventually {
  override val scriptExecutor: ScriptExecutorBase = MultiThreadedScriptExecutor
  override implicit val executionContext: ExecutionContext = ExecutionContext.global
}

class MultiThreadingTest extends MultiThreadingTestFixture {
  def location(ln: Int) = ScriptLocation(ln, None)

  "Breakpoint requests from other threads should be ignore in a paused state" in {
    val scriptAddedPromise = Promise[Script]()
    val hitBreakpointPromise = Promise[String]()
    val breakpointCounter = new AtomicInteger()
    val host = getHost
    observeScriptEvents(new Observer[ScriptEvent] {

      override def onNext(item: ScriptEvent): Unit = item match {
        case ScriptAdded(script) =>
          scriptAddedPromise.success(script)
        case hb: HitBreakpoint =>
          breakpointCounter.incrementAndGet()
          hitBreakpointPromise.trySuccess("")
        case _ =>
      }

      override def onError(error: Throwable): Unit = {}

      override def onComplete(): Unit = {}
    })

    whenReady(scriptAddedPromise.future) { script =>
      val scriptLocation = eventually {
        host.getBreakpointLocations(ScriptIdentity.fromId(script.id), location(1), None).headOption.getOrElse(fail(s"No line numbers for script ${script.id}"))
      }
      host.setBreakpoint(ScriptIdentity.fromURL(script.url), scriptLocation, BreakpointOptions.empty)

      try {
        whenReady(hitBreakpointPromise.future) { _ =>
          // Ugly, but wait for a while to see if the counter increases over 1 (which it shouldn't).
          Thread.sleep(200)
          breakpointCounter.get() should be(1)
        }
      } catch {
        case t: TestFailedException if t.getMessage().contains("timeout") =>
          val progress = summarizeProgress()
          throw new TimeoutException("Timed out: " + progress)
      }
    }
  }
}

object MultiThreadedScriptExecutor extends App with ScriptExecutorBase {
  println("MultiThreadedScriptExecutor starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine("--no-syntax-extensions")
  val reader = new BufferedReader(new InputStreamReader(System.in))
  println(Signals.ready)
  waitForSignal(Signals.go)

  // Used a compiled script here before, stopped working with JDK 10
  val src =
    """(function () {
      |  return Math.floor(5.5);
      |})();
    """.stripMargin

  implicit val ec = ExecutionContext.global

  val futures = (1 to 5).map { _ =>
    Future {
      while (true) {
        scriptEngine.eval(src)
      }
    }
  }

  Await.result(Future.sequence(futures), 30.seconds)
} 
Example 70
Source File: ScriptExecutor.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, InputStreamReader}

import com.programmaticallyspeaking.ncd.testing.StringUtils
import jdk.nashorn.api.scripting.NashornScriptEngineFactory

import scala.util.control.NonFatal

abstract class ScriptExecutorC extends App with ScriptExecutorBase {
  println("ScriptExecutor starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine(nashornArgs: _*)
  val reader = new BufferedReader(new InputStreamReader(System.in))
  println(Signals.ready)
  waitForSignal(Signals.go)
  println("Got the go signal!")

  scriptEngine.eval(
    """this.createInstance = function (typeName) {
      |  var Type = Java.type(typeName);
      |  if (!Type) throw new Error("No such type: " + typeName);
      |  return new Type();
      |};
    """.stripMargin)

  while (true) {
    println("Awaiting script on stdin...")
    val script = StringUtils.fromBase64(readStdin())
    println("Got script: " + script)
    try {
      scriptEngine.eval(script)
      println("Script evaluation completed without errors")
    } catch {
      case NonFatal(t) =>
        t.printStackTrace(System.err)
    }
    println(Signals.scriptDone)
  }

  protected def nashornArgs: Array[String]
}

object ScriptExecutorNoJava extends ScriptExecutorC {
  override protected def nashornArgs = Array("--no-syntax-extensions", "--no-java")
}

object ScriptExecutor extends ScriptExecutorC {
  override protected def nashornArgs = Array("--no-syntax-extensions")
} 
Example 71
Source File: ScriptIdentificationInRunningAppTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, InputStreamReader}

import com.programmaticallyspeaking.ncd.testing.StringUtils
import jdk.nashorn.api.scripting.NashornScriptEngineFactory

import scala.concurrent.duration._


class ScriptIdentificationInRunningAppTest extends ScriptAddedTestFixture {

  "Recompilation of eval script should be merged with the original - regardless of observed order" in {
    val script = "'dummy';"

    whenReady(testAddScriptWithWait(script, 500.millis)) { _ =>
      getHost.scripts.filterNot(_.contents.contains("dummy")).map(_.url).distinct should have size 1
    }
  }

  override val scriptExecutor = RunningApp
}

object RunningApp extends App with ScriptExecutorBase {
  println("RunningApp starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine("--no-syntax-extensions")
  val reader = new BufferedReader(new InputStreamReader(System.in))

  val script1 =
    """function fun() {
      |  return '42';
      |}
      |var i = 0;
      |while (i++ < 10) {
      |  fun();
      |}
    """.stripMargin
  scriptEngine.eval(script1)

  // Stuff required by the test infra
  println(Signals.ready)
  waitForSignal(Signals.go)
  val script = StringUtils.fromBase64(readStdin())
  scriptEngine.eval(script)
  println(Signals.scriptDone)
} 
Example 72
Source File: JVMUtil.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jawa.core.util

import java.io.{BufferedReader, InputStreamReader}
import java.net.URLClassLoader
import java.text.NumberFormat

 
object JVMUtil {
	def startSecondJVM[C](clazz: Class[C], jvmArgs: List[String], args: List[String], redirectStream: Boolean): Int = {
    val separator = System.getProperty("file.separator")
    val classpath = Thread.currentThread().getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(_.getPath()).reduce((c1, c2) => c1 + java.io.File.pathSeparator + c2)
    val path = System.getProperty("java.home") + separator + "bin" + separator + "java"
    val commands: IList[String] = List(path) ::: jvmArgs ::: List("-cp", classpath, clazz.getCanonicalName.stripSuffix("$")) ::: args
    import scala.collection.JavaConverters._
    val processBuilder = new ProcessBuilder(commands.asJava)
    processBuilder.redirectErrorStream(redirectStream)
    val process = processBuilder.start()
    val is = process.getInputStream
    val isr = new InputStreamReader(is)
    val br = new BufferedReader(isr)
    var line = br.readLine()
    while (line != null) {
      println(line)
      line = br.readLine()
    }
    process.waitFor()
  }
  
  def showMemoryUsage(): Unit = {
    val runtime = Runtime.getRuntime
    val format = NumberFormat.getInstance()
    
    val sb = new StringBuilder()
    val maxMemory = runtime.maxMemory()
    val allocatedMemory = runtime.totalMemory()
    val freeMemory = runtime.freeMemory()
    
    sb.append("free memory: " + format.format(freeMemory / 1024 / 1024) + " ")
    sb.append("allocated memory: " + format.format(allocatedMemory / 1024 / 1024) + " ")
    sb.append("max memory: " + format.format(maxMemory / 1024 / 1024) + " ")
    sb.append("total free memory: " + format.format((freeMemory + (maxMemory - allocatedMemory)) / 1024 / 1024) + " ")
    println(sb.toString())
  }
} 
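A hypothetical call, where MyMainClass stands in for a real class with a main method:

// Launch MyMainClass (a placeholder) in a fresh JVM with 1 GB of heap,
// echoing its output and returning its exit code.
val exitCode = JVMUtil.startSecondJVM(classOf[MyMainClass],
  jvmArgs = List("-Xmx1g"), args = List("--verbose"), redirectStream = true)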
Example 73
Source File: Using.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jawa.core.compiler.compile.io

import java.io.{Closeable, FileInputStream, FileOutputStream, InputStream, OutputStream, File => JavaFile}
import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter}
import java.io.{BufferedReader, BufferedWriter}
import java.util.zip.GZIPInputStream
import java.net.URL
import java.nio.channels.FileChannel
import java.nio.charset.Charset
import java.util.jar.{JarFile, JarInputStream, JarOutputStream}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}

import ErrorHandling.translate

import scala.reflect.{Manifest => SManifest}

abstract class Using[Source, T]
{
  protected def open(src: Source): T
  def apply[R](src: Source)(f: T => R): R =
  {
    val resource = open(src)
    try { f(resource) }
    finally { close(resource) }
  }
  protected def close(out: T): Unit
}
abstract class WrapUsing[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends Using[Source, T]
{
  protected def label[S](m: SManifest[S]): String = m.runtimeClass.getSimpleName
  protected def openImpl(source: Source): T
  protected final def open(source: Source): T =
    translate("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ") { openImpl(source) }
}
trait OpenFile[T] extends Using[JavaFile, T]
{
  protected def openImpl(file: JavaFile): T
  protected final def open(file: JavaFile): T =
  {
    val parent = file.getParentFile
    if(parent != null)
      IO.createDirectory(parent)
    openImpl(file)
  }
}
object Using
{
  def wrap[Source, T<: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] =
    wrap(openF, closeCloseable)
  def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] =
    new WrapUsing[Source, T]
    {
      def openImpl(source: Source): T = openF(source)
      def close(t: T): Unit = closeF(t)
    }

  def resource[Source, T <: Closeable](openF: Source => T): Using[Source,T] =
    resource(openF, closeCloseable)
  def resource[Source, T](openF: Source => T, closeF: T => Unit): Using[Source,T] =
    new Using[Source,T]
    {
      def open(s: Source): T = openF(s)
      def close(s: T): Unit = closeF(s)
    }
  def file[T <: Closeable](openF: JavaFile => T): OpenFile[T] = file(openF, closeCloseable)
  def file[T](openF: JavaFile => T, closeF: T => Unit): OpenFile[T] =
    new OpenFile[T]
    {
      def openImpl(file: JavaFile): T = openF(file)
      def close(t: T): Unit = closeF(t)
    }
  private def closeCloseable[T <: Closeable]: T => Unit = _.close()

  def bufferedOutputStream: Using[OutputStream, BufferedOutputStream] = wrap((out: OutputStream) => new BufferedOutputStream(out) )
  def bufferedInputStream: Using[InputStream, BufferedInputStream] = wrap((in: InputStream) => new BufferedInputStream(in) )
  def fileOutputStream(append: Boolean = false): OpenFile[BufferedOutputStream] = file(f => new BufferedOutputStream(new FileOutputStream(f, append)))
  def fileInputStream: OpenFile[BufferedInputStream] = file(f => new BufferedInputStream(new FileInputStream(f)))
  def urlInputStream: Using[URL, BufferedInputStream] = resource((u: URL) => translate("Error opening " + u + ": ")(new BufferedInputStream(u.openStream)))
  def fileOutputChannel: OpenFile[FileChannel] = file(f => new FileOutputStream(f).getChannel)
  def fileInputChannel: OpenFile[FileChannel] = file(f => new FileInputStream(f).getChannel)
  def fileWriter(charset: Charset = IO.utf8, append: Boolean = false): OpenFile[BufferedWriter] =
    file(f => new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)) )
  def fileReader(charset: Charset): OpenFile[BufferedReader] = file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) )
  def urlReader(charset: Charset): Using[URL, BufferedReader] = resource((u: URL) => new BufferedReader(new InputStreamReader(u.openStream, charset)))
  def jarFile(verify: Boolean): OpenFile[JarFile] = file(f => new JarFile(f, verify), (_: JarFile).close())
  def zipFile: OpenFile[ZipFile] = file(f => new ZipFile(f), (_: ZipFile).close())
  def streamReader: Using[(InputStream, Charset), InputStreamReader] = wrap{ (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) } }
  def gzipInputStream: Using[InputStream, GZIPInputStream] = wrap((in: InputStream) => new GZIPInputStream(in, 8192) )
  def zipInputStream: Using[InputStream, ZipInputStream] = wrap((in: InputStream) => new ZipInputStream(in))
  def zipOutputStream: Using[OutputStream, ZipOutputStream] = wrap((out: OutputStream) => new ZipOutputStream(out))
  def gzipOutputStream: Using[OutputStream, GZIPOutputStream] = wrap((out: OutputStream) => new GZIPOutputStream(out, 8192), (_: GZIPOutputStream).finish())
  def jarOutputStream: Using[OutputStream, JarOutputStream] = wrap((out: OutputStream) => new JarOutputStream(out))
  def jarInputStream: Using[InputStream, JarInputStream] = wrap((in: InputStream) => new JarInputStream(in))
  def zipEntry(zip: ZipFile): Using[ZipEntry, InputStream] = resource((entry: ZipEntry) =>
    translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) } )
} 
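These combinators follow the loan pattern: the resource is opened, handed to the function, and closed afterwards regardless of exceptions. For example, reading the first line of a UTF-8 file (the file name is illustrative):

import java.nio.charset.StandardCharsets

val firstLine: String =
  Using.fileReader(StandardCharsets.UTF_8)(new JavaFile("build.sbt")) { reader =>
    reader.readLine()
  }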
Example 74
Source File: BlockLang.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
package jgo.tools.compiler
package parser

import scala.util.parsing.input.Reader

import lexer._
import scope._
import interm._
import interm.types._

import stmts._
import funcs._


class BlockLang(in: Reader[Token], res: List[Type] = Nil, resNamed: Boolean = false) extends FuncContext with Statements {
  //def, not val.  See comment in StackScoped
  def initialEnclosing = UniverseScope
  
  def targetFuncType = FuncType(Nil, res)
  def hasNamedResults = resNamed
  
  lazy val result = phrase(block)(in)
}

object BlockLang {
  import java.io.{File, InputStream, FileInputStream, InputStreamReader}
  import scala.collection.immutable.PagedSeq
  
  def apply(in: Reader[Char]):  BlockLang = new BlockLang(Scanner(in))
  def apply(inStr: String):     BlockLang = new BlockLang(Scanner(inStr))
  def apply(in: InputStream):   BlockLang = new BlockLang(Scanner(in))
  def apply(file: File):        BlockLang = new BlockLang(Scanner(file))
  
  def from(fileName: String):   BlockLang = new BlockLang(Scanner.from(fileName))
} 
Example 75
Source File: Scanner.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
package jgo.tools.compiler
package lexer

import scala.util.parsing._
import input._
import combinator._

//portions of this class taken from scala.util.parsing.combinator.lexical.Scanners#Scanner
final class Scanner private(prev: Option[Token], in: Reader[Char]) extends Reader[Token] {
  private def this(in: Reader[Char]) = this(None, in)
  
  private val (tok, remainingIn) = Lexical.token(prev, in)
  
  def      first = {  tok }
  lazy val rest  = new Scanner(Some(tok), remainingIn)
  lazy val pos   = Lexical.stripWhitespace(in).pos
  def      atEnd = tok == EOF
  
  override def source = in.source
  override def offset = in.offset
  
  def foreach[U](f: Token => U) {
    var cur = this
    while (!cur.atEnd) {
      f(cur.first)
      cur = cur.rest
    }
  }
}

object Scanner {
  import java.io.{File, InputStream, FileInputStream, InputStreamReader}
  import scala.collection.immutable.PagedSeq
  
  def apply(in: Reader[Char]): Scanner = new Scanner(None, in)
  def apply(inStr: String):    Scanner = new Scanner(new CharArrayReader(inStr.toCharArray()))
  def apply(in: File):         Scanner = apply(new FileInputStream(in))
  def apply(in: InputStream):  Scanner =
    new Scanner(None, new PagedSeqReader(PagedSeq.fromReader(new InputStreamReader(in , "UTF-8"))))
  
  def from(fileName: String): Scanner = apply(new FileInputStream(fileName))
} 
Example 76
Source File: LexTestAll.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
import jgo.tools.compiler._
import parser.BlockLang
import parser.combinatorExten._
import lexer._

import interm.codeseq._

import java.io.{File, InputStream, FileInputStream, InputStreamReader}

object LexTestAll {
  def main(args: Array[String]) {
    if (args.isEmpty)
      testAll(new File(System.getProperty("user.home") + "/Desktop/gotest/"))
    else
      testAll(new File(args(0)))
  }
  
  def testAll(dir: File) {
    for (file <- dir.listFiles)
      if (file.isDirectory)
        testAll(file)
      else if (file.isFile && !file.isHidden)
        test(file)
  }
  
  def test(file: File) {
    println("testing: " + file.getCanonicalPath)
    println()
    
    var cur = Scanner(file)
    print("tokenization: ")
    while (!cur.atEnd) {
      print(cur.first + " ")
      cur = cur.rest
    }
    println()
    println()
  }
} 
Example 77
Source File: Json4sSerialization.scala    From kafka-serialization   with Apache License 2.0 5 votes vote down vote up
package com.ovoenergy.kafka.serialization.json4s

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.core._
import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer}
import org.json4s.Formats
import org.json4s.native.Serialization.{read, write}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

trait Json4sSerialization {

  def json4sSerializer[T <: AnyRef](implicit jsonFormats: Formats): KafkaSerializer[T] = serializer { (_, data) =>
    val bout = new ByteArrayOutputStream()
    val writer = new OutputStreamWriter(bout, StandardCharsets.UTF_8)

    // TODO Use scala-arm
    try {
      write(data, writer)
      writer.flush()
    } finally {
      writer.close()
    }
    bout.toByteArray
  }

  def json4sDeserializer[T: TypeTag](implicit jsonFormats: Formats): KafkaDeserializer[T] = deserializer { (_, data) =>
    val tt = implicitly[TypeTag[T]]
    implicit val cl = ClassTag[T](tt.mirror.runtimeClass(tt.tpe))
    read[T](new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8))
  }

} 
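A hypothetical wiring of these factories, with a sample event type and an object mixing in the trait (both names are illustrative):

import org.json4s.{DefaultFormats, Formats}

object JsonCodecs extends Json4sSerialization
implicit val formats: Formats = DefaultFormats

case class Event(id: Int, name: String)

val bytes = JsonCodecs.json4sSerializer[Event].serialize("events", Event(1, "ping"))
val event = JsonCodecs.json4sDeserializer[Event].deserialize("events", bytes)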
Example 78
Source File: HadoopFSHelpers.scala    From morpheus   with Apache License 2.0 5 votes vote down vote up
package org.opencypher.morpheus.api.io.fs

import java.io.{BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter}

import org.apache.hadoop.fs.{FileSystem, Path}
import org.opencypher.morpheus.api.io.util.FileSystemUtils.using

object HadoopFSHelpers {

  implicit class RichHadoopFileSystem(fileSystem: FileSystem) {

    protected def createDirectoryIfNotExists(path: Path): Unit = {
      if (!fileSystem.exists(path)) {
        fileSystem.mkdirs(path)
      }
    }

    def listDirectories(path: String): List[String] = {
      val p = new Path(path)
      createDirectoryIfNotExists(p)
      fileSystem.listStatus(p)
        .filter(_.isDirectory)
        .map(_.getPath.getName)
        .toList
    }

    def deleteDirectory(path: String): Unit = {
      fileSystem.delete(new Path(path),  true)
    }

    def readFile(path: String): String = {
      using(new BufferedReader(new InputStreamReader(fileSystem.open(new Path(path)), "UTF-8"))) { reader =>
        def readLines = Stream.cons(reader.readLine(), Stream.continually(reader.readLine))
        readLines.takeWhile(_ != null).mkString
      }
    }

    def writeFile(path: String, content: String): Unit = {
      val p = new Path(path)
      val parentDirectory = p.getParent
      createDirectoryIfNotExists(parentDirectory)
      using(fileSystem.create(p)) { outputStream =>
        using(new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"))) { bufferedWriter =>
          bufferedWriter.write(content)
        }
      }
    }
  }

} 
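The implicit class makes these helpers available on any Hadoop FileSystem; a hypothetical use against the default (local) file system:

import org.apache.hadoop.conf.Configuration
import HadoopFSHelpers._

val fs = FileSystem.get(new Configuration())
fs.writeFile("/tmp/morpheus-demo.txt", "hello")
println(fs.readFile("/tmp/morpheus-demo.txt"))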
Example 79
Source File: CustomReceiver.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
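This excerpt omits the receiver's class declaration and its onStart/onStop methods. In the full upstream Spark example, the receiver is wired into a streaming job roughly like this (host and port are illustrative):

val sparkConf = new SparkConf().setAppName("CustomReceiver")
val ssc = new StreamingContext(sparkConf, Seconds(1))
val lines = ssc.receiverStream(new CustomReceiver("localhost", 9999))
lines.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _).print()
ssc.start()
ssc.awaitTermination()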
Example 80
Source File: StreamMetadata.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = FileSystem.get(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
Example 81
Source File: CustomReceiver.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket

import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
} 
Example 82
Source File: HDFSUtil.scala    From aerosolve   with Apache License 2.0 5 votes vote down vote up
package com.airbnb.common.ml.util

import java.io.{BufferedReader, IOException, InputStreamReader}
import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}


object HDFSUtil extends ScalaLogging {

  private lazy val hadoopConfiguration = new Configuration()

  
  def lastTaskSucceed(path: String): Boolean = {
    if (dirExists(path)) {
      if (dirExists(path + "/_temporary")) {
        logger.info(s"Deleting partial data for $path.")
        deleteDirWithoutThrow(path)
        false
      } else {
        logger.info(s"$path exists")
        true
      }
    } else {
      logger.info(s"$path does not exist")
      false
    }
  }

  def dirExists(dir: String): Boolean = {
    val path = new Path(dir)
    val hdfs = FileSystem.get(
      new java.net.URI(dir), hadoopConfiguration)

    hdfs.exists(path)
  }

  def deleteDirWithoutThrow(dir: String): Unit = {
    val path = new Path(dir)
    val hdfs = FileSystem.get(
      new java.net.URI(dir), hadoopConfiguration)
    if (hdfs.exists(path)) {
      logger.warn(s"$dir exists, DELETING")
      try {
        hdfs.delete(path, true)
      } catch {
        case e: IOException => logger.error(s" exception $e")
      }
    }
  }

  def createPath(path: String): Unit = {
    val remotePath = new Path(path)
    val remoteFS = remotePath.getFileSystem(hadoopConfiguration)
    remoteFS.mkdirs(new Path(path))
  }

  def readStringFromFile(inputFile : String): String = {
    val fs = FileSystem.get(new URI(inputFile), hadoopConfiguration)
    val path = new Path(inputFile)
    val stream = fs.open(path)
    val reader = new BufferedReader(new InputStreamReader(stream))
    val str = Stream.continually(reader.readLine()).takeWhile(_ != null).mkString("\n")
    str
  }

} 
Example 83
Source File: LogFile.scala    From kyuubi   with Apache License 2.0 5 votes vote down vote up
package yaooqinn.kyuubi.operation

import java.io.{BufferedReader, File, FileInputStream, FileNotFoundException, FileOutputStream, InputStreamReader, IOException, PrintStream}
import java.util.ArrayList

import scala.collection.JavaConverters._

import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.IOUtils
import org.apache.kyuubi.Logging
import org.apache.spark.sql.Row

import yaooqinn.kyuubi.KyuubiSQLException

class LogFile private (
    file: File,
    private var reader: Option[BufferedReader],
    writer: PrintStream,
    @volatile private var isRemoved: Boolean = false) extends Logging {

  def this(file: File) = {
    this(file,
      LogFile.createReader(file, isRemoved = false),
      new PrintStream(new FileOutputStream(file)))
  }

  private def resetReader(): Unit = {
    reader.foreach(IOUtils.closeStream)
    reader = None
  }

  private def readResults(nLines: Long): Seq[Row] = {
    reader = reader.orElse(LogFile.createReader(file, isRemoved))

    val logs = new ArrayList[Row]()
    reader.foreach { r =>
      var i = 1
      try {
        var line: String = r.readLine()
        while ((i < nLines || nLines <= 0) && line != null) {
          logs.add(Row(line))
          line = r.readLine()
          i += 1
        }
      } catch {
        case e: FileNotFoundException =>
          val operationHandle = file.getName
          val path = file.getAbsolutePath
          val msg = if (isRemoved) {
            s"Operation[$operationHandle] has been closed and the log file $path has been removed"
          } else {
            s"Operation[$operationHandle] Log file $path is not found"
          }
          throw new KyuubiSQLException(msg, e)
      }
    }
    logs.asScala
  }

  
  def write(msg: String): Unit = {
    writer.print(msg)
  }


  def close(): Unit = synchronized {
    try {
      reader.foreach(_.close())
      writer.close()
      if (!isRemoved) {
        FileUtils.forceDelete(file)
        isRemoved = true
      }
    } catch {
      case e: IOException =>
        error(s"Failed to remove corresponding log file of operation: ${file.getName}", e)
    }
  }
}

object LogFile {

  def createReader(file: File, isRemoved: Boolean): Option[BufferedReader] = try {
    Option(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
  } catch {
    case e: FileNotFoundException =>
      val operationHandle = file.getName
      val path = file.getAbsolutePath
      val msg = if (isRemoved) {
        s"Operation[$operationHandle] has been closed and the log file $path has been removed"
      } else {
        s"Operation[$operationHandle] Log file $path is not found"
      }
      throw new KyuubiSQLException(msg, e)
  }
} 
Example 84
Source File: Utils.scala    From kyuubi   with Apache License 2.0 5 votes vote down vote up
package org.apache.kyuubi

import java.io.{File, InputStreamReader, IOException}
import java.net.{URI, URISyntaxException}
import java.nio.charset.StandardCharsets
import java.util.{Properties, UUID}

import scala.collection.JavaConverters._
import scala.util.{Success, Try}

private[kyuubi] object Utils extends Logging {

  import org.apache.kyuubi.config.KyuubiConf._

  def strToSeq(s: String): Seq[String] = {
    require(s != null)
    s.split(",").map(_.trim).filter(_.nonEmpty)
  }

  def getSystemProperties: Map[String, String] = {
    sys.props.toMap
  }

  def getDefaultPropertiesFile(env: Map[String, String] = sys.env): Option[File] = {
    env.get(KYUUBI_CONF_DIR)
      .orElse(env.get(KYUUBI_HOME).map(_ + File.separator + "/conf"))
      .map( d => new File(d + File.separator + KYUUBI_CONF_FILE_NAME))
      .filter(f => f.exists() && f.isFile)
  }

  def getPropertiesFromFile(file: Option[File]): Map[String, String] = {
    file.map { f =>
      info(s"Loading Kyuubi properties from ${f.getAbsolutePath}")
      val reader = new InputStreamReader(f.toURI.toURL.openStream(), StandardCharsets.UTF_8)
      try {
        val properties = new Properties()
        properties.load(reader)
        properties.stringPropertyNames().asScala.map { k =>
          (k, properties.getProperty(k).trim)
        }.toMap
      } catch {
        case e: IOException =>
          throw new KyuubiException(
            s"Failed when loading Kyuubi properties from ${f.getAbsolutePath}", e)
      } finally {
        reader.close()
      }
    }.getOrElse(Map.empty)
  }


  
  def createTempDir(
      root: String = System.getProperty("java.io.tmpdir"),
      namePrefix: String = "kyuubi"): File = {
    val dir = createDirectory(root, namePrefix)
    dir.deleteOnExit()
    dir
  }

} 
Example 85
Source File: CustomReceiver.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket
import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver

// Receiver declaration as in the standard Spark custom receiver example,
// which this fork is based on.
class CustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() { receive() }
    }.start()
  }

  def onStop() {
    // Nothing to do here: the thread calling receive()
    // stops itself once isStopped() returns true
  }

  private def receive() {
    var socket: Socket = null
    var userInput: String = null
    try {
      logInfo("Connecting to " + host + ":" + port)
      socket = new Socket(host, port) // connect to the server
      logInfo("Connected to " + host + ":" + port)
      // read lines from the socket's input stream until stopped
      val reader = new BufferedReader(
        new InputStreamReader(socket.getInputStream(), "UTF-8"))
      userInput = reader.readLine()
      while (!isStopped && userInput != null) {
        store(userInput)              // hand the line to Spark for storage
        userInput = reader.readLine() // read the next line
      }
      reader.close()  // close the stream
      socket.close()  // close the connection
      logInfo("Stopped receiving")
      restart("Trying to connect again")
    } catch {
      case e: java.net.ConnectException =>
        restart("Error connecting to " + host + ":" + port, e)
      case t: Throwable =>
        restart("Error receiving data", t)
    }
  }
}
// scalastyle:on println 
Example 86
Source File: MeetupReceiver.scala    From meetup-stream   with Apache License 2.0 5 votes vote down vote up
package receiver

import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.storage.StorageLevel
import org.apache.spark.Logging
import com.ning.http.client.AsyncHttpClientConfig
import com.ning.http.client._
import scala.collection.mutable.ArrayBuffer
import java.io.OutputStream
import java.io.ByteArrayInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.io.InputStream
import java.io.PipedInputStream
import java.io.PipedOutputStream

class MeetupReceiver(url: String) extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {
  
  @transient var client: AsyncHttpClient = _
  
  @transient var inputPipe: PipedInputStream = _
  @transient var outputPipe: PipedOutputStream = _  
       
  def onStart() {    
    val cf = new AsyncHttpClientConfig.Builder()
    cf.setRequestTimeout(Integer.MAX_VALUE)
    cf.setReadTimeout(Integer.MAX_VALUE)
    cf.setPooledConnectionIdleTimeout(Integer.MAX_VALUE)      
    client= new AsyncHttpClient(cf.build())
    
    inputPipe = new PipedInputStream(1024 * 1024)
    outputPipe = new PipedOutputStream(inputPipe)
    val producerThread = new Thread(new DataConsumer(inputPipe))
    producerThread.start()
    
    client.prepareGet(url).execute(new AsyncHandler[Unit]{
        
      def onBodyPartReceived(bodyPart: HttpResponseBodyPart) = {
        bodyPart.writeTo(outputPipe)
        AsyncHandler.STATE.CONTINUE        
      }
      
      def onStatusReceived(status: HttpResponseStatus) = {
        AsyncHandler.STATE.CONTINUE
      }
      
      def onHeadersReceived(headers: HttpResponseHeaders) = {
        AsyncHandler.STATE.CONTINUE
      }
            
      def onCompleted = {
        println("completed")
      }
      
      
      def onThrowable(t: Throwable)={
        t.printStackTrace()
      }
        
    })    
    
    
  }

  def onStop() {
    if (Option(client).isDefined) client.close()
    if (Option(outputPipe).isDefined) {
     outputPipe.flush()
     outputPipe.close() 
    }
    if (Option(inputPipe).isDefined) {
     inputPipe.close() 
    }    
  }
  
  class DataConsumer(inputStream: InputStream) extends Runnable 
  {
       
      override
      def run()
      {        
        val bufferedReader = new BufferedReader( new InputStreamReader( inputStream ))
        var input=bufferedReader.readLine()
        while(input!=null){          
          store(input)
          input=bufferedReader.readLine()
        }            
      }  
      
  }

} 
Example 87
Source File: SslContexts.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.util
import java.io.{ByteArrayInputStream, File, FileInputStream, InputStreamReader}
import java.security.cert.{CertificateFactory, X509Certificate}
import java.security.{KeyStore, SecureRandom, Security}
import java.util.Base64

import com.goyeau.kubernetes.client.KubeConfig
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
import org.bouncycastle.jce.provider.BouncyCastleProvider
import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter
import org.bouncycastle.openssl.{PEMKeyPair, PEMParser}

object SslContexts {
  private val TrustStoreSystemProperty         = "javax.net.ssl.trustStore"
  private val TrustStorePasswordSystemProperty = "javax.net.ssl.trustStorePassword"
  private val KeyStoreSystemProperty           = "javax.net.ssl.keyStore"
  private val KeyStorePasswordSystemProperty   = "javax.net.ssl.keyStorePassword"

  def fromConfig(config: KubeConfig): SSLContext = {
    val sslContext = SSLContext.getInstance("TLS")
    sslContext.init(keyManagers(config), trustManagers(config), new SecureRandom)
    sslContext
  }

  private def keyManagers(config: KubeConfig) = {
    // Client certificate
    val certDataStream = config.clientCertData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val certFileStream = config.clientCertFile.map(new FileInputStream(_))

    // Client key
    val keyDataStream = config.clientKeyData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val keyFileStream = config.clientKeyFile.map(new FileInputStream(_))

    for {
      keyStream  <- keyDataStream.orElse(keyFileStream)
      certStream <- certDataStream.orElse(certFileStream)
    } yield {
      Security.addProvider(new BouncyCastleProvider())
      val pemKeyPair =
        new PEMParser(new InputStreamReader(keyStream)).readObject().asInstanceOf[PEMKeyPair] // scalafix:ok
      val privateKey = new JcaPEMKeyConverter().setProvider("BC").getPrivateKey(pemKeyPair.getPrivateKeyInfo)

      val certificateFactory = CertificateFactory.getInstance("X509")
      val certificate        = certificateFactory.generateCertificate(certStream).asInstanceOf[X509Certificate] // scalafix:ok

      defaultKeyStore.setKeyEntry(
        certificate.getSubjectX500Principal.getName,
        privateKey,
        config.clientKeyPass.fold(Array.empty[Char])(_.toCharArray),
        Array(certificate)
      )
    }

    val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
    keyManagerFactory.init(defaultKeyStore, Array.empty)
    keyManagerFactory.getKeyManagers
  }

  private lazy val defaultKeyStore = {
    val propertyKeyStoreFile =
      Option(System.getProperty(KeyStoreSystemProperty, "")).filter(_.nonEmpty).map(new File(_))

    val keyStore = KeyStore.getInstance(KeyStore.getDefaultType)
    keyStore.load(
      propertyKeyStoreFile.map(new FileInputStream(_)).orNull,
      System.getProperty(KeyStorePasswordSystemProperty, "").toCharArray
    )
    keyStore
  }

  private def trustManagers(config: KubeConfig) = {
    val certDataStream = config.caCertData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val certFileStream = config.caCertFile.map(new FileInputStream(_))

    certDataStream.orElse(certFileStream).foreach { certStream =>
      val certificateFactory = CertificateFactory.getInstance("X509")
      val certificate        = certificateFactory.generateCertificate(certStream).asInstanceOf[X509Certificate] // scalafix:ok
      defaultTrustStore.setCertificateEntry(certificate.getSubjectX500Principal.getName, certificate)
    }

    val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
    trustManagerFactory.init(defaultTrustStore)
    trustManagerFactory.getTrustManagers
  }

  private lazy val defaultTrustStore = {
    val securityDirectory = s"${System.getProperty("java.home")}/lib/security"

    val propertyTrustStoreFile =
      Option(System.getProperty(TrustStoreSystemProperty, "")).filter(_.nonEmpty).map(new File(_))
    val jssecacertsFile = Option(new File(s"$securityDirectory/jssecacerts")).filter(f => f.exists && f.isFile)
    val cacertsFile     = new File(s"$securityDirectory/cacerts")

    val keyStore = KeyStore.getInstance(KeyStore.getDefaultType)
    keyStore.load(
      new FileInputStream(propertyTrustStoreFile.orElse(jssecacertsFile).getOrElse(cacertsFile)),
      System.getProperty(TrustStorePasswordSystemProperty, "changeit").toCharArray
    )
    keyStore
  }
} 
Example 88
Source File: CustomReceiver.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo(s"Connecting to $host : $port")
     socket = new Socket(host, port)
     logInfo(s"Connected to $host : $port")
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart(s"Error connecting to $host : $port", e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 89
Source File: CreateJacksonParser.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.json

import java.io.{ByteArrayInputStream, InputStream, InputStreamReader}

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.hadoop.io.Text

import org.apache.spark.unsafe.types.UTF8String

private[sql] object CreateJacksonParser extends Serializable {
  def string(jsonFactory: JsonFactory, record: String): JsonParser = {
    jsonFactory.createParser(record)
  }

  def utf8String(jsonFactory: JsonFactory, record: UTF8String): JsonParser = {
    val bb = record.getByteBuffer
    assert(bb.hasArray)

    val bain = new ByteArrayInputStream(
      bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())

    jsonFactory.createParser(new InputStreamReader(bain, "UTF-8"))
  }

  def text(jsonFactory: JsonFactory, record: Text): JsonParser = {
    jsonFactory.createParser(record.getBytes, 0, record.getLength)
  }

  def inputStream(jsonFactory: JsonFactory, record: InputStream): JsonParser = {
    jsonFactory.createParser(record)
  }
} 
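A small usage sketch (hypothetical input; note the object is private[sql], so real callers live under org.apache.spark.sql):

val factory = new JsonFactory()
val parser = CreateJacksonParser.string(factory, """{"name": "spark"}""")
try {
  // advances to START_OBJECT; a real caller would hand the parser to row parsing
  assert(parser.nextToken() != null)
} finally parser.close()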
Example 90
Source File: StreamMetadata.scala    From Spark-2.3.1   with Apache License 2.0    5 votes
package org.apache.spark.sql.execution.streaming

import java.io.{InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataInputStream, FSDataOutputStream, Path}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

import org.apache.spark.internal.Logging
import org.apache.spark.sql.streaming.StreamingQuery


  def write(
      metadata: StreamMetadata,
      metadataFile: Path,
      hadoopConf: Configuration): Unit = {
    var output: FSDataOutputStream = null
    try {
      val fs = metadataFile.getFileSystem(hadoopConf)
      output = fs.create(metadataFile)
      val writer = new OutputStreamWriter(output)
      Serialization.write(metadata, writer)
      writer.close()
    } catch {
      case NonFatal(e) =>
        logError(s"Error writing stream metadata $metadata to $metadataFile", e)
        throw e
    } finally {
      IOUtils.closeQuietly(output)
    }
  }
} 
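The matching read path is elided from this listing; a sketch of what it looks like, assuming the stream is decoded as UTF-8 through an InputStreamReader and deserialized with json4s (not the verbatim Spark code):

def read(metadataFile: Path, hadoopConf: Configuration): Option[StreamMetadata] = {
  implicit val formats = Serialization.formats(NoTypeHints)
  val fs = metadataFile.getFileSystem(hadoopConf)
  if (!fs.exists(metadataFile)) None
  else {
    var input: FSDataInputStream = null
    try {
      input = fs.open(metadataFile)
      val reader = new InputStreamReader(input, StandardCharsets.UTF_8)
      Some(Serialization.read[StreamMetadata](reader))
    } finally {
      IOUtils.closeQuietly(input)
    }
  }
}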
Example 91
Source File: CustomReceiver.scala    From BigDatalog   with Apache License 2.0    5 votes
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket

import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
    var socket: Socket = null
    var userInput: String = null
    try {
      logInfo("Connecting to " + host + ":" + port)
      socket = new Socket(host, port)
      logInfo("Connected to " + host + ":" + port)
      val reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"))
      userInput = reader.readLine()
      while (!isStopped && userInput != null) {
        store(userInput)
        userInput = reader.readLine()
      }
      reader.close()
      socket.close()
      logInfo("Stopped receiving")
      restart("Trying to connect again")
    } catch {
      case e: java.net.ConnectException =>
        restart("Error connecting to " + host + ":" + port, e)
      case t: Throwable =>
        restart("Error receiving data", t)
    }
  }
}
// scalastyle:on println 
Example 92
Source File: DefaultRepositories.scala    From cosmos   with Apache License 2.0    5 votes
package com.mesosphere.cosmos.repository

import com.google.common.io.CharStreams
import com.mesosphere.cosmos.rpc
import com.twitter.util.Try
import io.circe.jawn.decode
import java.io.InputStreamReader
import scala.util.Either
import scala.util.Left
import scala.util.Right

private[repository] class DefaultRepositories private[repository](resourceName: String) {
  private val repos: Try[Either[io.circe.Error, List[rpc.v1.model.PackageRepository]]] = Try {
    Option(this.getClass.getResourceAsStream(resourceName)) match {
      case Some(is) =>
        val json = CharStreams.toString(new InputStreamReader(is))
        decode[List[rpc.v1.model.PackageRepository]](json)
      case _ =>
        throw new IllegalStateException(s"Unable to load classpath resource: $resourceName")
    }
  }
}

object DefaultRepositories {
  private[this] val loaded = new DefaultRepositories("/default-repositories.json")

  def apply(): DefaultRepositories = loaded

  implicit class DefaultRepositoriesOps(val dr: DefaultRepositories) extends AnyVal {
    def get(): Try[Either[io.circe.Error, List[rpc.v1.model.PackageRepository]]] = {
      dr.repos
    }

    def getOrThrow: List[rpc.v1.model.PackageRepository] = {
      get().map {
        case Right(list) => list
        case Left(err) => throw err
      }.get
    }

    def getOrElse(
      orElse: List[rpc.v1.model.PackageRepository]
    ): List[rpc.v1.model.PackageRepository] = {
      get().map(_.getOrElse(orElse)).getOrElse(orElse)
    }
  }

} 
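A short usage sketch built from the ops defined above:

// Fall back to an empty list if the bundled JSON is missing or fails to decode
val repos: List[rpc.v1.model.PackageRepository] = DefaultRepositories().getOrElse(Nil)

// Or fail fast during startup:
val required = DefaultRepositories().getOrThrow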
Example 93
Source File: IntegrationBeforeAndAfterAll.scala    From cosmos   with Apache License 2.0    5 votes
package com.mesosphere.cosmos

import com.google.common.io.CharStreams
import com.mesosphere.cosmos.circe.Decoders.parse
import com.mesosphere.cosmos.http.CosmosRequests
import com.mesosphere.cosmos.test.CosmosIntegrationTestClient.CosmosClient
import com.mesosphere.cosmos.thirdparty.marathon.model.AppId
import io.lemonlabs.uri.dsl._
import com.twitter.finagle.http.Status
import io.circe.jawn.decode
import java.io.InputStreamReader
import org.scalatest.Assertion
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Suite
import org.scalatest.concurrent.Eventually
import scala.concurrent.duration._

trait IntegrationBeforeAndAfterAll extends BeforeAndAfterAll with Eventually { this: Suite =>

  private[this] lazy val logger = org.slf4j.LoggerFactory.getLogger(getClass)

  private[this] val universeUri = "https://downloads.mesosphere.com/universe/02493e40f8564a39446d06c002f8dcc8e7f6d61f/repo-up-to-1.8.json"
  private[this] val universeConverterUri = "https://universe-converter.mesosphere.com/transform?url=" + universeUri

  override def beforeAll(): Unit = {
    Requests.deleteRepository(Some("Universe"))

    val customPkgMgrResource = s"/${ItObjects.customManagerAppName}.json"

    logger.info(s"Creating marathon app from $customPkgMgrResource")
    Requests
      .postMarathonApp(
        parse(
          Option(this.getClass.getResourceAsStream(customPkgMgrResource)) match {
            case Some(is) =>
              CharStreams.toString(new InputStreamReader(is))
            case _ =>
              throw new IllegalStateException(s"Unable to load classpath resource: $customPkgMgrResource")
          }
        ).toOption.get.asObject.get
      )
    Requests.waitForDeployments()

    Requests.addRepository(
      "Universe",
      universeConverterUri,
      Some(0)
    )

    Requests.addRepository(
      "V5Testpackage",
      ItObjects.V5TestPackage,
      Some(0)
    )

    Requests.addRepository(
      "V4TestUniverse",
      ItObjects.V4TestUniverseConverterURI,
      Some(0)
    )

    // This package is present only in V4TestUniverse and this method ensures that the
    // package collection cache is cleared before starting the integration tests
    val _ = waitUntilCacheReloads()
  }

  override def afterAll(): Unit = {
    Requests.deleteRepository(Some("V4TestUniverse"))
    Requests.deleteRepository(Some("V5Testpackage"))
    val customMgrAppId = AppId(ItObjects.customManagerAppName)
    Requests.deleteMarathonApp(customMgrAppId)
    Requests.waitForMarathonAppToDisappear(customMgrAppId)
    Requests.deleteRepository(None, Some(universeConverterUri))
    val _ = Requests.addRepository("Universe", "https://universe.mesosphere.com/repo")
  }

  private[this] def waitUntilCacheReloads(): Assertion = {
    val packageName = "helloworld-invalid"
    eventually(timeout(2.minutes), interval(10.seconds)) {
      val response = CosmosClient.submit(
        CosmosRequests.packageDescribeV3(rpc.v1.model.DescribeRequest(packageName, None))
      )
      assertResult(Status.Ok)(response.status)
      val Right(actualResponse) = decode[rpc.v3.model.DescribeResponse](response.contentString)
      assert(actualResponse.`package`.name == packageName)
    }
  }
} 
Example 94
Source File: GenerationContext.scala    From scalingua   with Apache License 2.0    5 votes
package ru.makkarpov.scalingua.plugin

import java.io.{BufferedReader, DataInputStream, FileInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import ru.makkarpov.scalingua.LanguageId
import sbt._

object GenerationContext {
  val HashMarker = "## Hash: ## "
  val ScalaHashPrefix = s"// $HashMarker"
}

case class GenerationContext(pkg: String, implicitCtx: Option[String], lang: LanguageId, hasTags: Boolean,
                             src: File, target: File, log: Logger)
{
  val srcHash = src.hashString

  def mergeContext(ctx: Option[String]): Option[String] = (implicitCtx, ctx) match {
    case (None,    None)    => None
    case (Some(x), None)    => Some(x)
    case (None,    Some(y)) => Some(y)
    case (Some(x), Some(y)) => Some(x + ":" + y)
  }

  def filePrefix = "/" + pkg.replace('.', '/') + (if (pkg.nonEmpty) "/" else "")

  def checkBinaryHash: Boolean = target.exists() && {
    val storedHash = {
      val is = new DataInputStream(new FileInputStream(target))
      try is.readUTF()
      catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally is.close()
    }

    srcHash == storedHash
  }

  def checkTextHash: Boolean = target.exists() && {
    import GenerationContext.HashMarker

    val storedHash = {
      val rd = new BufferedReader(new InputStreamReader(new FileInputStream(target), StandardCharsets.UTF_8))
      try {
        val l = rd.readLine()
        if ((l ne null) && l.contains(HashMarker)) {
          val idx = l.indexOf(HashMarker)
          l.substring(idx + HashMarker.length)
        } else ""
      } catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally rd.close()
    }

    srcHash == storedHash
  }
} 
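checkTextHash above expects the first line of a previously generated file to carry the source hash after HashMarker. A hypothetical write-side helper that produces such a header (the real generator lives elsewhere in the plugin):

def writeWithHash(ctx: GenerationContext, body: String): Unit = {
  val w = new java.io.OutputStreamWriter(
    new java.io.FileOutputStream(ctx.target), StandardCharsets.UTF_8)
  try {
    // First line becomes "// ## Hash: ## <hash>", which checkTextHash recovers
    w.write(GenerationContext.ScalaHashPrefix + ctx.srcHash + "\n")
    w.write(body)
  } finally w.close()
}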
Example 95
Source File: TaggedParser.scala    From scalingua   with Apache License 2.0    5 votes
package ru.makkarpov.scalingua.extract

import java.io.{File, FileInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import com.grack.nanojson.{JsonObject, JsonParser, JsonParserException}
import ru.makkarpov.scalingua.pofile.Message.{Plural, Singular}
import ru.makkarpov.scalingua.pofile._
import ru.makkarpov.scalingua.Compat.CollectionConverters._

object TaggedParser {
  val TaggedFileName = "tagged-messages.json"

  case class TaggedMessage(tag: String, msg: String, plural: Option[String], comment: Seq[String]) {
    def toMessage: Message = {
      val header = MessageHeader(comment, Nil, MessageLocation(TaggedFileName) :: Nil, MessageFlag.empty, Some(tag))

      plural match {
        case None => Singular(header, None, MultipartString(msg), MultipartString.empty)
        case Some(p) => Plural(header, None, MultipartString(msg), MultipartString(p),
          Seq(MultipartString.empty, MultipartString.empty))
      }
    }
  }

  
  def parse(f: File): Seq[TaggedMessage] = {
    val ret = Vector.newBuilder[TaggedMessage]

    try {
      val obj = {
        val r = new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8)
        try JsonParser.`object`().from(r) finally r.close()
      }

      for (k <- obj.keySet().asScala) obj.get(k) match {
        case v: JsonObject =>
          if (!v.has("message"))
            throw TaggedParseException(s"Object with key '$k' has no 'message' field")

          if (!v.isString("message"))
            throw TaggedParseException(s"Object with key '$k' has non-string 'message' field")

          val msg = v.getString("message")

          val plural =
            if (v.has("plural")) {
              if (!v.isString("plural"))
                throw TaggedParseException(s"Object with key '$k' has non-string 'plural' field")
              Some(v.getString("plural"))
            } else None

          val comments =
            if (v.has("comments")) {
              if (v.isString("comments")) v.getString("comments") :: Nil
              else v.getArray("comments").asScala.toList.map(_.asInstanceOf[String])
            } else Nil

          ret += TaggedMessage(k, msg, plural, comments)

        case v: String =>
          ret += TaggedMessage(k, v, None, Nil)
      }
    } catch {
      case e: JsonParserException =>
        throw new TaggedParseException(s"Tagged JSON syntax error at ${f.getCanonicalPath}:${e.getLinePosition}:${e.getCharPosition}", e)
    }

    ret.result()
  }
} 
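From the parsing logic above, tagged-messages.json maps each tag either to a bare string or to an object with a required message, an optional plural, and comments given as a string or an array. A hypothetical input, driven through the parser:

val json =
  """{
    |  "greeting": { "message": "Hello", "plural": "Hellos", "comments": "shown at login" },
    |  "farewell": "Goodbye"
    |}""".stripMargin

val tmp = java.io.File.createTempFile("tagged-messages", ".json")
val w = new java.io.OutputStreamWriter(new java.io.FileOutputStream(tmp), StandardCharsets.UTF_8)
try w.write(json) finally w.close()

// Yields TaggedMessage("greeting", "Hello", Some("Hellos"), List("shown at login"))
// and TaggedMessage("farewell", "Goodbye", None, Nil)
TaggedParser.parse(tmp)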
Example 96
Source File: UTF8ResourceBundle.scala    From typed-schema   with Apache License 2.0    5 votes
package ru.tinkoff.tschema.utils
import java.io.InputStreamReader
import java.nio.charset.StandardCharsets
import java.util.{Locale, PropertyResourceBundle, ResourceBundle}

object UTF8ResourceBundle {
  private[this] object Control extends ResourceBundle.Control {
    override def newBundle(
        baseName: String,
        locale: Locale,
        format: String,
        loader: ClassLoader,
        reload: Boolean
    ): ResourceBundle = {
      val bundleName   = toBundleName(baseName, locale)
      val resourceName = toResourceName(bundleName, "properties")

      def reloadStream = for {
        url        <- Option(loader.getResource(resourceName))
        connection <- Option(url.openConnection())
      } yield {
        connection.setUseCaches(false)
        connection.getInputStream
      }

      val stream = if (reload) reloadStream else Option(loader.getResourceAsStream(resourceName))

      stream.map { stream =>
        try {
          new PropertyResourceBundle(new InputStreamReader(stream, StandardCharsets.UTF_8))
        } finally {
          stream.close()
        }
      }.orNull
    }
  }

  def apply(bundleName: String, locale: Locale): ResourceBundle =
    ResourceBundle.getBundle(bundleName, locale, Control)
} 
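Usage sketch; a messages_de.properties resource on the classpath and the "welcome" key are hypothetical:

val bundle = UTF8ResourceBundle("messages", Locale.GERMANY)
// Values come back decoded as UTF-8, so no native2ascii escaping is required
val text = bundle.getString("welcome")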
Example 97
Source File: StringParserTest.scala    From MoVE   with Mozilla Public License 2.0    5 votes
package de.thm.move.loader.parser

import java.io.{ByteArrayInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import scala.util._
import de.thm.move.MoveSpec
import de.thm.move.loader.parser.PropertyParser._
import de.thm.move.loader.parser.ast._

class StringParserTest extends MoveSpec {
  val parser = new ModelicaParser()
  def parseString(str:String): String = {
    parser.stringLiteral(str)
  }

  "The parser for Modelica strings" should "parse simple strings" in {
    val s = "this is a super awesome test"
    true shouldBe true
  }

  "PropertyParser#transformEscapeChars" should
    "transform literal escape characters to ansi escape characters" in {
      val s = "this\\t\\tis a\\n test\\rmöb\\b"
      parser.transformEscapeChars(s) shouldBe "this\t\tis a\n test\rmöb\b"

      val s2 = "\\n\\n\\t"
      parser.transformEscapeChars(s2) shouldBe "\n\n\t"
  }

  it should "return the same string for strings without escape characters" in {
    val s = "this is awesome"
    parser.transformEscapeChars(s) shouldBe s
  }
} 
Example 98
Source File: ReadableByString.scala    From aloha   with MIT License    5 votes
package com.eharmony.aloha.io

import java.io.{InputStreamReader, ByteArrayOutputStream, Reader, InputStream}
import org.apache.commons.io.IOUtils


    final def fromReader(r: Reader): A = {
        try {
            val baos = new ByteArrayOutputStream  // Don't need to close.
            IOUtils.copy(r, baos, inputCharset)
            fromString(new String(baos.toByteArray))
        }
        finally {
            IOUtils.closeQuietly(r)
        }
    }
} 
Example 99
Source File: ContainerReadableByString.scala    From aloha   with MIT License    5 votes
package com.eharmony.aloha.io

import scala.language.higherKinds
import org.apache.commons.io.IOUtils
import java.lang.String
import java.io.{ByteArrayOutputStream, Reader, InputStreamReader, InputStream}


    def fromReader[A](r: Reader): C[A] = {
        try {
            val baos = new ByteArrayOutputStream  // Don't need to close.
            IOUtils.copy(r, baos, inputCharset)
            fromString[A](new String(baos.toByteArray))
        }
        finally {
            IOUtils.closeQuietly(r)
        }
    }
} 
Example 100
Source File: IOCommon.scala    From Swallow   with Apache License 2.0    5 votes
package com.intel.hibench.sparkbench.common

import java.io.{File, FileInputStream, IOException, InputStreamReader}
import java.util.Properties

import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.io.{NullWritable, Text}
import org.apache.hadoop.mapred.SequenceFileOutputFormat
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkContext, SparkException}

import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag

class IOCommon(val sc:SparkContext) {
   def load[T:ClassTag:TypeTag](filename:String, force_format:Option[String]=None) = {
     val input_format = force_format.getOrElse(
       IOCommon.getProperty("sparkbench.inputformat").getOrElse("Text"))

     input_format match {
       case "Text" =>
         sc.textFile(filename)

       case "Sequence" =>
         sc.sequenceFile[NullWritable, Text](filename).map(_._2.toString)

       case _ => throw new UnsupportedOperationException(s"Unknown input format: $input_format")
     }
   }

   def save(filename:String, data:RDD[_], prefix:String) = {
     val output_format = IOCommon.getProperty(prefix).getOrElse("Text")
     val output_format_codec =
       loadClassByName[CompressionCodec](IOCommon.getProperty(prefix + ".codec"))

     output_format match {
       case "Text" =>
         if (output_format_codec.isEmpty)  data.saveAsTextFile(filename)
         else data.saveAsTextFile(filename, output_format_codec.get)

       case "Sequence" =>
         val sequence_data = data.map(x => (NullWritable.get(), new Text(x.toString)))
         if (output_format_codec.isEmpty) {
           sequence_data.saveAsHadoopFile[SequenceFileOutputFormat[NullWritable, Text]](filename)
         } else {
           sequence_data.saveAsHadoopFile[SequenceFileOutputFormat[NullWritable, Text]](filename,
             output_format_codec.get)
         }

       case _ => throw new UnsupportedOperationException(s"Unknown output format: $output_format")
     }
   }

   def save(filename:String, data:RDD[_]):Unit = save(filename, data, "sparkbench.outputformat")

   private def loadClassByName[T](name:Option[String]) = {
     if (!name.isEmpty) Some(Class.forName(name.get)
       .newInstance.asInstanceOf[T].getClass) else None
   }

   private def callMethod[T, R](obj:T, method_name:String) =
     obj.getClass.getMethod(method_name).invoke(obj).asInstanceOf[R]
 }

object IOCommon {
   private val sparkbench_conf: HashMap[String, String] =
     getPropertiesFromFile(System.getenv("SPARKBENCH_PROPERTIES_FILES"))

   def getPropertiesFromFile(filenames: String): HashMap[String, String] = {
     val result = new HashMap[String, String]
     filenames.split(',').filter(_.stripMargin.length > 0).foreach { filename =>
       val file = new File(filename)
       require(file.exists, s"Properties file $file does not exist")
       require(file.isFile, s"Properties file $file is not a normal file")

       val inReader = new InputStreamReader(new FileInputStream(file), "UTF-8")
       try {
         val properties = new Properties()
         properties.load(inReader)
         result ++= properties.stringPropertyNames()
           .map(k => (k, properties(k).trim)).toMap
       } catch {
         case e: IOException =>
           val message = s"Failed when loading Sparkbench properties file $file"
           throw new SparkException(message, e)
       } finally {
         inReader.close()
       }
     }
     result.filter{case (key, value) => value.toLowerCase != "none"}
   }

   def getProperty(key:String):Option[String] = sparkbench_conf.get(key)

   def dumpProperties(): Unit = sparkbench_conf
       .foreach{case (key, value)=> println(s"$key\t\t$value")}
 } 
Example 101
Source File: tty_loop.scala    From libisabelle   with Apache License 2.0    5 votes
package isabelle


import java.io.{IOException, Writer, Reader, InputStreamReader, BufferedReader}


class TTY_Loop(writer: Writer, reader: Reader,
  writer_lock: AnyRef = new Object,
  interrupt: Option[() => Unit] = None)
{
  private val console_output = Future.thread[Unit]("console_output") {
    try {
      var result = new StringBuilder(100)
      var finished = false
      while (!finished) {
        var c = -1
        var done = false
        while (!done && (result.length == 0 || reader.ready)) {
          c = reader.read
          if (c >= 0) result.append(c.asInstanceOf[Char])
          else done = true
        }
        if (result.length > 0) {
          System.out.print(result.toString)
          System.out.flush()
          result.length = 0
        }
        else {
          reader.close()
          finished = true
        }
      }
    }
    catch { case e: IOException => case Exn.Interrupt() => }
  }

  private val console_input = Future.thread[Unit]("console_input") {
    val console_reader = new BufferedReader(new InputStreamReader(System.in))
    def body
    {
      try {
        var finished = false
        while (!finished) {
          console_reader.readLine() match {
            case null =>
              writer.close()
              finished = true
            case line =>
              writer_lock.synchronized {
                writer.write(line)
                writer.write("\n")
                writer.flush()
              }
          }
        }
      }
      catch { case e: IOException => case Exn.Interrupt() => }
    }
    interrupt match {
      case None => body
      case Some(int) => POSIX_Interrupt.handler { int() } { body }
    }
  }

  def join { console_output.join; console_input.join }

  def cancel { console_input.cancel }
} 
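A sketch wiring TTY_Loop to a subprocess console; the command line is hypothetical:

val proc = new ProcessBuilder("isabelle", "console").start()
val loop = new TTY_Loop(
  new java.io.OutputStreamWriter(proc.getOutputStream),
  new InputStreamReader(proc.getInputStream))
loop.join  // blocks until the output and input threads both finish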
Example 103
Source File: FileSystem.scala    From ohara   with Apache License 2.0    4 votes
package oharastream.ohara.client.filesystem

import java.io.{BufferedReader, BufferedWriter, IOException, InputStreamReader, OutputStreamWriter}
import java.nio.charset.{Charset, StandardCharsets}

import oharastream.ohara.client.filesystem.ftp.FtpFileSystem
import oharastream.ohara.client.filesystem.hdfs.HdfsFileSystem
import oharastream.ohara.client.filesystem.smb.SmbFileSystem
import oharastream.ohara.common.exception.FileSystemException

trait FileSystem extends oharastream.ohara.kafka.connector.storage.FileSystem {
  
  def readLines(path: String, encode: String = "UTF-8"): Array[String] = {
    val reader = new BufferedReader(new InputStreamReader(open(path), Charset.forName(encode)))
    try Iterator.continually(reader.readLine()).takeWhile(_ != null).toArray
    finally reader.close()
  }

  def wrap[T](f: () => T): T =
    try {
      f()
    } catch {
      case e: IOException           => throw new FileSystemException(e.getMessage, e)
      case e: IllegalStateException => throw new FileSystemException(e.getMessage, e)
    }
}

object FileSystem {
  def hdfsBuilder: HdfsFileSystem.Builder = HdfsFileSystem.builder
  def ftpBuilder: FtpFileSystem.Builder   = FtpFileSystem.builder
  def smbBuilder: SmbFileSystem.Builder   = SmbFileSystem.builder
}
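A hedged usage sketch; how the FileSystem instance is built depends on builder parameters that this listing does not show:

def firstLine(fs: FileSystem, path: String): Option[String] =
  // wrap converts I/O failures into FileSystemException
  fs.wrap(() => fs.readLines(path, "UTF-8").headOption)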