java.io.Reader Scala Examples

The following examples show how to use java.io.Reader in Scala. Each example is drawn from an open-source project; the source file, project, and license are noted above each listing.
Example 1
Source File: LineReaderInputSource.scala (from mimir, Apache License 2.0)
package mimir.util

import java.io.{Reader,File}
import org.jline.terminal.{Terminal,TerminalBuilder}
import org.jline.reader.{LineReader,LineReaderBuilder,EndOfFileException,UserInterruptException}
import com.typesafe.scalalogging.LazyLogging

class LineReaderInputSource(
  terminal: Terminal, 
  historyFile: String = LineReaderInputSource.defaultHistoryFile,
  prompt: String = "mimir> "
)
  extends Reader
  with LazyLogging
{
  val input: LineReader = 
    LineReaderBuilder.
      builder().
      terminal(terminal).
      variable(LineReader.HISTORY_FILE, historyFile).
      build()
  var pos: Int = 1;
  var curr: String = "";

  def close() = input.getTerminal.close
  def read(cbuf: Array[Char], offset: Int, len: Int): Int =
  {
    try { 
      var i:Int = 0;
      logger.debug(s"being asked for $len characters")
      while(i < len){
        while(pos >= curr.length){
          if(i > 0){ logger.debug(s"returning $i characters"); return i; }
          curr = input.readLine(prompt)
          if(curr == null){ logger.debug("Reached end"); return -1; }
          logger.debug(s"Read: '$curr'")
          pos = 0;
        }
        cbuf(i+offset) = curr.charAt(pos);
        i += 1; pos += 1;
      }
      logger.debug(s"Full!  Returning $i characters")
      return i;
    } catch {
      case _ : EndOfFileException => return -1;
      case _ : UserInterruptException => System.exit(0); return -1;
    }
  }


}

object LineReaderInputSource
{
  val defaultHistoryFile = System.getProperty("user.home") + File.separator + ".mimir_history"
} 
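A quick sketch of how this Reader might be consumed (hypothetical: the terminal setup and the read call are illustrative, not part of mimir):

import org.jline.terminal.TerminalBuilder

val terminal = TerminalBuilder.builder().build()
val src = new LineReaderInputSource(terminal)
val buf = new Array[Char](1024)
val n = src.read(buf, 0, buf.length) // blocks until the user submits a line; -1 at end of input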
Example 2
Source File: Impl.scala (from jsdependencies, BSD 3-Clause "New" or "Revised" License)
package org.scalajs.jsdependencies.core.json

import org.json.simple.JSONValue

import java.io.{Writer, Reader}
import java.util.function.{BiConsumer, Consumer}

private[json] object Impl {

  type Repr = Object

  def fromString(x: String): Repr = x
  def fromNumber(x: Number): Repr = x
  def fromBoolean(x: Boolean): Repr = java.lang.Boolean.valueOf(x)

  def fromList(x: List[Repr]): Repr = {
    val result = new java.util.LinkedList[Repr]
    x.foreach(result.add(_))
    result
  }

  def fromMap(x: Map[String, Repr]): Repr = {
    val result = new java.util.HashMap[String, Repr]
    for ((key, value) <- x)
      result.put(key, value)
    result
  }

  def toString(x: Repr): String = x.asInstanceOf[String]
  def toNumber(x: Repr): Number = x.asInstanceOf[Number]
  def toBoolean(x: Repr): Boolean =
    x.asInstanceOf[java.lang.Boolean].booleanValue()

  def toList(x: Repr): List[Repr] = {
    val builder = List.newBuilder[Repr]
    x.asInstanceOf[java.util.List[Repr]].forEach(new Consumer[Repr] {
      def accept(elem: Repr): Unit =
        builder += elem
    })
    builder.result()
  }

  def toMap(x: Repr): Map[String, Repr] = {
    val builder = Map.newBuilder[String, Repr]
    x.asInstanceOf[java.util.Map[String, Repr]].forEach(new BiConsumer[String, Repr] {
      def accept(key: String, value: Repr): Unit =
        builder += key -> value
    })
    builder.result()
  }

  def serialize(x: Repr): String =
    JSONValue.toJSONString(x)

  def serialize(x: Repr, writer: Writer): Unit =
    JSONValue.writeJSONString(x, writer)

  def deserialize(str: String): Repr = JSONValue.parseWithException(str)

  def deserialize(reader: Reader): Repr = JSONValue.parseWithException(reader)

} 
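A round-trip sketch (illustrative only: Impl is private[json], so code like this would have to live inside org.scalajs.jsdependencies.core.json):

import java.io.StringReader

val repr = Impl.fromMap(Map("a" -> Impl.fromNumber(1), "b" -> Impl.fromBoolean(true)))
val json = Impl.serialize(repr)                     // e.g. {"a":1,"b":true}
val back = Impl.deserialize(new StringReader(json)) // parse back from a Reader
val map  = Impl.toMap(back)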
Example 3
Source File: tty_loop.scala (from libisabelle, Apache License 2.0)
package isabelle


import java.io.{IOException, Writer, Reader, InputStreamReader, BufferedReader}


class TTY_Loop(writer: Writer, reader: Reader,
  writer_lock: AnyRef = new Object,
  interrupt: Option[() => Unit] = None)
{
  private val console_output = Future.thread[Unit]("console_output") {
    try {
      var result = new StringBuilder(100)
      var finished = false
      while (!finished) {
        var c = -1
        var done = false
        while (!done && (result.length == 0 || reader.ready)) {
          c = reader.read
          if (c >= 0) result.append(c.asInstanceOf[Char])
          else done = true
        }
        if (result.length > 0) {
          System.out.print(result.toString)
          System.out.flush()
          result.length = 0
        }
        else {
          reader.close()
          finished = true
        }
      }
    }
    catch { case e: IOException => case Exn.Interrupt() => }
  }

  private val console_input = Future.thread[Unit]("console_input") {
    val console_reader = new BufferedReader(new InputStreamReader(System.in))
    def body
    {
      try {
        var finished = false
        while (!finished) {
          console_reader.readLine() match {
            case null =>
              writer.close()
              finished = true
            case line =>
              writer_lock.synchronized {
                writer.write(line)
                writer.write("\n")
                writer.flush()
              }
          }
        }
      }
      catch { case e: IOException => case Exn.Interrupt() => }
    }
    interrupt match {
      case None => body
      case Some(int) => POSIX_Interrupt.handler { int() } { body }
    }
  }

  def join { console_output.join; console_input.join }

  def cancel { console_input.cancel }
} 
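A hedged wiring sketch (the child process is hypothetical, and TTY_Loop additionally needs Isabelle's Future and POSIX_Interrupt infrastructure on the classpath):

import java.io.{InputStreamReader, OutputStreamWriter}

val proc = new ProcessBuilder("cat").start()
val loop = new TTY_Loop(
  new OutputStreamWriter(proc.getOutputStream),
  new InputStreamReader(proc.getInputStream))
loop.join // pumps console input to the process and its output back to the console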
Example 4
Source File: ContainerReadableByString.scala (from aloha, MIT License)
package com.eharmony.aloha.io

import scala.language.higherKinds
import org.apache.commons.io.IOUtils
import java.lang.String
import java.io.{ByteArrayOutputStream, Reader, InputStreamReader, InputStream}


// The enclosing trait declaration and scaladoc were elided in this listing; a
// minimal reconstruction (assumed) so the snippet compiles. fromString comes
// from ContainerReadable (Example 6); inputCharset names the charset used below.
trait ContainerReadableByString[C[_]] extends ContainerReadable[C] {
    protected def inputCharset: String

    def fromReader[A](r: Reader): C[A] = {
        try {
            val baos = new ByteArrayOutputStream  // Don't need to close.
            IOUtils.copy(r, baos, inputCharset)
            fromString[A](new String(baos.toByteArray))
        }
        finally {
            IOUtils.closeQuietly(r)
        }
    }
} 
Example 5
Source File: ReadableByString.scala (from aloha, MIT License)
package com.eharmony.aloha.io

import java.io.{InputStreamReader, ByteArrayOutputStream, Reader, InputStream}
import org.apache.commons.io.IOUtils


// The enclosing trait declaration and scaladoc were elided in this listing; a
// minimal reconstruction (assumed) so the snippet compiles.
trait ReadableByString[A] {
    protected def inputCharset: String

    def fromString(s: String): A

    final def fromReader(r: Reader): A = {
        try {
            val baos = new ByteArrayOutputStream  // Don't need to close.
            IOUtils.copy(r, baos, inputCharset)
            fromString(new String(baos.toByteArray))
        }
        finally {
            IOUtils.closeQuietly(r)
        }
    }
} 
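A minimal concrete instance, assuming the trait members reconstructed above:

object LineCount extends ReadableByString[Int] {
  protected def inputCharset: String = "UTF-8"
  def fromString(s: String): Int = s.linesIterator.size
}
// LineCount.fromReader(new java.io.StringReader("a\nb")) == 2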
Example 6
Source File: ContainerReadable.scala (from aloha, MIT License)
package com.eharmony.aloha.io

import scala.language.higherKinds

import java.io.{File, InputStream, Reader}
import java.net.URL
import org.apache.commons.{vfs => vfs1, vfs2}

trait ContainerReadable[C[_]] {
    def fromString[A](s: String): C[A]
    def fromFile[A](f: File): C[A]
    def fromInputStream[A](is: InputStream): C[A]
    def fromUrl[A](u: URL): C[A]
    def fromReader[A](r: Reader): C[A]
    def fromVfs1[A](foVfs1: vfs1.FileObject): C[A]
    def fromVfs2[A](foVfs2: vfs2.FileObject): C[A]
    def fromResource[A](s: String): C[A]
    def fromClasspathResource[A](s: String): C[A]
} 
Example 7
Source File: DummySourceAcl.scala (from kafka-security-manager, MIT License)
package com.github.simplesteph.ksm.source

import java.io.{Reader, StringReader}

import com.github.simplesteph.ksm.TestFixtures._
import com.github.simplesteph.ksm.parser.CsvAclParser
import com.typesafe.config.Config

class DummySourceAcl extends SourceAcl {

  var noneNext = false
  var errorNext = false
  val csvAclParser: CsvAclParser = new CsvAclParser()

  // initial state
  val sar1 = Set(
    res1 -> acl1,
    res1 -> acl2,
    res2 -> acl3
  )

  // one deletion, one add
  val sar2 = Set(
    res1 -> acl1,
    res2 -> acl3,
    res3 -> acl2
  )

  // all gone
  val sar3 = Set()

  // all state changes
  val sars = List(sar1, sar2, sar3)
  // a states iterator, shifting its position changes current state
  private val sarsIterator = sars.iterator

  override def refresh(): Option[Reader] = {
    if (noneNext) {
      noneNext = false
      None
    } else if (errorNext) {
      errorNext = false
      throw new RuntimeException("triggered error")
    } else {
      Some(
        new StringReader(csvAclParser.formatAcls(sarsIterator.next().toList))
      )
    }
  }

  def setNoneNext(): Unit = {
    noneNext = true
  }

  def setErrorNext(): Unit = {
    errorNext = true
  }

  override def close(): Unit = ()

  
  override def configure(config: Config): Unit = ()
} 
Example 8
Source File: GitHubSourceAcl.scala (from kafka-security-manager, MIT License)
package com.github.simplesteph.ksm.source

import java.io.{Reader, StringReader}
import java.nio.charset.Charset
import java.util.Base64

import com.fasterxml.jackson.databind.ObjectMapper
import com.typesafe.config.Config
import org.slf4j.LoggerFactory
import skinny.http.{HTTP, HTTPException, Request, Response}

import scala.util.Try

class GitHubSourceAcl extends SourceAcl {

  private val log = LoggerFactory.getLogger(classOf[GitHubSourceAcl])

  override val CONFIG_PREFIX: String = "github"
  final val USER_CONFIG = "user"
  final val REPO_CONFIG = "repo"
  final val FILEPATH_CONFIG = "filepath"
  final val BRANCH_CONFIG = "branch"
  final val HOSTNAME_CONFIG = "hostname"
  final val AUTH_BASIC_CONFIG = "auth.basic"
  final val AUTH_TOKEN_CONFIG = "auth.token"

  var lastModified: Option[String] = None
  val objectMapper = new ObjectMapper()
  var user: String = _
  var repo: String = _
  var filepath: String = _
  var branch: String = _
  var hostname: String = _
  var basicOpt: Option[String] = _
  var tokenOpt: Option[String] = _

  
  // configure() and refresh() are elided in this listing. Judging from the
  // fields and imports above, refresh() fetches the ACL file through the
  // GitHub API and wraps the Base64-decoded content in a StringReader.

  override def close(): Unit = {
    // HTTP
  }
} 
Example 9
Source File: BitbucketServerSourceAcl.scala (from kafka-security-manager, MIT License)
package com.github.simplesteph.ksm.source

import java.io.{Reader, StringReader}
import java.nio.charset.Charset
import java.util.Base64

import com.fasterxml.jackson.databind.ObjectMapper
import com.typesafe.config.Config
import org.slf4j.LoggerFactory
import skinny.http.{HTTP, HTTPException, Request, Response}

class BitbucketServerSourceAcl extends SourceAcl {

  private val log = LoggerFactory.getLogger(classOf[BitbucketServerSourceAcl])

  override val CONFIG_PREFIX: String = "bitbucket-server"

  final val HOSTNAME_CONFIG = "hostname"
  final val PORT_CONFIG = "port"
  final val PROTOCOL_CONFIG = "protocol"
  final val PROJECT_CONFIG = "project"
  final val REPO_CONFIG = "repo"
  final val FILEPATH_CONFIG = "filepath"
  final val AUTH_USERNAME_CONFIG = "auth.username"
  final val AUTH_PASSWORD_CONFIG = "auth.password"
  final val BRANCH_CONFIG = "branch"

  var lastCommit: Option[String] = None
  val objectMapper = new ObjectMapper()
  var http: HTTP = HTTP

  var hostname: String = _
  var port: String = _
  var protocol: String = _
  var project: String = _
  var repo: String = _
  var filePath: String = _
  var username: String = _
  var password: String = _
  var branch: Option[String] = _

  
  // configure() and refresh() are elided in this listing. refresh() presumably
  // pulls the file from the Bitbucket Server REST API with the settings above
  // and returns it as a StringReader.

  override def close(): Unit = {
    // HTTP
  }
} 
Example 10
Source File: GitLabSourceAcl.scala (from kafka-security-manager, MIT License)
package com.github.simplesteph.ksm.source

import java.io.{Reader, StringReader}
import java.nio.charset.Charset
import java.util.Base64

import com.fasterxml.jackson.databind.ObjectMapper
import com.typesafe.config.Config
import org.slf4j.LoggerFactory
import skinny.http.{HTTP, HTTPException, Request, Response}

class GitLabSourceAcl extends SourceAcl {

  private val log = LoggerFactory.getLogger(classOf[GitLabSourceAcl])

  override val CONFIG_PREFIX: String = "gitlab"
  final val REPOID_CONFIG = "repoid"
  final val FILEPATH_CONFIG = "filepath"
  final val BRANCH_CONFIG = "branch"
  final val HOSTNAME_CONFIG = "hostname"
  final val ACCESSTOKEN_CONFIG = "accesstoken"

  var lastModified: Option[String] = None
  val objectMapper = new ObjectMapper()
  var repoid: String = _
  var filepath: String = _
  var branch: String = _
  var hostname: String = _
  var accessToken: String = _

  
  // configure() and refresh() are elided in this listing. refresh() presumably
  // fetches the file via the GitLab API using the access token above and
  // returns a StringReader over the decoded content.

  override def close(): Unit = {
    // HTTP
  }
} 
Example 11
Source File: LuceneDocToSparkRowpec.scala (from spark-lucenerdd, Apache License 2.0)
package org.zouzias.spark.lucenerdd

import java.io.{Reader, StringReader}

import org.apache.lucene.document.{Document, DoublePoint, Field, FloatPoint, IntPoint, LongPoint, StoredField, TextField}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.models.SparkScoreDoc
import org.zouzias.spark.lucenerdd.models.SparkScoreDoc.{DocIdField, ScoreField, ShardField}

import scala.collection.JavaConverters._

class LuceneDocToSparkRowpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach {

  val (score: Float, docId: Int, shardIndex: Int) = (1.0f, 1, 2)
  val float: Float = 20.001f
  val double: Double = 10.1000000001D

  def generate_doc(): Document = {
    val doc = new Document()

    // Add long field
    doc.add(new LongPoint("longField", 10))
    doc.add(new StoredField("longField", 10))

    doc.add(new FloatPoint("floatField", float))
    doc.add(new StoredField("floatField", float))

    doc.add(new IntPoint("intField", 9))
    doc.add(new StoredField("intField", 9))

    doc.add(new DoublePoint("doubleField", double))
    doc.add(new StoredField("doubleField", double))

    doc.add(new TextField("textField", "hello world", Field.Store.NO))
    doc.add(new StoredField("textField", "hello world"))

    doc
  }

  private val doc: Document = generate_doc()

  val sparkScoreDoc = SparkScoreDoc(score, docId, shardIndex, doc)


  "SparkScoreDoc.toRow" should "return correct score" in {
    val row = sparkScoreDoc.toRow()
    row.getFloat(row.fieldIndex(ScoreField)) should equal(score)
  }

  "SparkScoreDoc.toRow" should "return correct docId" in {
    val row = sparkScoreDoc.toRow()
    row.getInt(row.fieldIndex(DocIdField)) should equal(docId)
  }

  "SparkScoreDoc.toRow" should "return correct shard number" in {
    val row = sparkScoreDoc.toRow()
    row.getInt(row.fieldIndex(ShardField)) should equal(shardIndex)
  }

  "SparkScoreDoc.toRow" should "return correct number of fields" in {
    val row = sparkScoreDoc.toRow()
    row.getFields().asScala.count(_.fieldType().stored()) should equal(8)
  }

  "SparkScoreDoc.toRow" should "set correctly DoublePoint" in {
    val row = sparkScoreDoc.toRow()
    row.getDouble(row.fieldIndex("doubleField")) should equal(double)
  }

  "SparkScoreDoc.toRow" should "set correctly FloatPoint" in {
    val row = sparkScoreDoc.toRow()
    row.getFloat(row.fieldIndex("floatField")) should equal(float)
  }
} 
Example 12
Source File: MimirCommand.scala (from mimir, Apache License 2.0)
package mimir.parser

import fastparse._, NoWhitespace._
import fastparse.Parsed
import sparsity.parser.StreamParser
import java.io.Reader

sealed abstract class MimirCommand

case class SlashCommand(
  body: String
) extends MimirCommand

case class SQLCommand(
  body: MimirStatement
) extends MimirCommand

object MimirCommand
{
  def apply(input: Reader): StreamParser[MimirCommand] = 
    new StreamParser[MimirCommand](
      parse(_:Iterator[String], command(_), verboseFailures = true), 
      input
    )
  def apply(input: String): Parsed[MimirCommand] = 
    parse(input, command(_))
  
  def command[_:P]: P[MimirCommand] = P(

      slashCommand 
    | ( MimirSQL.statement.map { SQLCommand(_) } ~ ";" )
  )

  def slashCommand[_:P] = P(
    "/" ~/
    CharsWhile( 
      c => (c != '\n') && (c != '\r') 
    ).!.map { SlashCommand(_) } ~/
    CharsWhile( 
      c => (c == '\n') || (c == '\r') 
    ).?
  )
} 
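Parsing a single command from a String, using the second apply overload above (a hedged sketch; the output handling is illustrative):

MimirCommand("/help") match {
  case fastparse.Parsed.Success(SlashCommand(body), _) => println(s"slash command: $body")
  case fastparse.Parsed.Success(SQLCommand(stmt), _)   => println(s"sql statement: $stmt")
  case f: fastparse.Parsed.Failure                     => println(f.msg)
}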
Example 13
Source File: ReaderInputStream.scala (from better-files, MIT License)
package better.files

import java.io.{InputStream, Reader}
import java.nio.{ByteBuffer, CharBuffer}
import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction}

import scala.annotation.tailrec


// The class declaration and the encoderIn buffer were elided in this listing; a
// minimal reconstruction (assumed names and default size, modeled on commons-io's
// ReaderInputStream, which this class adapts):
class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int = 8192) extends InputStream {
  private[this] val EOF = -1 // end-of-stream sentinel (defined at the package level in better-files)

  private[this] val encoderIn  = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer]
  private[this] val encoderOut = ByteBuffer.allocate(bufferSize >> 4).flip().asInstanceOf[ByteBuffer]

  private[this] var lastCoderResult = CoderResult.UNDERFLOW
  private[this] var endOfInput      = false

  private[this] def fillBuffer() = {
    assert(!endOfInput)
    if (lastCoderResult.isUnderflow) {
      val position = encoderIn.compact().position()
      //  We don't use Reader#read(CharBuffer) here because it is more efficient to write directly to the underlying char array
      // since the default implementation copies data to a temporary char array anyway
      reader.read(encoderIn.array, position, encoderIn.remaining) match {
        case EOF => endOfInput = true
        case c   => encoderIn.position(position + c)
      }
      encoderIn.flip()
    }
    lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput)
    encoderOut.flip()
  }

  override def read(b: Array[Byte], off: Int, len: Int) = {
    if (len < 0 || off < 0 || (off + len) > b.length)
      throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len)
    if (len == 0) {
      0 // Always return 0 if len == 0
    } else {
      var read = 0
      @tailrec def loop(off: Int, len: Int): Unit =
        if (len > 0) {
          if (encoderOut.hasRemaining) {
            val c = encoderOut.remaining min len
            encoderOut.get(b, off, c)
            read += c
            loop(off + c, len - c)
          } else if (!endOfInput) {
            fillBuffer()
            loop(off, len)
          }
        }
      loop(off, len)
      if (read == 0 && endOfInput) EOF else read
    }
  }

  @tailrec final override def read() = {
    if (encoderOut.hasRemaining) {
      encoderOut.get & 0xff
    } else if (endOfInput) {
      EOF
    } else {
      fillBuffer()
      read()
    }
  }

  override def close() = reader.close()
} 
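A hedged round trip, assuming the constructor reconstructed above:

import java.io.StringReader
import java.nio.charset.StandardCharsets

val in    = new ReaderInputStream(new StringReader("héllo"), StandardCharsets.UTF_8.newEncoder, 8192)
val bytes = Iterator.continually(in.read()).takeWhile(_ != -1).map(_.toByte).toArray
assert(new String(bytes, StandardCharsets.UTF_8) == "héllo")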
Example 14
Source File: TlcConfigLexer.scala (from apalache, Apache License 2.0)
package at.forsyte.apalache.io.tlc.config

import java.io.Reader

import scala.util.matching.Regex
import scala.util.parsing.combinator.RegexParsers


// The object declaration was elided in this listing; a minimal reconstruction
// (assumed). Note that whiteSpace must not match '\n': see the comment on skip below.
object TlcConfigLexer extends RegexParsers {
  override val whiteSpace: Regex = "[ \t\f]+".r

  def apply(reader: Reader): List[TlcConfigToken] = parseAll(program, reader) match {
    case Success(result, _) => result
    case NoSuccess(msg, next) => throw new TlcConfigParseError(msg, next.pos)
  }

  def program: Parser[List[TlcConfigToken]] = skip ~> rep(token <~ skip) <~ eof

  def eof: Parser[String] = "\\z".r | failure("unexpected character")

  def token: Parser[TlcConfigToken] =
    positioned(
      constant | init | next | specification | invariant | property | constraint | actionConstraint |
        symmetry | leftArrow | eq | identifier
    )

  // it is important that linefeed is not a whiteSpace, as otherwise singleComment consumes the whole input!
  def skip: Parser[Unit] = rep(whiteSpace | singleComment | multiComment | linefeed) ^^^ Unit

  def linefeed: Parser[Unit] = "\n" ^^^ Unit

  def singleComment: Parser[Unit] = "\\*" ~ rep(not("\n") ~ ".".r) ^^^ Unit

  def multiComment: Parser[Unit] = "(*" ~ rep(not("*)") ~ "(?s).".r) ~ "*)" ^^^ Unit

  private def identifier: Parser[IDENT] = {
    "[a-zA-Z_][a-zA-Z0-9_]*".r ^^ { name => IDENT(name) }
  }

  private def constant: Parser[CONST] = {
    "CONSTANT(S|)".r  ^^ (_ => CONST())
  }

  private def init: Parser[INIT] = {
    "INIT"  ^^ (_ => INIT())
  }

  private def next: Parser[NEXT] = {
    "NEXT"  ^^ (_ => NEXT())
  }

  private def specification: Parser[SPECIFICATION] = {
    "SPECIFICATION" ^^ (_ => SPECIFICATION())
  }

  private def invariant: Parser[INVARIANT] = {
    "INVARIANT(S|)".r ^^ (_ => INVARIANT())
  }

  private def property: Parser[PROPERTY] = {
    "PROPERT(Y|IES)".r ^^ (_ => PROPERTY())
  }

  private def constraint: Parser[CONSTRAINT] = {
    "CONSTRAINT(S|)".r ^^ (_ => CONSTRAINT())
  }

  private def actionConstraint: Parser[ACTION_CONSTRAINT] = {
    "ACTION_CONSTRAINT(S|)".r ^^ (_ => ACTION_CONSTRAINT())
  }

  private def symmetry: Parser[SYMMETRY] = {
    "SYMMETRY".r ^^ (_ => SYMMETRY())
  }

  private def leftArrow: Parser[LEFT_ARROW] = {
    "<-" ^^ (_ => LEFT_ARROW())
  }

  private def eq: Parser[EQ] = {
    "=" ^^ (_ => EQ())
  }
} 
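A hedged usage sketch, assuming the object declaration reconstructed above:

import java.io.StringReader

val tokens = TlcConfigLexer(new StringReader("INIT Init\nNEXT Next\n"))
// => List(INIT(), IDENT(Init), NEXT(), IDENT(Next))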
Example 15
Source File: package.scala (from tethys, Apache License 2.0)
package tethys

import java.io.{Reader, Writer}

import com.fasterxml.jackson.core.JsonFactory
import tethys.readers.{FieldName, ReaderError}
import tethys.readers.tokens.{TokenIterator, TokenIteratorProducer}
import tethys.writers.tokens.{TokenWriter, TokenWriterProducer}

package object jackson {
  lazy val defaultJsonFactory: JsonFactory = {
    val f = new JsonFactory()
    f.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, false)
    f
  }


  implicit def jacksonTokenWriterProducer(implicit jsonFactory: JsonFactory = defaultJsonFactory): TokenWriterProducer = new TokenWriterProducer {
    override def forWriter(writer: Writer): TokenWriter = {
      new JacksonTokenWriter(jsonFactory.createGenerator(writer))
    }
  }

  implicit def jacksonTokenIteratorProducer(implicit jsonFactory: JsonFactory = defaultJsonFactory): TokenIteratorProducer = new TokenIteratorProducer {
    override def fromReader(reader: Reader): Either[ReaderError, TokenIterator] = {
      ReaderError.catchNonFatal(JacksonTokenIterator.fromFreshParser(jsonFactory.createParser(reader)))(FieldName())
    }
  }
} 
Example 16
Source File: package.scala (from tethys, Apache License 2.0)
import java.io.{Reader, StringReader, StringWriter, Writer}

import tethys.readers.{FieldName, ReaderError}
import tethys.readers.tokens.{TokenIterator, TokenIteratorProducer}
import tethys.writers.tokens.{TokenWriter, TokenWriterProducer}

import scala.Specializable.Group

package object tethys {

  final val specializations = new Group((Short, Int, Long, Float, Double, Boolean))

  implicit class JsonWriterOps[A](val a: A) extends AnyVal {
    def asJson(implicit jsonWriter: JsonWriter[A], tokenWriterProducer: TokenWriterProducer): String = {
      val stringWriter = new StringWriter()
      writeJson(tokenWriterProducer.forWriter(stringWriter))
      stringWriter.toString
    }

    def asJsonWith(jsonWriter: JsonWriter[A])(implicit tokenWriterProducer: TokenWriterProducer): String = {
      asJson(jsonWriter, tokenWriterProducer)
    }

    def writeJson(tokenWriter: TokenWriter)(implicit jsonWriter: JsonWriter[A]): Unit = {
      try jsonWriter.write(a, tokenWriter) finally {
        tokenWriter.flush()
      }
    }
  }

  implicit class WriterOps(val w: Writer) extends AnyVal {
    def toTokenWriter(implicit tokenWriterProducer: TokenWriterProducer): TokenWriter = tokenWriterProducer.forWriter(w)
  }

  implicit class StringReaderOps(val json: String) extends AnyVal {
    def jsonAs[A](implicit jsonReader: JsonReader[A], producer: TokenIteratorProducer): Either[ReaderError, A] = {
      new StringReader(json).readJson[A]
    }

    def toTokenIterator(implicit producer: TokenIteratorProducer): Either[ReaderError, TokenIterator] = {
      new StringReader(json).toTokenIterator
    }
  }

  implicit class ReaderReaderOps(val reader: Reader) extends AnyVal {
    def readJson[A](implicit jsonReader: JsonReader[A], producer: TokenIteratorProducer): Either[ReaderError, A] = {
      implicit val root: FieldName = FieldName()
      producer.fromReader(reader).right.flatMap(_.readJson[A])
    }

    def readJsonWith[A](jsonReader: JsonReader[A])(implicit producer: TokenIteratorProducer): Either[ReaderError, A] = {
      readJson[A](jsonReader, producer)
    }

    def toTokenIterator(implicit producer: TokenIteratorProducer): Either[ReaderError, TokenIterator] = {
      producer.fromReader(reader)
    }
  }

  implicit class TokenIteratorOps(val tokenIterator: TokenIterator) extends AnyVal {
    def readJson[A](implicit jsonReader: JsonReader[A]): Either[ReaderError, A] = {
      implicit val fieldName: FieldName = FieldName()
      ReaderError.catchNonFatal(jsonReader.read(tokenIterator))
    }
  }
} 
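A round trip through the syntax above, combined with the jackson backend from Example 15 for the implicit producers (a sketch assuming tethys' built-in readers and writers for primitives and collections):

import tethys._
import tethys.jackson._

val json = List(1, 2, 3).asJson          // "[1,2,3]"
val back = "[1,2,3]".jsonAs[List[Int]]   // Right(List(1, 2, 3))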
Example 17
Source File: CsvReader.scala (from kantan.csv, Apache License 2.0)
package kantan.csv

import java.io.Reader
import kantan.codecs.resource.ResourceIterator
import kantan.csv.engine.ReaderEngine


  def apply[A: HeaderDecoder](reader: Reader, conf: CsvConfiguration)(
    implicit e: ReaderEngine
  ): CsvReader[ReadResult[A]] = {
    val data: CsvReader[ReadResult[Seq[String]]] = e.readerFor(reader, conf)

    val decoder =
      if(conf.hasHeader && data.hasNext)
        data.next.flatMap(header => HeaderDecoder[A].fromHeader(header.map(_.trim)))
      else Right(HeaderDecoder[A].noHeader)

    decoder
      .map(d => data.map(_.flatMap(d.decode)))
      .left
      .map(error => ResourceIterator(ReadResult.failure(error)))
      .merge
  }
} 
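For comparison, the same engine is reachable through kantan.csv's ops syntax (a hedged sketch: asCsvReader accepts a java.io.Reader through the CsvSource type class, and rfc is the default RFC 4180 configuration):

import java.io.StringReader
import kantan.csv._
import kantan.csv.ops._

val rows = new StringReader("a,1\nb,2").asCsvReader[(String, Int)](rfc).toList
// List(Right(("a", 1)), Right(("b", 2)))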
Example 18
Source File: AnnotationIndexer.scala (from ike, Apache License 2.0)
package org.allenai.ike.index

import org.allenai.blacklab.index.complex.ComplexFieldProperty
import org.allenai.blacklab.index.complex.ComplexFieldProperty.SensitivitySetting
import org.allenai.blacklab.index.{ DocIndexerXmlHandlers, Indexer }
import org.xml.sax.Attributes

import java.io.Reader

class AnnotationIndexer(indexer: Indexer, fileName: String, reader: Reader)
    extends DocIndexerXmlHandlers(indexer, fileName, reader) {
  val mainProp = getMainProperty
  val punctProp = getPropPunct
  val posProp = addProperty("pos", SensitivitySetting.ONLY_INSENSITIVE)
  val chunkProp = addProperty("chunk", SensitivitySetting.ONLY_INSENSITIVE)
  val lemmaProp = addProperty("lemma", SensitivitySetting.ONLY_SENSITIVE)
  addHandler("/document", new DocumentElementHandler())
  addHandler("word", new WordHandlerBase() {
    def addAttribute(name: String, attrs: Attributes, prop: ComplexFieldProperty): Unit = {
      if (attrs.getValue(name) != null) prop.addValue(attrs.getValue(name))
    }
    def addPos(attrs: Attributes): Unit = addAttribute("pos", attrs, posProp)
    def addChunk(attrs: Attributes): Unit = addAttribute("chunk", attrs, chunkProp)
    def addLemma(attrs: Attributes): Unit = addAttribute("lemma", attrs, lemmaProp)
    def addAttrs(attrs: Attributes): Unit = {
      addPos(attrs)
      addChunk(attrs)
      addLemma(attrs)
    }
    override def startElement(uri: String, ln: String, qName: String, attrs: Attributes): Unit = {
      super.startElement(uri, ln, qName, attrs)
      addAttrs(attrs)
      punctProp.addValue(consumeCharacterContent)
    }
    override def endElement(uri: String, localName: String, qName: String): Unit = {
      super.endElement(uri, localName, qName)
      mainProp.addValue(consumeCharacterContent)
    }
  })
  addHandler("sentence", new InlineTagHandler)
} 
Example 19
Source File: RecordSplitter.scala (from PureCSV, Apache License 2.0)
package purecsv.unsafe

import java.io.Reader


object RecordSplitter {
  val defaultFieldSeparator = ','
  val defaultFieldSeparatorStr = defaultFieldSeparator.toString
  val defaultQuoteChar = '"'
  val defaultQuoteStr = defaultQuoteChar.toString
}

trait RecordSplitter[R] {

  
  // The abstract getRecords (and its scaladoc) was elided in this listing; a
  // reconstruction with assumed parameter names: skipLines records are dropped
  // from the front of the stream.
  def getRecords(r: R,
                 fieldSep:  Char = RecordSplitter.defaultFieldSeparator,
                 quoteChar: Char = RecordSplitter.defaultQuoteChar,
                 skipLines: Int = 0): Iterator[Array[String]]

  def getRecordsSkipHeader(r: R,
                           fieldSep:  Char = RecordSplitter.defaultFieldSeparator,
                           quoteChar: Char = RecordSplitter.defaultQuoteChar): Iterator[Array[String]] = {
    getRecords(r, fieldSep, quoteChar, 1)
  }
} 
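A naive Reader-based implementation matching the abstract getRecords reconstructed above (illustration only: it splits on the separator without honoring quotes):

import java.io.{BufferedReader, Reader}
import scala.collection.JavaConverters._

object NaiveSplitter extends RecordSplitter[Reader] {
  def getRecords(r: Reader, fieldSep: Char, quoteChar: Char, skipLines: Int): Iterator[Array[String]] =
    new BufferedReader(r).lines().iterator().asScala.drop(skipLines).map(_.split(fieldSep))
}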
Example 20
Source File: LookupTable.scala (from jigg, Apache License 2.0)
package jigg.util



import java.io.Reader

import breeze.linalg.DenseMatrix
import org.json4s.{DefaultFormats, _}
import org.json4s.jackson.JsonMethods
import org.json4s.JsonAST.JValue

class LookupTable(rawTable: JValue) {

  implicit private val formats = DefaultFormats
  private val tables = rawTable.extract[Map[String, Map[String, Map[String, String]]]]

  private val key2id = tables("_lookup")("_key2id")
  private val id2key = tables("_lookup")("_id2key")

  // For raw text
  def encodeCharacter(str: String): DenseMatrix[Float] = {
    val strArray = str.map{x =>
      // Note: For skipping unknown character, this encoder returns dummy id.
      key2id.getOrElse(x.toString, "3").toFloat
    }.toArray
    new DenseMatrix[Float](1, str.length, strArray)
  }

  // For list of words
  def encodeWords(words: Array[String]): DenseMatrix[Float] = {
    val wordsArray = words.map{x =>
      // Note: For skipping unknown words, this encoder returns dummy id.
      key2id.getOrElse(x.toString, "3").toFloat
    }
    new DenseMatrix[Float](1, words.length, wordsArray)
  }

  def decode(data: DenseMatrix[Float]): Array[String] =
    data.map{x => id2key.getOrElse(x.toInt.toString, "NONE")}.toArray

  def getId(key: String): Int = key2id.getOrElse(key, "0").toInt
  def getId(key: Char): Int = getId(key.toString)

  def getKey(id: Int): String = id2key.getOrElse(id.toString, "UNKNOWN")
}


object LookupTable {

  // Load from a path on the file system
  def fromFile(path: String) = mkTable(IOUtil.openIn(path))

  // Load from class loader
  def fromResource(path: String) = mkTable(IOUtil.openResourceAsReader(path))

  private def mkTable(input: Reader) = {
    val j = try { JsonMethods.parse(input) } finally { input.close }
    new LookupTable(j)
  }
} 
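Hypothetical usage (the path is an assumption; the JSON must carry the "_lookup"/"_key2id"/"_id2key" structure the constructor expects):

val table = LookupTable.fromFile("model/lookup.json") // hypothetical path
val ids   = table.encodeCharacter("abc") // 1 x 3 DenseMatrix[Float] of ids
val back  = table.decode(ids)            // back to strings; "NONE" for unknown ids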
Example 21
Source File: ArtifactSourceBackedMustacheResolver.scala (from rug, GNU General Public License v3.0)
package com.atomist.project.common.template

import java.io.{Reader, StringReader}

import com.atomist.source.ArtifactSource
import com.github.mustachejava.resolver.DefaultResolver
import com.typesafe.scalalogging.LazyLogging

class ArtifactSourceBackedMustacheResolver(artifactSource: ArtifactSource)
  extends DefaultResolver
    with LazyLogging{

  override def getReader(resourceName: String): Reader = {
    logger.debug(s"Need to return Reader for $resourceName")
    artifactSource.findFile(resourceName) match {
      case Some(f) => new StringReader(f.content)
      case _ => new StringReader(resourceName)
    }
  }
}
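Hypothetical usage (the ArtifactSource value and file name are assumptions; the fallback behavior is grounded in the match above):

def demo(artifactSource: ArtifactSource): Unit = {
  val resolver = new ArtifactSourceBackedMustacheResolver(artifactSource)
  val hit  = resolver.getReader("greeting.mustache") // the file's content, if present
  val miss = resolver.getReader("Hello {{name}}")    // falls back to the name itself
}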