java.io.Writer Scala Examples

The following examples show how to use java.io.Writer. Each example is drawn from an open-source project; the source file, originating project, and license are noted in the heading above it.
Example 1
Source File: UnivocityGenerator.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.csv

import java.io.Writer

import com.univocity.parsers.csv.CsvWriter

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._

private[csv] class UnivocityGenerator(
    schema: StructType,
    writer: Writer,
    options: CSVOptions) {
  private val writerSettings = options.asWriterSettings
  writerSettings.setHeaders(schema.fieldNames: _*)
  private val gen = new CsvWriter(writer, writerSettings)
  private var printHeader = options.headerFlag

  // A `ValueConverter` is responsible for converting a value of an `InternalRow` to `String`.
  // When the value is null, this converter should not be called.
  private type ValueConverter = (InternalRow, Int) => String

  // `ValueConverter`s for all values in the fields of the schema
  private val valueConverters: Array[ValueConverter] =
    schema.map(_.dataType).map(makeConverter).toArray

  private def makeConverter(dataType: DataType): ValueConverter = dataType match {
    case DateType =>
      (row: InternalRow, ordinal: Int) =>
        options.dateFormat.format(DateTimeUtils.toJavaDate(row.getInt(ordinal)))

    case TimestampType =>
      (row: InternalRow, ordinal: Int) =>
        options.timestampFormat.format(DateTimeUtils.toJavaTimestamp(row.getLong(ordinal)))

    case udt: UserDefinedType[_] => makeConverter(udt.sqlType)

    case dt: DataType =>
      (row: InternalRow, ordinal: Int) =>
        row.get(ordinal, dt).toString
  }

  private def convertRow(row: InternalRow): Seq[String] = {
    var i = 0
    val values = new Array[String](row.numFields)
    while (i < row.numFields) {
      if (!row.isNullAt(i)) {
        values(i) = valueConverters(i).apply(row, i)
      } else {
        values(i) = options.nullValue
      }
      i += 1
    }
    values
  }

  
  // Writes a single row, emitting the header row first if one is configured and not yet written.
  def write(row: InternalRow): Unit = {
    if (printHeader) {
      gen.writeHeaders()
    }
    gen.writeRow(convertRow(row): _*)
    printHeader = false
  }

  def close(): Unit = gen.close()

  def flush(): Unit = gen.flush()
} 
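The generator above wraps univocity's CsvWriter around an arbitrary java.io.Writer. As a minimal standalone sketch of that underlying pattern, using only the univocity API (no Spark internals; names are illustrative):

import java.io.StringWriter
import com.univocity.parsers.csv.{CsvWriter, CsvWriterSettings}

object CsvWriterSketch extends App {
  val out = new StringWriter()
  val settings = new CsvWriterSettings()
  settings.setHeaders("id", "name")       // like writerSettings.setHeaders(schema.fieldNames: _*) above
  val gen = new CsvWriter(out, settings)
  gen.writeHeaders()                      // written once, as the printHeader flag ensures above
  gen.writeRow("1", "alice")
  gen.writeRow("2", "bob")
  gen.close()
  print(out)                              // id,name / 1,alice / 2,bob
}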
Example 2
Source File: InteractiveChecksumLogger.scala    From coursier   with Apache License 2.0
package coursier.publish.checksum.logger

import java.io.{OutputStream, OutputStreamWriter, Writer}

import coursier.publish.checksum.ChecksumType
import coursier.publish.fileset.FileSet
import coursier.publish.logging.ProgressLogger

final class InteractiveChecksumLogger(out: Writer, verbosity: Int) extends ChecksumLogger {

  private val underlying = new ProgressLogger[Object](
    "Computed",
    "checksums",
    out
  )

  override def computingSet(id: Object, fs: FileSet): Unit =
    underlying.processingSet(id, Some(fs.elements.length))
  override def computing(id: Object, type0: ChecksumType, path: String): Unit = {
    if (verbosity >= 2)
      out.write(s"Computing ${type0.name} checksum of $path\n")
    underlying.processing(path, id)
  }
  override def computed(id: Object, type0: ChecksumType, path: String, errorOpt: Option[Throwable]): Unit = {
    if (verbosity >= 2)
      out.write(s"Computed ${type0.name} checksum of $path\n")
    underlying.processed(path, id, errorOpt.nonEmpty)
  }
  override def computedSet(id: Object, fs: FileSet): Unit =
    underlying.processedSet(id)

  override def start(): Unit =
    underlying.start()
  override def stop(keep: Boolean): Unit =
    underlying.stop(keep)
}

object InteractiveChecksumLogger {
  def create(out: OutputStream, verbosity: Int): InteractiveChecksumLogger =
    new InteractiveChecksumLogger(new OutputStreamWriter(out), verbosity)
} 
Example 3
Source File: InteractiveSignerLogger.scala    From coursier   with Apache License 2.0
package coursier.publish.signing.logger

import java.io.{OutputStream, OutputStreamWriter, Writer}

import coursier.publish.fileset.{FileSet, Path}
import coursier.publish.logging.ProgressLogger

final class InteractiveSignerLogger(out: Writer, verbosity: Int) extends SignerLogger {

  private val underlying = new ProgressLogger[Object](
    "Signed",
    "files",
    out,
    updateOnChange = true,
    doneEmoji = Some("\u270D\uFE0F ")
  )

  override def signing(id: Object, fileSet: FileSet): Unit = {
    underlying.processingSet(id, Some(fileSet.elements.length))
  }
  override def signed(id: Object, fileSet: FileSet): Unit =
    underlying.processedSet(id)

  override def signingElement(id: Object, path: Path): Unit = {
    if (verbosity >= 2)
      out.write(s"Signing ${path.repr}\n")
    underlying.processing(path.repr, id)
  }
  override def signedElement(id: Object, path: Path, excOpt: Option[Throwable]): Unit = {
    if (verbosity >= 2)
      out.write(s"Signed ${path.repr}\n")
    underlying.processed(path.repr, id, excOpt.nonEmpty)
  }

  override def start(): Unit =
    underlying.start()
  override def stop(keep: Boolean): Unit =
    underlying.stop(keep)
}

object InteractiveSignerLogger {
  def create(out: OutputStream, verbosity: Int): SignerLogger =
    new InteractiveSignerLogger(new OutputStreamWriter(out), verbosity)
} 
Example 4
Source File: Terminal.scala    From coursier   with Apache License 2.0
package coursier.cache.internal

import java.io.{File, Writer}

import scala.util.Try

object Terminal {

  // A few things were cut-n-pasted and adapted from
  // https://github.com/lihaoyi/Ammonite/blob/10854e3b8b454a74198058ba258734a17af32023/terminal/src/main/scala/ammonite/terminal/Utils.scala

  private lazy val pathedTput = if (new File("/usr/bin/tput").exists()) "/usr/bin/tput" else "tput"

  private lazy val ttyAvailable0: Boolean =
    new File("/dev/tty").exists()

  @deprecated("Should be made private at some point in future releases", "2.0.0-RC3")
  lazy val ttyAvailable: Boolean =
    ttyAvailable0

  @deprecated("Should be removed at some point in future releases", "2.0.0-RC3")
  def consoleDim(s: String): Option[Int] =
    if (ttyAvailable0) {
      import sys.process._
      val nullLog = new ProcessLogger {
        def out(s: => String): Unit = {}
        def err(s: => String): Unit = {}
        def buffer[T](f: => T): T = f
      }
      Try(Process(Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty")).!!(nullLog).trim.toInt).toOption
    } else
      None

  @deprecated("Should be removed at some point in future releases", "2.0.0-RC3")
  def consoleDimOrThrow(s: String): Int =
    if (ttyAvailable0) {
      import sys.process._
      val nullLog = new ProcessLogger {
        def out(s: => String): Unit = {}
        def err(s: => String): Unit = {}
        def buffer[T](f: => T): T = f
      }
      Process(Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty")).!!(nullLog).trim.toInt
    } else
      throw new Exception("TTY not available")

  private def consoleDimsFromTty(): Option[(Int, Int)] =
    if (ttyAvailable0) {
      import sys.process._
      val nullLog = new ProcessLogger {
        def out(s: => String): Unit = {}
        def err(s: => String): Unit = {}
        def buffer[T](f: => T): T = f
      }
      def valueOpt(s: String) =
        Try(Process(Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty")).!!(nullLog).trim.toInt).toOption

      for {
        width <- valueOpt("cols")
        height <- valueOpt("lines")
      } yield (width, height)
    } else
      None

  private lazy val isWindows = System.getProperty("os.name").toLowerCase(java.util.Locale.ROOT).contains("windows")

  private def fromJLine(): Option[(Int, Int)] =
    if (isWindows) {
      val size = io.github.alexarchambault.windowsansi.WindowsAnsi.terminalSize()
      Some((size.getWidth, size.getHeight))
    } else
      None

  def consoleDims(): (Int, Int) =
    consoleDimsFromTty()
      .orElse(fromJLine())
      .getOrElse {
        // throw instead?
        (80, 25)
      }

  implicit class Ansi(val output: Writer) extends AnyVal {
    private def control(n: Int, c: Char): Unit =
      output.write("\u001b[" + n + c)

    
    // Emits the ANSI "erase in line" sequence ESC[nK: n = 0 clears to end of line, 1 to start, 2 the whole line.
    def clearLine(n: Int): Unit =
      control(n, 'K')
  }

} 
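A usage sketch for the Ansi enrichment above (hypothetical, assuming the coursier classes are on the classpath): combining a carriage return with clearLine lets a progress printer overwrite its own line.

import java.io.{OutputStreamWriter, Writer}
import coursier.cache.internal.Terminal.Ansi

object AnsiDemo extends App {
  val out: Writer = new OutputStreamWriter(System.out)
  out.write("downloading...")
  out.flush()
  out.write("\r")    // move the cursor back to the start of the line
  out.clearLine(2)   // emits "\u001b[2K": erase the entire line
  out.write("done\n")
  out.flush()
}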
Example 5
Source File: FallbackRefreshDisplay.scala    From coursier   with Apache License 2.0
package coursier.cache.loggers

import java.io.Writer

import coursier.cache.loggers.RefreshInfo.{CheckUpdateInfo, DownloadInfo}

import scala.concurrent.duration.{Duration, DurationInt}

class FallbackRefreshDisplay(quiet: Boolean = false) extends RefreshDisplay {

  private var previous = Set.empty[String]
  @volatile private var lastInstantOpt = Option.empty[Long]

  private def describe(info: RefreshInfo): String =
    info match {
      case downloadInfo: DownloadInfo =>
        val pctOpt = downloadInfo.fraction.map(100.0 * _)

        if (downloadInfo.length.isEmpty && downloadInfo.downloaded == 0L)
          ""
        else
          s"(${pctOpt.map(pct => f"$pct%.2f %%, ").mkString}${downloadInfo.downloaded}${downloadInfo.length.map(" / " + _).mkString})"

      case _: CheckUpdateInfo =>
        "Checking for updates"
    }

  val refreshInterval: Duration =
    1.second

  override def newEntry(out: Writer, url: String, info: RefreshInfo): Unit = {
    lastInstantOpt = Some(System.currentTimeMillis())

    if (!quiet) {
      val msg = info match {
        case _: DownloadInfo =>
          s"Downloading $url\n"
        case _: CheckUpdateInfo =>
          s"Checking $url\n"
      }
      out.write(msg)
      out.flush()
    }
  }

  override def removeEntry(out: Writer, url: String, info: RefreshInfo): Unit = {
    lastInstantOpt = Some(System.currentTimeMillis())

    if (!quiet) {
      val prefix = if (info.watching) "(watching) " else ""
      val msg = info match {
        case _: DownloadInfo =>
          s"Downloaded $url\n"
        case _: CheckUpdateInfo =>
          s"Checked $url\n"
      }

      out.write(prefix + msg)
      out.flush()
    }
  }

  def update(
    out: Writer,
    done: Seq[(String, RefreshInfo)],
    downloads: Seq[(String, RefreshInfo)],
    changed: Boolean
  ): Unit = {

    val now = System.currentTimeMillis()

    // display an update only if the last message is more than 5 s old
    if (lastInstantOpt.exists(now > _ + 5000L)) {
      val downloads0 = downloads.filter { case (url, _) => previous(url) }
      if (downloads0.nonEmpty) {
        out.write("Still downloading:\n")
        for ((url, info) <- downloads0) {
          assert(info != null, s"Incoherent state ($url)")
          out.write(s"$url ${describe(info)}\n")
        }

        out.write("\n")

        out.flush()
        lastInstantOpt = Some(now)
      }
    }

    previous = previous ++ downloads.map(_._1)
  }
  override def stop(out: Writer): Unit = {
    previous = Set.empty
    lastInstantOpt = None
  }
} 
Example 6
Source File: RefreshDisplay.scala    From coursier   with Apache License 2.0
package coursier.cache.loggers

import java.io.Writer
import java.util.Locale

import scala.concurrent.duration.Duration

trait RefreshDisplay {

  // note about concurrency: newEntry / removeEntry may be called concurrently to update, and the update arguments
  // may be out-of-sync with them
  def newEntry(out: Writer, url: String, info: RefreshInfo): Unit = ()
  def removeEntry(out: Writer, url: String, info: RefreshInfo): Unit = ()

  def sizeHint(n: Int): Unit = ()
  def update(
    out: Writer,
    done: Seq[(String, RefreshInfo)],
    downloads: Seq[(String, RefreshInfo)],
    changed: Boolean
  ): Unit
  def stop(out: Writer): Unit = ()

  def refreshInterval: Duration

}

object RefreshDisplay {

  private lazy val isWindows: Boolean =
    sys.props
      .get("os.name")
      .map(_.toLowerCase(Locale.ROOT))
      .exists(_.contains("windows"))

  def truncated(s: String, width: Int): String =
    if (s.length <= width)
      s
    else if (isWindows)
      // seems unicode character '…' isn't fine in Windows terminal, plus width is actually shorter (scrollbar?)
      s.take(width - 4) + "..."
    else
      s.take(width - 1) + "…"

} 
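A quick check of the truncation helper (expected values assume the stated platform):

RefreshDisplay.truncated("0123456789", 20) // "0123456789" (fits, returned unchanged)
RefreshDisplay.truncated("0123456789", 8)  // "0123456…" on most platforms, "0123..." on Windows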
Example 7
Source File: WriterOutputStream.scala    From better-files   with MIT License
package better.files

import java.io.{OutputStream, Writer}
import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction}
import java.nio.{ByteBuffer, CharBuffer}

import scala.annotation.tailrec


class WriterOutputStream(
    writer: Writer,
    decoder: CharsetDecoder,
    bufferSize: Int,
    flushImmediately: Boolean
) extends OutputStream {

  // CharBuffer used as the decoder's output
  private[this] val decoderOut = CharBuffer.allocate(bufferSize)

  // ByteBuffer used as the decoder's input; it can stay small, as it only stages incoming bytes
  private[this] val decoderIn = ByteBuffer.allocate(bufferSize >> 4)

  def this(
      writer: Writer,
      bufferSize: Int = DefaultBufferSize,
      flushImmediately: Boolean = false
  )(implicit
      charset: Charset = DefaultCharset
  ) =
    this(
      writer = writer,
      decoder = charset.newDecoder
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE)
        .replaceWith("?"),
      bufferSize = bufferSize,
      flushImmediately = flushImmediately
    )

  override def write(b: Array[Byte], off: Int, len: Int) = {
    @tailrec def loop(off: Int, len: Int): Unit =
      if (len > 0) {
        val c = decoderIn.remaining min len
        decoderIn.put(b, off, c)
        processInput(endOfInput = false)
        loop(off + c, len - c)
      }
    loop(off, len)
    if (flushImmediately) flushOutput()
  }

  override def write(b: Int) = write(Array(b.toByte))

  override def flush() = {
    flushOutput()
    writer.flush()
  }

  override def close() = {
    processInput(endOfInput = true)
    flushOutput()
    writer.close()
  }

  private[this] def processInput(endOfInput: Boolean) = {
    decoderIn.flip()
    @tailrec def loop(): Unit = {
      val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput)
      if (coderResult.isOverflow) {
        flushOutput()
        loop()
      } else {
        assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters")
      }
    }
    loop()
    decoderIn.compact()
  }

  private[this] def flushOutput(): Unit = {
    val p = decoderOut.position()
    if (p > 0) {
      writer.write(decoderOut.array, 0, p)
      val _ = decoderOut.rewind()
    }
  }
} 
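A usage sketch for the class above: bytes written to the OutputStream are decoded with the configured charset and forwarded to the wrapped Writer, so multi-byte sequences survive the round trip.

import java.io.StringWriter
import java.nio.charset.StandardCharsets
import better.files.WriterOutputStream

object WriterOutputStreamDemo extends App {
  val sink = new StringWriter()
  val out = new WriterOutputStream(sink, flushImmediately = true)(StandardCharsets.UTF_8)
  out.write("héllo".getBytes(StandardCharsets.UTF_8)) // the two-byte 'é' is decoded correctly
  out.close()
  assert(sink.toString == "héllo")
}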
Example 8
Source File: Compiler.scala    From midas   with BSD 3-Clause "New" or "Revised" License
// See LICENSE for license details.

package midas

import passes.Utils.writeEmittedCircuit

import chisel3.{Data, Bundle, Record, Clock, Bool}
import chisel3.internal.firrtl.Port
import firrtl.ir.Circuit
import firrtl.{Transform, CircuitState}
import firrtl.annotations.Annotation
import firrtl.CompilerUtils.getLoweringTransforms
import firrtl.passes.memlib._
import freechips.rocketchip.config.{Parameters, Field}
import java.io.{File, FileWriter, Writer}
import logger._

// Directory into which output files are dumped. Set by dir argument
case object OutputDir extends Field[File]

// Compiler for Midas Transforms
private class MidasCompiler extends firrtl.Compiler {
  def emitter = new firrtl.LowFirrtlEmitter
  def transforms =
    getLoweringTransforms(firrtl.ChirrtlForm, firrtl.MidForm) ++
    Seq(new InferReadWrite) ++
    getLoweringTransforms(firrtl.MidForm, firrtl.LowForm)
}

// These next two compilers split LFO from the rest of the lowering
// compilers to schedule around the presence of internal & non-standard WIR
// nodes (Dshlw) present after LFO, which custom transforms can't handle
private class HostTransformCompiler extends firrtl.Compiler {
  def emitter = new firrtl.LowFirrtlEmitter
  def transforms =
    Seq(new firrtl.IRToWorkingIR,
        new firrtl.ResolveAndCheck,
        new firrtl.HighFirrtlToMiddleFirrtl) ++
    getLoweringTransforms(firrtl.MidForm, firrtl.LowForm)
}

// Custom transforms have been scheduled -> do the final lowering
private class LastStageVerilogCompiler extends firrtl.Compiler {
  def emitter = new firrtl.VerilogEmitter
  def transforms = Seq(new firrtl.LowFirrtlOptimization,
                       new firrtl.transforms.RemoveReset)
}

object MidasCompiler {
  def apply(
      chirrtl: Circuit,
      targetAnnos: Seq[Annotation],
      io: Seq[(String, Data)],
      dir: File,
      targetTransforms: Seq[Transform], // Run pre-MIDAS transforms, on the target RTL
      hostTransforms: Seq[Transform]    // Run post-MIDAS transformations
    )
     (implicit p: Parameters): CircuitState = {
    val midasAnnos = Seq(
      firrtl.TargetDirAnnotation(dir.getPath()),
      InferReadWriteAnnotation)
    val midasTransforms = new passes.MidasTransforms(io)(p alterPartial { case OutputDir => dir })
    val compiler = new MidasCompiler
    val midas = compiler.compile(firrtl.CircuitState(
      chirrtl, firrtl.ChirrtlForm, targetAnnos ++ midasAnnos),
      targetTransforms :+ midasTransforms)

    val postHostTransforms = new HostTransformCompiler().compile(midas, hostTransforms)
    val result = new LastStageVerilogCompiler().compileAndEmit(postHostTransforms)

    writeEmittedCircuit(result, new File(dir, s"FPGATop.v"))
    result
  }

  // Unlike above, elaborates the target locally, before constructing the target IO Record.
  def apply[T <: chisel3.core.UserModule](
      w: => T,
      dir: File,
      targetTransforms: Seq[Transform] = Seq.empty,
      hostTransforms: Seq[Transform] = Seq.empty
    )
     (implicit p: Parameters): CircuitState = {
    dir.mkdirs
    lazy val target = w
    val circuit = chisel3.Driver.elaborate(() => target)
    val chirrtl = firrtl.Parser.parse(chisel3.Driver.emit(circuit))
    val io = target.getPorts map (p => p.id.instanceName -> p.id)
    apply(chirrtl, circuit.annotations.map(_.toFirrtl), io, dir, targetTransforms, hostTransforms)
  }
} 
Example 9
Source File: GoldenGateCompilerPhase.scala    From midas   with BSD 3-Clause "New" or "Revised" License
// See LICENSE for license details.

package midas.stage

import midas._

import firrtl.ir.Circuit
import firrtl.{Transform, CircuitState, AnnotationSeq}
import firrtl.annotations.{Annotation}
import firrtl.options.{Phase, TargetDirAnnotation}
import firrtl.stage.{FirrtlCircuitAnnotation}
import firrtl.CompilerUtils.getLoweringTransforms
import firrtl.passes.memlib._

import freechips.rocketchip.config.{Parameters, Config, Field}
import freechips.rocketchip.util.{ParsedInputNames}
import java.io.{File, FileWriter, Writer}
import logger._

class GoldenGateCompilerPhase extends Phase with ConfigLookup {

  def transform(annotations: AnnotationSeq): AnnotationSeq = {
    val allCircuits = annotations.collect({ case FirrtlCircuitAnnotation(circuit) => circuit })
    require(allCircuits.size == 1, "Golden Gate can only process a single Firrtl Circuit at a time.")
    val circuit = allCircuits.head

    val targetDir = annotations.collectFirst({ case TargetDirAnnotation(targetDir) => new File(targetDir) }).get
    val configPackage = annotations.collectFirst({ case ConfigPackageAnnotation(p) => p }).get
    val configString  = annotations.collectFirst({ case ConfigStringAnnotation(s) => s }).get
    val pNames = ParsedInputNames("UNUSED", "UNUSED", "UNUSED", configPackage, configString)

    // MIDAS Legacy requirement -- GGRELEASE: Remove
    val io: Seq[(String, chisel3.Data)] = Seq.empty

    val midasAnnos = Seq(InferReadWriteAnnotation)

    implicit val p = getParameters(pNames).alterPartial({
      case OutputDir => targetDir
    })
    // Run prior to Golden Gate transforms (target-time)
    val targetTransforms = p(TargetTransforms).flatMap(transformCtor => transformCtor(p))
    // Run after Golden Gate transformations (host-time)
    val hostTransforms = p(HostTransforms).flatMap(transformCtor => transformCtor(p))
    val midasTransforms = new passes.MidasTransforms(io)
    val compiler = new MidasCompiler
    val midas = compiler.compile(firrtl.CircuitState(
      circuit, firrtl.HighForm, annotations ++ midasAnnos),
      targetTransforms :+ midasTransforms)

    val postHostTransforms = new HostTransformCompiler().compile(midas, hostTransforms)
    val result = new LastStageVerilogCompiler().compileAndEmit(postHostTransforms, Seq())
    result.annotations
  }

} 
Example 10
Source File: RuntimeConfigGenerationPhase.scala    From midas   with BSD 3-Clause "New" or "Revised" License
// See LICENSE for license details.

package midas.stage

import midas.{OutputDir}
import midas.widgets.{SerializableBridgeAnnotation}

import freechips.rocketchip.util.{ParsedInputNames}

import firrtl.{Transform, CircuitState, AnnotationSeq}
import firrtl.options.{Phase, TargetDirAnnotation}

import java.io.{File, FileWriter, Writer}
import logger._

class RuntimeConfigGenerationPhase extends Phase with ConfigLookup {

  def transform(annotations: AnnotationSeq): AnnotationSeq = {
    val targetDir = annotations.collectFirst({ case TargetDirAnnotation(targetDir) => new File(targetDir) }).get
    val configPackage = annotations.collectFirst({ case ConfigPackageAnnotation(p) => p }).get
    val configString  = annotations.collectFirst({ case ConfigStringAnnotation(s) => s }).get
    val runtimeConfigName  = annotations.collectFirst({ case RuntimeConfigNameAnnotation(s) => s }).get

    val pNames = ParsedInputNames("UNUSED", "UNUSED", "UNUSED", configPackage, configString)

    implicit val p = getParameters(pNames).alterPartial({
      case OutputDir => targetDir
    })

    val fasedBridgeAnnos = annotations.collect({
      case anno @ SerializableBridgeAnnotation(_,_,className,_)
        if className == classOf[midas.models.FASEDMemoryTimingModel].getName => anno
    })
    // Since presently all memory models share the same runtime configuration, grab only the first
    // FASED BridgeAnnotation and use it to elaborate a memory model
    fasedBridgeAnnos.headOption.map({ anno =>
      // Here we're spoofing elaboration that occurs in FPGATop, which assumes ExtractBridges has been run
      lazy val memModel = anno.toIOAnnotation("").elaborateWidget.asInstanceOf[midas.models.FASEDMemoryTimingModel]
      chisel3.Driver.elaborate(() => memModel)
      memModel.getSettings(runtimeConfigName)
    })
    annotations
  }
} 
Example 11
Source File: InteractiveUploadLogger.scala    From coursier   with Apache License 2.0
package coursier.publish.upload.logger

import java.io.{OutputStream, OutputStreamWriter, Writer}

import com.lightbend.emoji.ShortCodes.Defaults.defaultImplicit.emoji
import coursier.publish.fileset.FileSet
import coursier.publish.logging.ProgressLogger
import coursier.publish.upload.Upload

// FIXME Would have been better if dummy was passed by the Upload instance when calling the methods of UploadLogger
final class InteractiveUploadLogger(out: Writer, dummy: Boolean, isLocal: Boolean) extends UploadLogger {

  private val underlying = new ProgressLogger[Object](
    if (isLocal) {
      if (dummy)
        "Would have written"
      else
        "Wrote"
    } else {
      if (dummy)
        "Would have uploaded"
      else
        "Uploaded"
    },
    "files",
    out,
    doneEmoji = emoji("truck").map(_.toString())
  )

  override def uploadingSet(id: Object, fileSet: FileSet): Unit =
    underlying.processingSet(id, Some(fileSet.elements.length))
  override def uploadedSet(id: Object, fileSet: FileSet): Unit =
    underlying.processedSet(id)

  override def uploading(url: String, idOpt: Option[Object], totalOpt: Option[Long]): Unit =
    for (id <- idOpt)
      underlying.processing(url, id)

  override def progress(url: String, idOpt: Option[Object], uploaded: Long, total: Long): Unit =
    for (id <- idOpt)
      underlying.progress(url, id, uploaded, total)
  override def uploaded(url: String, idOpt: Option[Object], errorOpt: Option[Upload.Error]): Unit =
    for (id <- idOpt)
      underlying.processed(url, id, errorOpt.nonEmpty)

  override def start(): Unit =
    underlying.start()
  override def stop(keep: Boolean): Unit =
    underlying.stop(keep)
}

object InteractiveUploadLogger {
  def create(out: OutputStream, dummy: Boolean, isLocal: Boolean): UploadLogger =
    new InteractiveUploadLogger(new OutputStreamWriter(out), dummy, isLocal)
} 
Example 12
Source File: UnivocityGenerator.scala    From mimir   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.ubodin.csv

import java.io.Writer

import com.univocity.parsers.csv.CsvWriter

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._

private[csv] class UnivocityGenerator(
    schema: StructType,
    writer: Writer,
    options: CSVOptions) {
  private val writerSettings = options.asWriterSettings
  writerSettings.setHeaders(schema.fieldNames: _*)
  private val gen = new CsvWriter(writer, writerSettings)
  private var printHeader = options.headerFlag

  // A `ValueConverter` is responsible for converting a value of an `InternalRow` to `String`.
  // When the value is null, this converter should not be called.
  private type ValueConverter = (InternalRow, Int) => String

  // `ValueConverter`s for all values in the fields of the schema
  private val valueConverters: Array[ValueConverter] =
    schema.map(_.dataType).map(makeConverter).toArray

  private def makeConverter(dataType: DataType): ValueConverter = dataType match {
    case DateType =>
      (row: InternalRow, ordinal: Int) =>
        options.dateFormat.format(DateTimeUtils.toJavaDate(row.getInt(ordinal)))

    case TimestampType =>
      (row: InternalRow, ordinal: Int) =>
        options.timestampFormat.format(DateTimeUtils.toJavaTimestamp(row.getLong(ordinal)))

    case udt: UserDefinedType[_] => makeConverter(udt.sqlType)

    case dt: DataType =>
      (row: InternalRow, ordinal: Int) =>
        row.get(ordinal, dt).toString
  }

  private def convertRow(row: InternalRow): Seq[String] = {
    var i = 0
    val values = new Array[String](row.numFields)
    while (i < row.numFields) {
      if (!row.isNullAt(i)) {
        values(i) = valueConverters(i).apply(row, i)
      } else {
        values(i) = options.nullValue
      }
      i += 1
    }
    values
  }

  
  // Writes a single row, emitting the header row first if one is configured and not yet written.
  def write(row: InternalRow): Unit = {
    if (printHeader) {
      gen.writeHeaders()
    }
    gen.writeRow(convertRow(row): _*)
    printHeader = false
  }

  def close(): Unit = gen.close()

  def flush(): Unit = gen.flush()
} 
Example 13
Source File: MetricFamilySamplesEntity.scala    From prometheus-akka-http   with MIT License
package com.lonelyplanet.prometheus.api

import java.io.{StringWriter, Writer}
import java.util

import akka.http.scaladsl.marshalling.{ToEntityMarshaller, Marshaller}
import akka.http.scaladsl.model._
import io.prometheus.client.Collector.MetricFamilySamples
import io.prometheus.client.CollectorRegistry
import io.prometheus.client.exporter.common.TextFormat

case class MetricFamilySamplesEntity(samples: util.Enumeration[MetricFamilySamples])

object MetricFamilySamplesEntity {
  private val mediaTypeParams = Map("version" -> "0.0.4")
  private val mediaType = MediaType.customWithFixedCharset("text", "plain", HttpCharsets.`UTF-8`, params = mediaTypeParams)

  def fromRegistry(collectorRegistry: CollectorRegistry): MetricFamilySamplesEntity = {
    MetricFamilySamplesEntity(collectorRegistry.metricFamilySamples())
  }

  def toPrometheusTextFormat(e: MetricFamilySamplesEntity): String = {
    val writer: Writer = new StringWriter()
    TextFormat.write004(writer, e.samples)

    writer.toString
  }

  implicit val metricsFamilySamplesMarshaller: ToEntityMarshaller[MetricFamilySamplesEntity] = {
    Marshaller.withFixedContentType(mediaType) { s =>
      HttpEntity(mediaType, toPrometheusTextFormat(s))
    }
  }

} 
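A hypothetical Akka HTTP route using the marshaller above (route name and setup are illustrative):

import akka.http.scaladsl.server.Directives._
import io.prometheus.client.CollectorRegistry

val metricsRoute =
  path("metrics") {
    get {
      // the implicit marshaller renders the samples as text/plain; version=0.0.4
      complete(MetricFamilySamplesEntity.fromRegistry(CollectorRegistry.defaultRegistry))
    }
  }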
Example 14
Source File: CustomWriter.scala    From asura   with MIT License
package asura.core.script

import java.io.Writer

class CustomWriter extends Writer {

  var log: (CharSequence) => Unit = null

  override def write(cbuf: Array[Char], off: Int, len: Int): Unit = {
    if (null != log) {
      if (!(len == 1 && cbuf(off).equals('\n'))) {
        log(cbuf.subSequence(off, off + len))
      }
    }
  }

  override def flush(): Unit = {}

  override def close(): Unit = {
    log = null
  }
} 
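A sketch of how such a writer might be hooked up, assuming a JSR-223 JavaScript engine (e.g. Nashorn) is available: script output is redirected into the log callback, with bare newlines filtered out by the write override above.

import javax.script.ScriptEngineManager

object CustomWriterDemo extends App {
  val writer = new CustomWriter()
  writer.log = chars => println(s"[script] $chars")

  val engine = new ScriptEngineManager().getEngineByName("nashorn")
  engine.getContext.setWriter(writer)
  engine.eval("""print("hello from the script")""") // forwarded to writer.log
}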
Example 15
Source File: VCFHeaderWriter.scala    From glow   with Apache License 2.0
package htsjdk.variant.variantcontext.writer

import java.io.{StringWriter, Writer}

import htsjdk.variant.vcf.VCFHeader

object VCFHeaderWriter {
  // Shim into default visibility VCFWriter class
  def writeHeader(
      header: VCFHeader,
      writer: Writer,
      versionLine: String,
      streamNameForError: String): VCFHeader = {
    VCFWriter.writeHeader(header, writer, versionLine, streamNameForError)
  }

  def writeHeaderAsString(header: VCFHeader): String = {
    val writer = new StringWriter()
    writeHeader(header, writer, VCFWriter.getVersionLine, "headerBuffer")
    writer.toString
  }
} 
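For instance (assuming htsjdk on the classpath), rendering a minimal header:

import htsjdk.variant.vcf.VCFHeader

// even an empty header renders the ##fileformat version line
val text = VCFHeaderWriter.writeHeaderAsString(new VCFHeader())
println(text)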
Example 16
Source File: tty_loop.scala    From libisabelle   with Apache License 2.0
package isabelle


import java.io.{IOException, Writer, Reader, InputStreamReader, BufferedReader}


class TTY_Loop(writer: Writer, reader: Reader,
  writer_lock: AnyRef = new Object,
  interrupt: Option[() => Unit] = None)
{
  private val console_output = Future.thread[Unit]("console_output") {
    try {
      var result = new StringBuilder(100)
      var finished = false
      while (!finished) {
        var c = -1
        var done = false
        while (!done && (result.length == 0 || reader.ready)) {
          c = reader.read
          if (c >= 0) result.append(c.asInstanceOf[Char])
          else done = true
        }
        if (result.length > 0) {
          System.out.print(result.toString)
          System.out.flush()
          result.length = 0
        }
        else {
          reader.close()
          finished = true
        }
      }
    }
    catch { case e: IOException => case Exn.Interrupt() => }
  }

  private val console_input = Future.thread[Unit]("console_input") {
    val console_reader = new BufferedReader(new InputStreamReader(System.in))
    def body
    {
      try {
        var finished = false
        while (!finished) {
          console_reader.readLine() match {
            case null =>
              writer.close()
              finished = true
            case line =>
              writer_lock.synchronized {
                writer.write(line)
                writer.write("\n")
                writer.flush()
              }
          }
        }
      }
      catch { case e: IOException => case Exn.Interrupt() => }
    }
    interrupt match {
      case None => body
      case Some(int) => POSIX_Interrupt.handler { int() } { body }
    }
  }

  def join { console_output.join; console_input.join }

  def cancel { console_input.cancel }
} 
Example 17
Source File: Impl.scala    From jsdependencies   with BSD 3-Clause "New" or "Revised" License
package org.scalajs.jsdependencies.core.json

import org.json.simple.JSONValue

import java.io.{Writer, Reader}
import java.util.function.{BiConsumer, Consumer}

private[json] object Impl {

  type Repr = Object

  def fromString(x: String): Repr = x
  def fromNumber(x: Number): Repr = x
  def fromBoolean(x: Boolean): Repr = java.lang.Boolean.valueOf(x)

  def fromList(x: List[Repr]): Repr = {
    val result = new java.util.LinkedList[Repr]
    x.foreach(result.add(_))
    result
  }

  def fromMap(x: Map[String, Repr]): Repr = {
    val result = new java.util.HashMap[String, Repr]
    for ((key, value) <- x)
      result.put(key, value)
    result
  }

  def toString(x: Repr): String = x.asInstanceOf[String]
  def toNumber(x: Repr): Number = x.asInstanceOf[Number]
  def toBoolean(x: Repr): Boolean =
    x.asInstanceOf[java.lang.Boolean].booleanValue()

  def toList(x: Repr): List[Repr] = {
    val builder = List.newBuilder[Repr]
    x.asInstanceOf[java.util.List[Repr]].forEach(new Consumer[Repr] {
      def accept(elem: Repr): Unit =
        builder += elem
    })
    builder.result()
  }

  def toMap(x: Repr): Map[String, Repr] = {
    val builder = Map.newBuilder[String, Repr]
    x.asInstanceOf[java.util.Map[String, Repr]].forEach(new BiConsumer[String, Repr] {
      def accept(key: String, value: Repr): Unit =
        builder += key -> value
    })
    builder.result()
  }

  def serialize(x: Repr): String =
    JSONValue.toJSONString(x)

  def serialize(x: Repr, writer: Writer): Unit =
    JSONValue.writeJSONString(x, writer)

  def deserialize(str: String): Repr = JSONValue.parseWithException(str)

  def deserialize(reader: Reader): Repr = JSONValue.parseWithException(reader)

} 
Example 18
Source File: IO.scala    From RosHTTP   with MIT License
package fr.hmil.roshttp.tools.io

import java.io.{ByteArrayOutputStream, OutputStream, Writer, _}

import scala.annotation.tailrec
import scala.reflect.ClassTag


object IO {

  // Copies all characters from `in` to `out` through a fixed-size buffer.
  def pipe(in: Reader, out: Writer): Unit = {
    val buffer = newBuffer[Char]

    @tailrec
    def loop(): Unit = {
      val size = in.read(buffer)
      if (size > 0) {
        out.write(buffer, 0, size)
        loop()
      }
    }
    loop()
  }

  @inline
  private def newBuffer[T: ClassTag] = new Array[T](4096)
} 
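Usage is straightforward, e.g. copying between an in-memory reader and writer:

import java.io.{StringReader, StringWriter}

val in = new StringReader("copy me")
val out = new StringWriter()
IO.pipe(in, out) // loops on a 4096-char buffer until read() signals end of stream
assert(out.toString == "copy me")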
Example 19
Source File: SHA.scala    From ScalaStan   with BSD 3-Clause "New" or "Revised" License
package com.cibo.scalastan

import java.io.Writer

case class SHA() {

  private val md = java.security.MessageDigest.getInstance("SHA-1")

  def update(str: String): SHA = update(str.getBytes)

  def update(bytes: Array[Byte]): SHA = {
    md.update(bytes)
    this
  }

  def digest: String = {
    val hashBytes = md.digest.flatMap { b =>
      val digits = "0123456789abcdef"
      digits((b.toInt & 255) >> 4).toString + digits(b.toInt & 15).toString
    }
    new String(hashBytes)
  }
}

case class ShaWriter(writer: Writer) extends Writer {

  val sha = SHA()

  override def write(cbuf: Array[Char], off: Int, len: Int): Unit = {
    sha.update(new String(cbuf.slice(off, off + len)).getBytes) // slice's end index is exclusive, so off + len
    writer.write(cbuf, off, len)
  }

  override def flush(): Unit = writer.flush()

  override def close(): Unit = writer.close()
}

object SHA {
  def hash(str: String): String = SHA().update(str).digest
} 
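A short usage sketch: ShaWriter hashes everything it forwards, so the digest of the streamed content matches hashing the whole string at once.

import java.io.StringWriter

val sw = ShaWriter(new StringWriter)
sw.write("model v1") // Writer.write(String) delegates to the overridden write(Array[Char], Int, Int)
sw.flush()
assert(sw.sha.digest == SHA.hash("model v1"))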
Example 20
Source File: MetricsController.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package controllers

import java.io.Writer
import javax.inject.Singleton

import akka.util.ByteString
import io.prometheus.client._
import io.prometheus.client.exporter.common.TextFormat
import io.prometheus.client.hotspot.DefaultExports
import play.api.http.HttpEntity
import play.api.mvc._

@Singleton
class MetricsController extends Controller {
  // export default JVM metrics
  DefaultExports.initialize()

  def index = Action {
    val samples = new StringBuilder()
    val writer = new WriterAdapter(samples)

    TextFormat.write004(writer, CollectorRegistry.defaultRegistry.metricFamilySamples())
    writer.close()

    Result(
      header = ResponseHeader(200, Map.empty),
      body = HttpEntity.Strict(ByteString(samples.toString), Some(TextFormat.CONTENT_TYPE_004))
    )
  }
}

class WriterAdapter(buffer: StringBuilder) extends Writer {
  override def write(charArray: Array[Char], offset: Int, length: Int): Unit = {
    buffer ++= new String(new String(charArray, offset, length).getBytes("UTF-8"), "UTF-8")
  }
  override def flush(): Unit = {}
  override def close(): Unit = {}
} 
Example 21
Source File: MetricsController.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package controllers
import java.io.Writer
import javax.inject.Singleton

import akka.util.ByteString
import io.prometheus.client._
import io.prometheus.client.exporter.common.TextFormat
import io.prometheus.client.hotspot.DefaultExports
import play.api.http.HttpEntity
import play.api.mvc._


@Singleton
class MetricsController extends Controller {
  // export default JVM metrics
  DefaultExports.initialize()

  def index = Action {
    val samples = new StringBuilder()
    val writer = new WriterAdapter(samples)

    TextFormat.write004(writer, CollectorRegistry.defaultRegistry.metricFamilySamples())
    writer.close()

    Result(
      header = ResponseHeader(200, Map.empty),
      body = HttpEntity.Strict(ByteString(samples.toString), Some(TextFormat.CONTENT_TYPE_004))
    )
  }
}

class WriterAdapter(buffer: StringBuilder) extends Writer {
  override def write(charArray: Array[Char], offset: Int, length: Int): Unit = {
    val b = buffer.append(new String(new String(charArray, offset, length).getBytes("UTF-8"), "UTF-8"))
  }
  override def flush(): Unit = {}
  override def close(): Unit = {}
} 
Example 22
Source File: package.scala    From Converter   with GNU General Public License v3.0
package com.olvind

import java.io.{StringWriter, Writer}

import com.olvind.logging.Logger.{AppendableLogger, Stored, StoringLogger, WriterLogger}
import fansi.Str

package object logging {
  type Ctx = Map[Str, Str]

  private[logging] val emptyContext: Ctx = Map.empty

  def stdout: Logger[Unit] =
    appendable(System.out).void

  def appendable[A <: Appendable](
      appendable: A,
      pattern:    Pattern = Pattern.default,
      ctx:        Ctx = emptyContext,
  ): Logger[A] =
    new AppendableLogger(appendable, pattern, ctx)

  def writer[W <: Writer](
      writer:  W, // no default: System.out is a PrintStream, not a Writer
      pattern: Pattern = Pattern.default,
      ctx:     Ctx     = emptyContext,
  ): Logger[W] =
    new WriterLogger(new AppendableLogger(writer, pattern, ctx))

  def stringWriter(pattern: Pattern = Pattern.default, ctx: Ctx = emptyContext): Logger[StringWriter] =
    writer(new StringWriter)

  def storing(ctx: Ctx = emptyContext): Logger[Array[Stored]] =
    new StoringLogger(new Logger.Store, ctx)
} 
Example 23
Source File: CsvSupport.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.csv

import java.io.{OutputStream, Writer}

import com.univocity.parsers.csv.{CsvParser, CsvParserSettings, CsvWriter, CsvWriterSettings}

object CsvSupport {

  def createParser(format: CsvFormat,
                   ignoreLeadingWhitespaces: Boolean = true,
                   ignoreTrailingWhitespaces: Boolean = true,
                   skipEmptyLines: Boolean = true,
                   emptyCellValue: String = null,
                   nullValue: String = null,
                   skipRows: Option[Long] = None,
                   selectedColumns: Seq[String] = Seq.empty): CsvParser = {
    val settings = new CsvParserSettings()
    settings.getFormat.setDelimiter(format.delimiter)
    settings.getFormat.setQuote(format.quoteChar)
    settings.getFormat.setQuoteEscape(format.quoteEscape)
    settings.setLineSeparatorDetectionEnabled(true)
    // this is always true as we will fetch the headers ourselves by reading first row
    settings.setHeaderExtractionEnabled(false)
    settings.setIgnoreLeadingWhitespaces(ignoreLeadingWhitespaces)
    settings.setIgnoreTrailingWhitespaces(ignoreTrailingWhitespaces)
    settings.setSkipEmptyLines(skipEmptyLines)
    settings.setCommentCollectionEnabled(true)
    settings.setEmptyValue(emptyCellValue)
    settings.setNullValue(nullValue)
    settings.setMaxCharsPerColumn(-1)
    settings.setMaxColumns(2048)
    settings.setReadInputOnSeparateThread(false)
    skipRows.foreach(settings.setNumberOfRowsToSkip)
    selectedColumns.headOption.foreach(_ => settings.selectFields(selectedColumns: _*))
    new com.univocity.parsers.csv.CsvParser(settings)
  }

  def writerSettings(format: CsvFormat,
                     ignoreLeadingWhitespaces: Boolean,
                     ignoreTrailingWhitespaces: Boolean): CsvWriterSettings = {
    val settings = new CsvWriterSettings()
    settings.getFormat.setDelimiter(format.delimiter)
    settings.getFormat.setQuote(format.quoteChar)
    settings.getFormat.setQuoteEscape(format.quoteEscape)
    // we will handle header writing ourselves
    settings.setHeaderWritingEnabled(false)
    settings.setIgnoreLeadingWhitespaces(ignoreLeadingWhitespaces)
    settings.setIgnoreTrailingWhitespaces(ignoreTrailingWhitespaces)
    settings
  }

  def createWriter(writer: Writer,
                   format: CsvFormat,
                   ignoreLeadingWhitespaces: Boolean,
                   ignoreTrailingWhitespaces: Boolean): CsvWriter = {
    new CsvWriter(writer, writerSettings(format, ignoreLeadingWhitespaces, ignoreTrailingWhitespaces))
  }

  def createWriter(output: OutputStream,
                   format: CsvFormat,
                   ignoreLeadingWhitespaces: Boolean,
                   ignoreTrailingWhitespaces: Boolean): CsvWriter = {
    new CsvWriter(output, writerSettings(format, ignoreLeadingWhitespaces, ignoreTrailingWhitespaces))
  }
} 
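A usage sketch for the parser side, assuming eel's CsvFormat provides defaults (',' delimiter, '"' quote):

import java.io.StringReader

val parser = CsvSupport.createParser(CsvFormat())
val rows = parser.parseAll(new StringReader("a,b\n1,2"))
// rows is a java.util.List[Array[String]] holding ["a", "b"] and ["1", "2"]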
Example 24
Source File: WriteSupport.scala    From CodeAnalyzerTutorial   with Apache License 2.0
package tutor.utils

import java.io.{BufferedWriter, File, FileWriter, Writer}

trait WriteSupport {

  def withWriter(path: String)(f: Writer => Unit): Unit = {
    var writer: Writer = null
    try {
      val file = new File(path)
      if (!file.exists()) file.createNewFile()
      writer = new BufferedWriter(new FileWriter(file))
      f(writer)
      writer.flush()
    } finally {
      if (writer != null) writer.close()
    }
  }
} 
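Usage follows the loan pattern: the writer is created, handed to the function, then flushed and closed even if the body throws.

object ReportWriter extends WriteSupport {
  def save(path: String, lines: Seq[String]): Unit =
    withWriter(path) { writer =>
      lines.foreach(line => writer.write(line + "\n"))
    }
}

// ReportWriter.save("/tmp/report.txt", Seq("line 1", "line 2"))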
Example 25
Source File: JsonUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.kafka010

import java.io.Writer

import scala.collection.mutable.HashMap
import scala.util.control.NonFatal

import org.apache.kafka.common.TopicPartition
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization


private object JsonUtils {
  private implicit val formats = Serialization.formats(NoTypeHints)

  // Serializes per-topic partition offsets as {"topic":{"partition":offset}}.
  def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
    val result = new HashMap[String, HashMap[Int, Long]]()
    partitionOffsets.foreach { case (tp, off) =>
        val parts = result.getOrElse(tp.topic, new HashMap[Int, Long])
        parts += tp.partition -> off
        result += tp.topic -> parts
    }
    Serialization.write(result)
  }
} 
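For example (the nested shape is the point; exact field order may vary):

import org.apache.kafka.common.TopicPartition

val json = JsonUtils.partitionOffsets(Map(
  new TopicPartition("topicA", 0) -> 23L,
  new TopicPartition("topicA", 1) -> -1L
))
// json == """{"topicA":{"0":23,"1":-1}}"""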
Example 26
Source File: CompiledModel.scala    From ScalaStan   with BSD 3-Clause "New" or "Revised" License
package com.cibo.scalastan

import java.io.Writer

import com.cibo.scalastan.ast.{StanDataDeclaration, StanParameterDeclaration}
import com.cibo.scalastan.run.StanRunner

sealed trait InitialValue

case object DefaultInitialValue extends InitialValue
case class InitialValueDouble(v: Double) extends InitialValue
case class InitialValueMapping(mapping: Map[String, DataMapping[_]]) extends InitialValue

case class CompiledModel(
  model: StanModel,
  runner: StanRunner,
  dataMapping: Map[String, DataMapping[_]] = Map.empty,
  initialValue: InitialValue = DefaultInitialValue
) {

  private def emitMapping(mapping: Map[String, DataMapping[_]], writer: Writer): Unit = {
    mapping.values.foreach { value =>
      writer.write(value.emit)
      writer.write("\n")
    }
  }

  final def emitData(writer: Writer): Unit = emitMapping(dataMapping, writer)
  final def emitInitialValues(writer: Writer): Unit = initialValue match {
    case InitialValueMapping(mapping) => emitMapping(mapping, writer)
    case _                            => ()
  }

  
  // Runs the compiled model; every declared data value must be mapped before running.
  final def run(
    chains: Int = 4,
    seed: Int = -1,
    cache: Boolean = true,
    method: RunMethod.Method = RunMethod.Sample()
  ): StanResults = {
    require(chains > 0, s"Must run at least one chain")

    // Make sure all the necessary data is provided.
    model.program.data.filterNot(v => dataMapping.contains(v.emit)).foreach { v =>
      throw new IllegalStateException(s"data not supplied for ${v.name}")
    }

    runner.run(
      compiledModel = this,
      chains = chains,
      seed = seed,
      cache = cache,
      method = method
    )
  }
} 
Example 27
Source File: ScalaObjectHandler.scala    From fintrospect   with Apache License 2.0
package io.fintrospect.templating

import java.io.Writer
import java.lang.reflect.{Field, Method}

import com.github.mustachejava.Iteration
import com.github.mustachejava.reflect.ReflectionObjectHandler

import scala.collection.JavaConversions.mapAsJavaMap
import scala.reflect.ClassTag
import scala.runtime.BoxedUnit

class ScalaObjectHandler extends ReflectionObjectHandler {

  override def checkMethod(member: Method) {}

  override def checkField(member: Field) {}

  override def coerce(value: AnyRef) = value match {
    case m: collection.Map[_, _] => mapAsJavaMap(m)
    case _: BoxedUnit => null
    case Some(some: AnyRef) => coerce(some)
    case None => null
    case _ => value
  }

  override def iterate(iteration: Iteration, writer: Writer, value: AnyRef, scopes: java.util.List[AnyRef]) = value match {
    case TraversableAnyRef(t) => {
      var newWriter = writer
      t foreach (next => newWriter = iteration.next(newWriter, coerce(next), scopes))
      newWriter
    }
    case n: Number => if (n.intValue() == 0) writer else iteration.next(writer, coerce(value), scopes)
    case _ => super.iterate(iteration, writer, value, scopes)
  }

  override def falsey(iteration: Iteration, writer: Writer, value: AnyRef, scopes: java.util.List[AnyRef]) = value match {
    case TraversableAnyRef(t) => if (t.isEmpty) iteration.next(writer, value, scopes) else writer
    case n: Number => if (n.intValue() == 0) iteration.next(writer, coerce(value), scopes) else writer
    case _ => super.falsey(iteration, writer, value, scopes)
  }

  private val TraversableAnyRef = new Def[Traversable[AnyRef]]

  private class Def[C: ClassTag] {
    def unapply[X: ClassTag](x: X): Option[C] = x match {
      case c: C => Some(c)
      case _ => None
    }
  }

} 
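A usage sketch with mustache.java (inline template for brevity): installing the handler lets templates iterate Scala collections directly.

import java.io.{PrintWriter, StringReader}
import com.github.mustachejava.DefaultMustacheFactory

object MustacheDemo extends App {
  val factory = new DefaultMustacheFactory()
  factory.setObjectHandler(new ScalaObjectHandler)

  val template = factory.compile(new StringReader("Hello {{#names}}{{.}} {{/names}}"), "demo")
  val scope = Map("names" -> List("alice", "bob"))
  template.execute(new PrintWriter(System.out), scope).flush()
  // prints: Hello alice bob
}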
Example 28
Source File: InternalWriter.scala    From kantan.csv   with Apache License 2.0
package kantan.csv
package engine

import java.io.Writer
import scala.annotation.tailrec

private[csv] class InternalWriter(private val out: Writer, val conf: CsvConfiguration) extends CsvWriter[Seq[String]] {

  private def safeWrite(str: String): Unit = {
    @tailrec
    def escape(mark: Int, i: Int): Unit =
      if(i >= str.length) {
        if(mark != i) out.write(str, mark, i - mark)
      }
      else if(str.charAt(i) == conf.quote) {
        out.write(str, mark, i - mark + 1)
        out.write(conf.quote.toInt)
        escape(i + 1, i + 1)
      }
      else escape(mark, i + 1)

    @tailrec
    def escapeIndex(index: Int): Int =
      if(index >= str.length) -1
      else {
        val c = str.charAt(index)
        if(c == conf.quote || c == conf.cellSeparator || c == '\n' || c == '\r') index
        else escapeIndex(index + 1)
      }

    // If we're configured to always quote, do so.
    if(conf.quotePolicy == CsvConfiguration.QuotePolicy.Always) {
      out.write(conf.quote.toInt)
      out.write(str)
      out.write(conf.quote.toInt)
    }

    // Otherwise, only quotes when needed.
    else {
      val index = escapeIndex(0)

      if(index == -1) out.write(str)
      else {
        out.write(conf.quote.toInt)
        out.write(str, 0, index)
        escape(index, index)
        out.write(conf.quote.toInt)
      }
    }
  }

  @SuppressWarnings(Array("org.wartremover.warts.Var"))
  override def write(ss: Seq[String]): CsvWriter[Seq[String]] = {
    var first = true
    for(s <- ss) {
      if(first) first = false
      else out.write(conf.cellSeparator.toInt)
      safeWrite(s)
    }

    out.write("\r\n") // According to the RFC, \n alone is not valid.
    this
  }

  override def close(): Unit = out.close()
} 
Example 29
Source File: PlaySEIGenerator.scala    From play-soap   with Apache License 2.0
package play.soap.sbtplugin

import java.io.Writer

import org.apache.cxf.tools.wsdlto.frontend.jaxws.generators.SEIGenerator


// Generates the service endpoint interface using a Play-specific Velocity template.
class PlaySEIGenerator extends SEIGenerator with PlayGenerator {
  override def setCommonAttributes() = {
    super.setCommonAttributes()
    setPlayAttributes()
  }

  def setAttribute(name: String, value: AnyRef) = setAttributes(name, value)

  override def doWrite(templateName: String, outputs: Writer) = {
    // Override the template... it should only ever be sei.vm, but in case it's not.
    val newTemplate = if (templateName.endsWith("/sei.vm")) {
      "play/soap/sbtplugin/sei.vm"
    } else templateName

    // Add the future API to the velocity context.  The reason this must be done here is that the method that invokes
    // doWrite() first clears the context before invoking this.
    setAttributes("future", new FutureGenerator(env.get(classOf[Imports.WsdlKeys.FutureApi])))

    super.doWrite(newTemplate, outputs)
  }
} 
Example 30
Source File: PlayClientGenerator.scala    From play-soap   with Apache License 2.0
package play.soap.sbtplugin

import java.io.Writer

import org.apache.cxf.tools.common.model.JavaPort
import org.apache.cxf.tools.util.ClassCollector
import org.apache.cxf.tools.wsdlto.frontend.jaxws.generators.ServiceGenerator


// Assumed class header: by analogy with PlaySEIGenerator above, this mixes PlayGenerator into CXF's ServiceGenerator.
class PlayClientGenerator extends ServiceGenerator with PlayGenerator {

  override def parseOutputName(packageName: String, filename: String) = {
    register(env.get(classOf[ClassCollector]), packageName, filename)
    parseOutputName(packageName, filename, ".scala")
  }

  private object PortMethodNameGenerator {
    def transform(port: JavaPort): String = {
      port.getName.head.toLower + port.getName.tail
    }
  }
} 
Example 31
Source File: package.scala    From tethys   with Apache License 2.0
import java.io.{Reader, StringReader, StringWriter, Writer}

import tethys.readers.{FieldName, ReaderError}
import tethys.readers.tokens.{TokenIterator, TokenIteratorProducer}
import tethys.writers.tokens.{TokenWriter, TokenWriterProducer}

import scala.Specializable.Group

package object tethys {

  final val specializations = new Group((Short, Int, Long, Float, Double, Boolean))

  implicit class JsonWriterOps[A](val a: A) extends AnyVal {
    def asJson(implicit jsonWriter: JsonWriter[A], tokenWriterProducer: TokenWriterProducer): String = {
      val stringWriter = new StringWriter()
      writeJson(tokenWriterProducer.forWriter(stringWriter))
      stringWriter.toString
    }

    def asJsonWith(jsonWriter: JsonWriter[A])(implicit tokenWriterProducer: TokenWriterProducer): String = {
      asJson(jsonWriter, tokenWriterProducer)
    }

    def writeJson(tokenWriter: TokenWriter)(implicit jsonWriter: JsonWriter[A]): Unit = {
      try jsonWriter.write(a, tokenWriter) finally {
        tokenWriter.flush()
      }
    }
  }

  implicit class WriterOps(val w: Writer) extends AnyVal {
    def toTokenWriter(implicit tokenWriterProducer: TokenWriterProducer): TokenWriter = tokenWriterProducer.forWriter(w)
  }

  implicit class StringReaderOps(val json: String) extends AnyVal {
    def jsonAs[A](implicit jsonReader: JsonReader[A], producer: TokenIteratorProducer): Either[ReaderError, A] = {
      new StringReader(json).readJson[A]
    }

    def toTokenIterator(implicit producer: TokenIteratorProducer): Either[ReaderError, TokenIterator] = {
      new StringReader(json).toTokenIterator
    }
  }

  implicit class ReaderReaderOps(val reader: Reader) extends AnyVal {
    def readJson[A](implicit jsonReader: JsonReader[A], producer: TokenIteratorProducer): Either[ReaderError, A] = {
      implicit val root: FieldName = FieldName()
      producer.fromReader(reader).right.flatMap(_.readJson[A])
    }

    def readJsonWith[A](jsonReader: JsonReader[A])(implicit producer: TokenIteratorProducer): Either[ReaderError, A] = {
      readJson[A](jsonReader, producer)
    }

    def toTokenIterator(implicit producer: TokenIteratorProducer): Either[ReaderError, TokenIterator] = {
      producer.fromReader(reader)
    }
  }

  implicit class TokenIteratorOps(val tokenIterator: TokenIterator) extends AnyVal {
    def readJson[A](implicit jsonReader: JsonReader[A]): Either[ReaderError, A] = {
      implicit val fieldName: FieldName = FieldName()
      ReaderError.catchNonFatal(jsonReader.read(tokenIterator))
    }
  }
} 
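A sketch tying this syntax together with the jackson backend from the next example; the JsonWriter.obj builder used here is assumed from tethys' writer API.

import tethys._
import tethys.jackson._

case class Person(name: String, age: Int)

implicit val personWriter: JsonWriter[Person] =
  JsonWriter
    .obj[Person]
    .addField("name")(_.name)
    .addField("age")(_.age)

val json: String = Person("alice", 33).asJson // {"name":"alice","age":33}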
Example 32
Source File: package.scala    From tethys   with Apache License 2.0
package tethys

import java.io.{Reader, Writer}

import com.fasterxml.jackson.core.JsonFactory
import tethys.readers.{FieldName, ReaderError}
import tethys.readers.tokens.{TokenIterator, TokenIteratorProducer}
import tethys.writers.tokens.{TokenWriter, TokenWriterProducer}

package object jackson {
  lazy val defaultJsonFactory: JsonFactory = {
    val f = new JsonFactory()
    f.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, false)
    f
  }


  implicit def jacksonTokenWriterProducer(implicit jsonFactory: JsonFactory = defaultJsonFactory): TokenWriterProducer = new TokenWriterProducer {
    override def forWriter(writer: Writer): TokenWriter = {
      new JacksonTokenWriter(jsonFactory.createGenerator(writer))
    }
  }

  implicit def jacksonTokenIteratorProducer(implicit jsonFactory: JsonFactory = defaultJsonFactory): TokenIteratorProducer = new TokenIteratorProducer {
    override def fromReader(reader: Reader): Either[ReaderError, TokenIterator] = {
      ReaderError.catchNonFatal(JacksonTokenIterator.fromFreshParser(jsonFactory.createParser(reader)))(FieldName())
    }
  }
} 
Example 33
Source File: BfsStrategyStopWatchDecorator.scala    From apalache   with Apache License 2.0
package at.forsyte.apalache.tla.bmcmt.search

import java.io.{FileWriter, PrintWriter, Writer}
import java.time.{Duration, LocalDateTime}

import at.forsyte.apalache.tla.bmcmt.search.SearchStrategy.{Finish, FinishOnDeadlock, NextStep}


// Decorates a search strategy, logging per-step wall-clock timings to a CSV file.
class BfsStrategyStopWatchDecorator(strategy: SearchStrategy, filename: String) extends SearchStrategy {
  private var currentStep: Int = 0
  private var printWriter: Option[PrintWriter] = None
  private var startTime: LocalDateTime = LocalDateTime.now()

  override def getCommand: SearchStrategy.Command = {
    val command = strategy.getCommand
    command match {
      case NextStep(stepNo, _, _) =>
        if (stepNo == 0) {
          currentStep = 0
          // create a log file and add a header
          printWriter = Some(new PrintWriter(new FileWriter(filename, false)))
          printWriter.get.println("step,total_sec,nanosec_adjustment")
          // start the timer
          startTime = LocalDateTime.now()
        } else {
          appendCsvEntry()
          currentStep = stepNo
        }

      case Finish() | FinishOnDeadlock() =>
        appendCsvEntry()
        printWriter.get.close()
    }
    command
  }

  private def appendCsvEntry(): Unit = {
    val currentTime = LocalDateTime.now()
    val duration = Duration.between(startTime, currentTime)
    printWriter.get.println("%d,%d,%d".format(currentStep, duration.getSeconds, duration.getNano))
    printWriter.get.flush() // get the results as soon as possible
  }

  override def registerResponse(response: SearchStrategy.Response): Unit = {
    strategy.registerResponse(response)
  }
}